Merge pull request 'feat: Enhanced database status monitoring and configuration management' (#34) from feature/sqlite-database-integration into main
Reviewed-on: #34
This commit is contained in:
commit
0b0de87be0
44 changed files with 9315 additions and 35 deletions
10
.gitignore
vendored
10
.gitignore
vendored
|
|
@ -36,4 +36,12 @@ Thumbs.db
|
||||||
|
|
||||||
# Temporary files
|
# Temporary files
|
||||||
tmp/
|
tmp/
|
||||||
temp/
|
temp/
|
||||||
|
|
||||||
|
# Database files
|
||||||
|
*.db
|
||||||
|
*.db-shm
|
||||||
|
*.db-wal
|
||||||
|
*.sqlite
|
||||||
|
*.sqlite3
|
||||||
|
dev-skyview.db
|
||||||
|
|
@ -103,4 +103,5 @@ This document outlines coding standards, architectural principles, and developme
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
These guidelines ensure SkyView remains reliable, maintainable, and suitable for aviation use while supporting continued development and enhancement.
|
These guidelines ensure SkyView remains reliable, maintainable, and suitable for aviation use while supporting continued development and enhancement.
|
||||||
|
- All future changes to the UX should keep in mind a gradual move to a mobile friendly design, but not at the cost of a working and useful UI for non-mobile clients.
|
||||||
10
Makefile
10
Makefile
|
|
@ -17,8 +17,14 @@ build-beast-dump:
|
||||||
@mkdir -p $(BUILD_DIR)
|
@mkdir -p $(BUILD_DIR)
|
||||||
go build -ldflags="$(LDFLAGS)" -o $(BUILD_DIR)/beast-dump ./cmd/beast-dump
|
go build -ldflags="$(LDFLAGS)" -o $(BUILD_DIR)/beast-dump ./cmd/beast-dump
|
||||||
|
|
||||||
|
# Build skyview-data database management binary
|
||||||
|
build-skyview-data:
|
||||||
|
@echo "Building skyview-data..."
|
||||||
|
@mkdir -p $(BUILD_DIR)
|
||||||
|
go build -ldflags="$(LDFLAGS)" -o $(BUILD_DIR)/skyview-data ./cmd/skyview-data
|
||||||
|
|
||||||
# Build all binaries
|
# Build all binaries
|
||||||
build-all: build build-beast-dump
|
build-all: build build-beast-dump build-skyview-data
|
||||||
@echo "Built all binaries successfully:"
|
@echo "Built all binaries successfully:"
|
||||||
@ls -la $(BUILD_DIR)/
|
@ls -la $(BUILD_DIR)/
|
||||||
|
|
||||||
|
|
@ -99,4 +105,4 @@ vet:
|
||||||
check: format vet lint test
|
check: format vet lint test
|
||||||
@echo "All checks passed!"
|
@echo "All checks passed!"
|
||||||
|
|
||||||
.DEFAULT_GOAL := build
|
.DEFAULT_GOAL := build-all
|
||||||
68
README.md
68
README.md
|
|
@ -27,12 +27,15 @@ A high-performance, multi-source ADS-B aircraft tracking application that connec
|
||||||
- **Map Controls**: Center on aircraft, reset to origin, toggle overlays
|
- **Map Controls**: Center on aircraft, reset to origin, toggle overlays
|
||||||
- **Signal Heatmaps**: Coverage heatmap visualization *(under construction)* 🚧
|
- **Signal Heatmaps**: Coverage heatmap visualization *(under construction)* 🚧
|
||||||
|
|
||||||
### Aircraft Data
|
### Aircraft Data Enhancement
|
||||||
- **Complete Mode S Decoding**: Position, velocity, altitude, heading
|
- **Complete Mode S Decoding**: Position, velocity, altitude, heading
|
||||||
- **Aircraft Identification**: Callsign, category, country, registration
|
- **Aircraft Identification**: Callsign, category, country, registration
|
||||||
|
- **Enhanced Callsign Lookup**: Multi-source airline database with 6,162+ airlines and 83,557+ airports
|
||||||
|
- **Aviation Data Integration**: OpenFlights and OurAirports databases with automatic updates
|
||||||
- **ICAO Country Database**: Comprehensive embedded database with 70+ allocations covering 40+ countries
|
- **ICAO Country Database**: Comprehensive embedded database with 70+ allocations covering 40+ countries
|
||||||
- **Multi-source Tracking**: Signal strength from each receiver
|
- **Multi-source Tracking**: Signal strength from each receiver
|
||||||
- **Historical Data**: Position history and trail visualization
|
- **Historical Data**: Position history with configurable retention
|
||||||
|
- **Database Optimization**: Automatic VACUUM operations and storage efficiency monitoring
|
||||||
|
|
||||||
## 🚀 Quick Start
|
## 🚀 Quick Start
|
||||||
|
|
||||||
|
|
@ -255,23 +258,66 @@ sudo journalctl -u skyview -f
|
||||||
make build
|
make build
|
||||||
|
|
||||||
# Create user and directories
|
# Create user and directories
|
||||||
sudo useradd -r -s /bin/false skyview
|
sudo useradd -r -s /bin/false skyview-adsb
|
||||||
sudo mkdir -p /etc/skyview /var/lib/skyview /var/log/skyview
|
sudo mkdir -p /etc/skyview-adsb /var/lib/skyview-adsb /var/log/skyview-adsb
|
||||||
sudo chown skyview:skyview /var/lib/skyview /var/log/skyview
|
sudo chown skyview-adsb:skyview-adsb /var/lib/skyview-adsb /var/log/skyview-adsb
|
||||||
|
|
||||||
# Install binary and config
|
# Install binary and config
|
||||||
sudo cp build/skyview /usr/bin/
|
sudo cp build/skyview /usr/bin/
|
||||||
sudo cp config.example.json /etc/skyview/config.json
|
sudo cp build/skyview-data /usr/bin/
|
||||||
sudo chown root:skyview /etc/skyview/config.json
|
sudo cp config.example.json /etc/skyview-adsb/config.json
|
||||||
sudo chmod 640 /etc/skyview/config.json
|
sudo chown root:skyview-adsb /etc/skyview-adsb/config.json
|
||||||
|
sudo chmod 640 /etc/skyview-adsb/config.json
|
||||||
|
|
||||||
# Create systemd service
|
# Create systemd service
|
||||||
sudo cp debian/lib/systemd/system/skyview.service /lib/systemd/system/
|
sudo cp debian/lib/systemd/system/skyview-adsb.service /lib/systemd/system/
|
||||||
|
sudo cp debian/lib/systemd/system/skyview-database-update.service /lib/systemd/system/
|
||||||
|
sudo cp debian/lib/systemd/system/skyview-database-update.timer /lib/systemd/system/
|
||||||
sudo systemctl daemon-reload
|
sudo systemctl daemon-reload
|
||||||
sudo systemctl enable skyview
|
sudo systemctl enable skyview-adsb
|
||||||
sudo systemctl start skyview
|
sudo systemctl enable skyview-database-update.timer
|
||||||
|
sudo systemctl start skyview-adsb
|
||||||
|
sudo systemctl start skyview-database-update.timer
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Database Management
|
||||||
|
|
||||||
|
SkyView includes powerful database management capabilities through the `skyview-data` command:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Update aviation data sources (airlines, airports)
|
||||||
|
skyview-data update
|
||||||
|
|
||||||
|
# Optimize database storage and performance
|
||||||
|
skyview-data optimize
|
||||||
|
|
||||||
|
# Check database optimization statistics
|
||||||
|
skyview-data optimize --stats-only
|
||||||
|
|
||||||
|
# List available data sources
|
||||||
|
skyview-data list
|
||||||
|
|
||||||
|
# Check current database status
|
||||||
|
skyview-data status
|
||||||
|
```
|
||||||
|
|
||||||
|
The system automatically:
|
||||||
|
- Updates aviation databases on service startup
|
||||||
|
- Runs weekly database updates via systemd timer
|
||||||
|
- Optimizes storage with VACUUM operations
|
||||||
|
- Monitors database efficiency and statistics
|
||||||
|
|
||||||
|
### Configuration
|
||||||
|
|
||||||
|
SkyView supports comprehensive configuration including external aviation data sources:
|
||||||
|
|
||||||
|
- **3 External Data Sources**: OpenFlights Airlines (~6,162), OpenFlights Airports (~7,698), OurAirports (~83,557)
|
||||||
|
- **Database Management**: Automatic optimization, configurable retention, backup settings
|
||||||
|
- **Privacy Controls**: Privacy mode for air-gapped operation, selective source control
|
||||||
|
- **Performance Tuning**: Connection pooling, cache settings, update intervals
|
||||||
|
|
||||||
|
See **[Configuration Guide](docs/CONFIGURATION.md)** for complete documentation of all options.
|
||||||
|
|
||||||
## 🔒 Security
|
## 🔒 Security
|
||||||
|
|
||||||
The application includes security hardening:
|
The application includes security hardening:
|
||||||
|
|
|
||||||
|
|
@ -28,5 +28,6 @@ import "embed"
|
||||||
// This approach ensures the web interface is always available without requiring
|
// This approach ensures the web interface is always available without requiring
|
||||||
// external file deployment or complicated asset management.
|
// external file deployment or complicated asset management.
|
||||||
//
|
//
|
||||||
|
// Updated to include database.html for database status page
|
||||||
//go:embed static
|
//go:embed static
|
||||||
var Static embed.FS
|
var Static embed.FS
|
||||||
|
|
|
||||||
|
|
@ -566,6 +566,95 @@ body {
|
||||||
color: #00ff88 !important;
|
color: #00ff88 !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/* Rich callsign display styles */
|
||||||
|
.callsign-display {
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
|
||||||
|
.callsign-display.enriched {
|
||||||
|
display: inline-flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 0.25rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.callsign-code {
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 0.25rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.airline-code {
|
||||||
|
color: #00ff88 !important;
|
||||||
|
font-weight: 600;
|
||||||
|
font-family: monospace;
|
||||||
|
background: rgba(0, 255, 136, 0.1);
|
||||||
|
padding: 0.1rem 0.3rem;
|
||||||
|
border-radius: 3px;
|
||||||
|
border: 1px solid rgba(0, 255, 136, 0.3);
|
||||||
|
}
|
||||||
|
|
||||||
|
.flight-number {
|
||||||
|
color: #00a8ff !important;
|
||||||
|
font-weight: 500;
|
||||||
|
font-family: monospace;
|
||||||
|
}
|
||||||
|
|
||||||
|
.callsign-details {
|
||||||
|
font-size: 0.85rem;
|
||||||
|
opacity: 0.9;
|
||||||
|
}
|
||||||
|
|
||||||
|
.airline-name {
|
||||||
|
color: #ffd700 !important;
|
||||||
|
font-weight: 500;
|
||||||
|
}
|
||||||
|
|
||||||
|
.airline-country {
|
||||||
|
color: #cccccc !important;
|
||||||
|
font-size: 0.8rem;
|
||||||
|
opacity: 0.8;
|
||||||
|
}
|
||||||
|
|
||||||
|
.callsign-display.simple {
|
||||||
|
color: #00ff88 !important;
|
||||||
|
font-family: monospace;
|
||||||
|
}
|
||||||
|
|
||||||
|
.callsign-display.no-data {
|
||||||
|
color: #888888 !important;
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Compact callsign for table view */
|
||||||
|
.callsign-compact {
|
||||||
|
color: #00ff88 !important;
|
||||||
|
font-family: monospace;
|
||||||
|
font-weight: 500;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Loading state for callsign enhancement */
|
||||||
|
.callsign-loading {
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
|
||||||
|
.callsign-loading::after {
|
||||||
|
content: '⟳';
|
||||||
|
margin-left: 0.25rem;
|
||||||
|
opacity: 0.6;
|
||||||
|
animation: spin 1s linear infinite;
|
||||||
|
font-size: 0.8rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes spin {
|
||||||
|
from { transform: rotate(0deg); }
|
||||||
|
to { transform: rotate(360deg); }
|
||||||
|
}
|
||||||
|
|
||||||
|
.callsign-enhanced {
|
||||||
|
/* Smooth transition when enhanced */
|
||||||
|
transition: all 0.3s ease;
|
||||||
|
}
|
||||||
|
|
||||||
.popup-details {
|
.popup-details {
|
||||||
font-size: 0.9rem;
|
font-size: 0.9rem;
|
||||||
color: #ffffff !important;
|
color: #ffffff !important;
|
||||||
|
|
|
||||||
360
assets/static/database.html
Normal file
360
assets/static/database.html
Normal file
|
|
@ -0,0 +1,360 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
|
<title>Database Status - SkyView</title>
|
||||||
|
<link rel="icon" type="image/x-icon" href="/favicon.ico">
|
||||||
|
<link rel="stylesheet" href="/static/css/style.css">
|
||||||
|
<style>
|
||||||
|
.database-container {
|
||||||
|
max-width: 1000px;
|
||||||
|
margin: 20px auto;
|
||||||
|
padding: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-card {
|
||||||
|
background: var(--card-bg, #fff);
|
||||||
|
border: 1px solid var(--border-color, #ddd);
|
||||||
|
border-radius: 8px;
|
||||||
|
padding: 20px;
|
||||||
|
margin: 20px 0;
|
||||||
|
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-card h2 {
|
||||||
|
margin-top: 0;
|
||||||
|
color: var(--text-primary, #333);
|
||||||
|
border-bottom: 2px solid var(--accent-color, #007acc);
|
||||||
|
padding-bottom: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.stats-grid {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
|
||||||
|
gap: 15px;
|
||||||
|
margin: 15px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.stat-item {
|
||||||
|
background: var(--bg-secondary, #f8f9fa);
|
||||||
|
padding: 15px;
|
||||||
|
border-radius: 6px;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.stat-value {
|
||||||
|
font-size: 24px;
|
||||||
|
font-weight: bold;
|
||||||
|
color: var(--accent-color, #007acc);
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
.stat-label {
|
||||||
|
font-size: 14px;
|
||||||
|
color: var(--text-secondary, #666);
|
||||||
|
margin-top: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.data-source {
|
||||||
|
background: var(--bg-secondary, #f8f9fa);
|
||||||
|
border: 1px solid var(--border-color, #ddd);
|
||||||
|
border-radius: 6px;
|
||||||
|
padding: 15px;
|
||||||
|
margin: 10px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.source-header {
|
||||||
|
display: flex;
|
||||||
|
justify-content: space-between;
|
||||||
|
align-items: center;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.source-name {
|
||||||
|
font-weight: bold;
|
||||||
|
font-size: 16px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.source-license {
|
||||||
|
padding: 4px 8px;
|
||||||
|
border-radius: 4px;
|
||||||
|
font-size: 12px;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
.license-public {
|
||||||
|
background: #d4edda;
|
||||||
|
color: #155724;
|
||||||
|
}
|
||||||
|
|
||||||
|
.license-agpl {
|
||||||
|
background: #fff3cd;
|
||||||
|
color: #856404;
|
||||||
|
}
|
||||||
|
|
||||||
|
.source-url {
|
||||||
|
font-family: monospace;
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--text-secondary, #666);
|
||||||
|
word-break: break-all;
|
||||||
|
}
|
||||||
|
|
||||||
|
.loading {
|
||||||
|
text-align: center;
|
||||||
|
padding: 20px;
|
||||||
|
color: var(--text-secondary, #666);
|
||||||
|
}
|
||||||
|
|
||||||
|
.error {
|
||||||
|
background: #f8d7da;
|
||||||
|
color: #721c24;
|
||||||
|
padding: 15px;
|
||||||
|
border-radius: 6px;
|
||||||
|
margin: 10px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-header {
|
||||||
|
background: var(--header-bg, #2c3e50);
|
||||||
|
color: white;
|
||||||
|
padding: 15px;
|
||||||
|
margin-bottom: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-header h1 {
|
||||||
|
margin: 0;
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-links {
|
||||||
|
float: right;
|
||||||
|
margin-top: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-links a {
|
||||||
|
color: #ecf0f1;
|
||||||
|
text-decoration: none;
|
||||||
|
margin-left: 20px;
|
||||||
|
padding: 5px 10px;
|
||||||
|
border-radius: 4px;
|
||||||
|
transition: background-color 0.3s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-links a:hover {
|
||||||
|
background-color: rgba(255,255,255,0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Override main CSS to allow scrolling */
|
||||||
|
body {
|
||||||
|
overflow: auto !important;
|
||||||
|
height: auto !important;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div class="nav-header">
|
||||||
|
<h1>SkyView Database Status</h1>
|
||||||
|
<div class="nav-links">
|
||||||
|
<a href="/">← Back to Map</a>
|
||||||
|
<a href="#" onclick="refreshData()">🔄 Refresh</a>
|
||||||
|
</div>
|
||||||
|
<div style="clear: both;"></div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="database-container">
|
||||||
|
<!-- Database Status -->
|
||||||
|
<div class="status-card">
|
||||||
|
<h2>📊 Database Statistics</h2>
|
||||||
|
<div id="database-stats" class="loading">Loading database statistics...</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- External Data Sources -->
|
||||||
|
<div class="status-card">
|
||||||
|
<h2>📦 External Data Sources</h2>
|
||||||
|
<div id="data-sources" class="loading">Loading data sources...</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<script>
|
||||||
|
let dbStatusData = null;
|
||||||
|
let dataSourcesData = null;
|
||||||
|
|
||||||
|
async function loadDatabaseStats() {
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/database/status');
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
|
||||||
|
}
|
||||||
|
dbStatusData = await response.json();
|
||||||
|
renderDatabaseStats();
|
||||||
|
} catch (error) {
|
||||||
|
document.getElementById('database-stats').innerHTML =
|
||||||
|
`<div class="error">Failed to load database statistics: ${error.message}</div>`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function loadDataSources() {
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/database/sources');
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
|
||||||
|
}
|
||||||
|
dataSourcesData = await response.json();
|
||||||
|
renderDataSources();
|
||||||
|
} catch (error) {
|
||||||
|
document.getElementById('data-sources').innerHTML =
|
||||||
|
`<div class="error">Failed to load data sources: ${error.message}</div>`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderDatabaseStats() {
|
||||||
|
if (!dbStatusData) return;
|
||||||
|
|
||||||
|
const container = document.getElementById('database-stats');
|
||||||
|
const history = dbStatusData.history || {};
|
||||||
|
const callsign = dbStatusData.callsign || {};
|
||||||
|
const referenceData = dbStatusData.reference_data || {};
|
||||||
|
|
||||||
|
// Format file size
|
||||||
|
const sizeFormatted = dbStatusData.size_mb ? `${dbStatusData.size_mb.toFixed(1)} MB` : 'Unknown';
|
||||||
|
|
||||||
|
// Format database path (show only filename for space)
|
||||||
|
const dbPath = dbStatusData.path || 'Unknown';
|
||||||
|
const dbFilename = dbPath.split('/').pop();
|
||||||
|
|
||||||
|
// Format last modified date
|
||||||
|
const lastModified = dbStatusData.modified ?
|
||||||
|
new Date(dbStatusData.modified * 1000).toLocaleString() : 'Unknown';
|
||||||
|
|
||||||
|
// Format efficiency percentage
|
||||||
|
const efficiency = dbStatusData.efficiency_percent !== undefined ?
|
||||||
|
`${dbStatusData.efficiency_percent.toFixed(1)}%` : 'Unknown';
|
||||||
|
|
||||||
|
container.innerHTML = `
|
||||||
|
<div class="stats-grid">
|
||||||
|
<div class="stat-item">
|
||||||
|
<span class="stat-value">${history.total_records || 0}</span>
|
||||||
|
<div class="stat-label">Total Aircraft Records</div>
|
||||||
|
</div>
|
||||||
|
<div class="stat-item">
|
||||||
|
<span class="stat-value">${history.unique_aircraft || 0}</span>
|
||||||
|
<div class="stat-label">Unique Aircraft</div>
|
||||||
|
</div>
|
||||||
|
<div class="stat-item">
|
||||||
|
<span class="stat-value">${history.recent_records_24h || 0}</span>
|
||||||
|
<div class="stat-label">Records (Last 24h)</div>
|
||||||
|
</div>
|
||||||
|
<div class="stat-item">
|
||||||
|
<span class="stat-value">${callsign.cache_entries || 0}</span>
|
||||||
|
<div class="stat-label">Cached Callsigns</div>
|
||||||
|
</div>
|
||||||
|
<div class="stat-item">
|
||||||
|
<span class="stat-value">${sizeFormatted}</span>
|
||||||
|
<div class="stat-label">Database Size</div>
|
||||||
|
</div>
|
||||||
|
<div class="stat-item">
|
||||||
|
<span class="stat-value">${efficiency}</span>
|
||||||
|
<div class="stat-label">Storage Efficiency</div>
|
||||||
|
</div>
|
||||||
|
<div class="stat-item">
|
||||||
|
<span class="stat-value">${referenceData.airlines || 0}</span>
|
||||||
|
<div class="stat-label">Airlines Database</div>
|
||||||
|
</div>
|
||||||
|
<div class="stat-item">
|
||||||
|
<span class="stat-value">${referenceData.airports || 0}</span>
|
||||||
|
<div class="stat-label">Airports Database</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div style="margin-top: 20px;">
|
||||||
|
<p><strong>Database Location:</strong> <code>${dbPath}</code></p>
|
||||||
|
<p><strong>Last Modified:</strong> ${lastModified}</p>
|
||||||
|
${dbStatusData.page_count ? `
|
||||||
|
<p><strong>Storage Details:</strong> ${dbStatusData.page_count.toLocaleString()} pages
|
||||||
|
(${dbStatusData.page_size} bytes each), ${dbStatusData.used_pages.toLocaleString()} used,
|
||||||
|
${dbStatusData.free_pages.toLocaleString()} free</p>
|
||||||
|
` : ''}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
${history.oldest_record ? `
|
||||||
|
<div style="margin-top: 15px;">
|
||||||
|
<p><strong>Data Range:</strong> ${new Date(history.oldest_record * 1000).toLocaleDateString()}
|
||||||
|
to ${new Date(history.newest_record * 1000).toLocaleDateString()}</p>
|
||||||
|
</div>
|
||||||
|
` : ''}
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderDataSources() {
|
||||||
|
if (!dataSourcesData) return;
|
||||||
|
|
||||||
|
const container = document.getElementById('data-sources');
|
||||||
|
const available = dataSourcesData.available || [];
|
||||||
|
const loaded = dataSourcesData.loaded || [];
|
||||||
|
|
||||||
|
const loadedNames = new Set(loaded.map(s => s.name));
|
||||||
|
|
||||||
|
let html = '<h3>📥 Loaded Sources</h3>';
|
||||||
|
|
||||||
|
if (loaded.length === 0) {
|
||||||
|
html += '<p>No external data sources have been loaded yet. Use <code>skyview-data update</code> to load aviation reference data.</p>';
|
||||||
|
} else {
|
||||||
|
loaded.forEach(source => {
|
||||||
|
const licenseClass = source.license.includes('Public Domain') ? 'license-public' : 'license-agpl';
|
||||||
|
html += `
|
||||||
|
<div class="data-source">
|
||||||
|
<div class="source-header">
|
||||||
|
<span class="source-name">✅ ${source.name}</span>
|
||||||
|
<span class="source-license ${licenseClass}">${source.license}</span>
|
||||||
|
</div>
|
||||||
|
<div class="source-url">${source.url}</div>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
html += '<h3>📋 Available Sources</h3>';
|
||||||
|
|
||||||
|
available.forEach(source => {
|
||||||
|
const isLoaded = loadedNames.has(source.name);
|
||||||
|
const licenseClass = source.license.includes('Public Domain') ? 'license-public' : 'license-agpl';
|
||||||
|
const statusIcon = isLoaded ? '✅' : (source.requires_consent ? '⚠️' : '📦');
|
||||||
|
|
||||||
|
html += `
|
||||||
|
<div class="data-source">
|
||||||
|
<div class="source-header">
|
||||||
|
<span class="source-name">${statusIcon} ${source.name}</span>
|
||||||
|
<span class="source-license ${licenseClass}">${source.license}</span>
|
||||||
|
</div>
|
||||||
|
<div class="source-url">${source.url}</div>
|
||||||
|
${source.requires_consent ? '<p><small>⚠️ Requires license acceptance for use</small></p>' : ''}
|
||||||
|
${isLoaded ? '<p><small>✅ Currently loaded in database</small></p>' : ''}
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
});
|
||||||
|
|
||||||
|
container.innerHTML = html;
|
||||||
|
}
|
||||||
|
|
||||||
|
function refreshData() {
|
||||||
|
document.getElementById('database-stats').innerHTML = '<div class="loading">Refreshing database statistics...</div>';
|
||||||
|
document.getElementById('data-sources').innerHTML = '<div class="loading">Refreshing data sources...</div>';
|
||||||
|
|
||||||
|
loadDatabaseStats();
|
||||||
|
loadDataSources();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load data on page load
|
||||||
|
document.addEventListener('DOMContentLoaded', function() {
|
||||||
|
loadDatabaseStats();
|
||||||
|
loadDataSources();
|
||||||
|
|
||||||
|
// Auto-refresh every 30 seconds
|
||||||
|
setInterval(() => {
|
||||||
|
loadDatabaseStats();
|
||||||
|
loadDataSources();
|
||||||
|
}, 30000);
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
|
@ -28,7 +28,10 @@
|
||||||
<body>
|
<body>
|
||||||
<div id="app">
|
<div id="app">
|
||||||
<header class="header">
|
<header class="header">
|
||||||
<h1>SkyView <span class="version-info">v0.0.8</span> <a href="https://kode.naiv.no/olemd/skyview" target="_blank" class="repo-link" title="Project Repository">⚙</a></h1>
|
<h1>SkyView <span class="version-info">v0.0.8</span>
|
||||||
|
<a href="https://kode.naiv.no/olemd/skyview" target="_blank" class="repo-link" title="Project Repository">⚙</a>
|
||||||
|
<a href="/database" class="repo-link" title="Database Status">📊</a>
|
||||||
|
</h1>
|
||||||
|
|
||||||
<!-- Status indicators -->
|
<!-- Status indicators -->
|
||||||
<div class="status-section">
|
<div class="status-section">
|
||||||
|
|
|
||||||
|
|
@ -7,6 +7,7 @@ import { WebSocketManager } from './modules/websocket.js?v=2';
|
||||||
import { AircraftManager } from './modules/aircraft-manager.js?v=2';
|
import { AircraftManager } from './modules/aircraft-manager.js?v=2';
|
||||||
import { MapManager } from './modules/map-manager.js?v=2';
|
import { MapManager } from './modules/map-manager.js?v=2';
|
||||||
import { UIManager } from './modules/ui-manager.js?v=2';
|
import { UIManager } from './modules/ui-manager.js?v=2';
|
||||||
|
import { CallsignManager } from './modules/callsign-manager.js';
|
||||||
|
|
||||||
class SkyView {
|
class SkyView {
|
||||||
constructor() {
|
constructor() {
|
||||||
|
|
@ -15,6 +16,7 @@ class SkyView {
|
||||||
this.aircraftManager = null;
|
this.aircraftManager = null;
|
||||||
this.mapManager = null;
|
this.mapManager = null;
|
||||||
this.uiManager = null;
|
this.uiManager = null;
|
||||||
|
this.callsignManager = null;
|
||||||
|
|
||||||
// 3D Radar
|
// 3D Radar
|
||||||
this.radar3d = null;
|
this.radar3d = null;
|
||||||
|
|
@ -37,12 +39,15 @@ class SkyView {
|
||||||
this.uiManager.initializeViews();
|
this.uiManager.initializeViews();
|
||||||
this.uiManager.initializeEventListeners();
|
this.uiManager.initializeEventListeners();
|
||||||
|
|
||||||
|
// Initialize callsign manager for enriched callsign display
|
||||||
|
this.callsignManager = new CallsignManager();
|
||||||
|
|
||||||
// Initialize map manager and get the main map
|
// Initialize map manager and get the main map
|
||||||
this.mapManager = new MapManager();
|
this.mapManager = new MapManager();
|
||||||
const map = await this.mapManager.initializeMap();
|
const map = await this.mapManager.initializeMap();
|
||||||
|
|
||||||
// Initialize aircraft manager with the map
|
// Initialize aircraft manager with the map and callsign manager
|
||||||
this.aircraftManager = new AircraftManager(map);
|
this.aircraftManager = new AircraftManager(map, this.callsignManager);
|
||||||
|
|
||||||
// Set up selected aircraft trail callback
|
// Set up selected aircraft trail callback
|
||||||
this.aircraftManager.setSelectedAircraftCallback((icao) => {
|
this.aircraftManager.setSelectedAircraftCallback((icao) => {
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,8 @@
|
||||||
// Aircraft marker and data management module
|
// Aircraft marker and data management module
|
||||||
export class AircraftManager {
|
export class AircraftManager {
|
||||||
constructor(map) {
|
constructor(map, callsignManager = null) {
|
||||||
this.map = map;
|
this.map = map;
|
||||||
|
this.callsignManager = callsignManager;
|
||||||
this.aircraftData = new Map();
|
this.aircraftData = new Map();
|
||||||
this.aircraftMarkers = new Map();
|
this.aircraftMarkers = new Map();
|
||||||
this.aircraftTrails = new Map();
|
this.aircraftTrails = new Map();
|
||||||
|
|
@ -228,6 +229,11 @@ export class AircraftManager {
|
||||||
// Handle popup exactly like Leaflet expects
|
// Handle popup exactly like Leaflet expects
|
||||||
if (marker.isPopupOpen()) {
|
if (marker.isPopupOpen()) {
|
||||||
marker.setPopupContent(this.createPopupContent(aircraft));
|
marker.setPopupContent(this.createPopupContent(aircraft));
|
||||||
|
// Enhance callsign display for updated popup
|
||||||
|
const popupElement = marker.getPopup().getElement();
|
||||||
|
if (popupElement) {
|
||||||
|
this.enhanceCallsignDisplay(popupElement);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
this.markerUpdateCount++;
|
this.markerUpdateCount++;
|
||||||
|
|
@ -250,6 +256,14 @@ export class AircraftManager {
|
||||||
maxWidth: 450,
|
maxWidth: 450,
|
||||||
className: 'aircraft-popup'
|
className: 'aircraft-popup'
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Enhance callsign display when popup opens
|
||||||
|
marker.on('popupopen', (e) => {
|
||||||
|
const popupElement = e.popup.getElement();
|
||||||
|
if (popupElement) {
|
||||||
|
this.enhanceCallsignDisplay(popupElement);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
this.aircraftMarkers.set(icao, marker);
|
this.aircraftMarkers.set(icao, marker);
|
||||||
this.markerCreateCount++;
|
this.markerCreateCount++;
|
||||||
|
|
@ -435,7 +449,7 @@ export class AircraftManager {
|
||||||
<div class="flight-info">
|
<div class="flight-info">
|
||||||
<span class="icao-flag">${flag}</span>
|
<span class="icao-flag">${flag}</span>
|
||||||
<span class="flight-id">${aircraft.ICAO24 || 'N/A'}</span>
|
<span class="flight-id">${aircraft.ICAO24 || 'N/A'}</span>
|
||||||
${aircraft.Callsign ? `→ <span class="callsign">${aircraft.Callsign}</span>` : ''}
|
${aircraft.Callsign ? `→ <span class="callsign-loading" data-callsign="${aircraft.Callsign}"><span class="callsign">${aircraft.Callsign}</span></span>` : ''}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
|
@ -511,6 +525,29 @@ export class AircraftManager {
|
||||||
return minDistance === Infinity ? null : minDistance;
|
return minDistance === Infinity ? null : minDistance;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Enhance callsign display in popup after it's created
|
||||||
|
async enhanceCallsignDisplay(popupElement) {
|
||||||
|
if (!this.callsignManager) return;
|
||||||
|
|
||||||
|
const callsignElements = popupElement.querySelectorAll('.callsign-loading');
|
||||||
|
|
||||||
|
for (const element of callsignElements) {
|
||||||
|
const callsign = element.dataset.callsign;
|
||||||
|
if (!callsign) continue;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const callsignInfo = await this.callsignManager.getCallsignInfo(callsign);
|
||||||
|
const richDisplay = this.callsignManager.generateCallsignDisplay(callsignInfo, callsign);
|
||||||
|
element.innerHTML = richDisplay;
|
||||||
|
element.classList.remove('callsign-loading');
|
||||||
|
element.classList.add('callsign-enhanced');
|
||||||
|
} catch (error) {
|
||||||
|
console.warn(`Failed to enhance callsign display for ${callsign}:`, error);
|
||||||
|
// Keep the simple display on error
|
||||||
|
element.classList.remove('callsign-loading');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
toggleTrails() {
|
toggleTrails() {
|
||||||
this.showTrails = !this.showTrails;
|
this.showTrails = !this.showTrails;
|
||||||
|
|
|
||||||
163
assets/static/js/modules/callsign-manager.js
Normal file
163
assets/static/js/modules/callsign-manager.js
Normal file
|
|
@ -0,0 +1,163 @@
|
||||||
|
// Callsign enrichment and display module
export class CallsignManager {
    constructor() {
        // callsign -> enriched info object (the API response's "callsign" payload)
        this.callsignCache = new Map();
        // callsign -> in-flight fetch promise, used to de-duplicate concurrent lookups
        this.pendingRequests = new Map();

        // Rate limiting to avoid overwhelming the API
        this.lastRequestTime = 0;
        this.requestInterval = 100; // Minimum 100ms between requests
    }

    /**
     * Escape a value for safe interpolation into HTML.
     * Display strings built here are assigned to innerHTML by callers, and
     * callsign/airline text ultimately originates from over-the-air ADS-B
     * broadcasts (untrusted), so everything interpolated must be escaped.
     * @param {*} value - Value to escape (coerced to string)
     * @returns {string} - HTML-safe string
     */
    static escapeHtml(value) {
        return String(value)
            .replace(/&/g, '&amp;')
            .replace(/</g, '&lt;')
            .replace(/>/g, '&gt;')
            .replace(/"/g, '&quot;')
            .replace(/'/g, '&#39;');
    }

    /**
     * Get enriched callsign information, using cache when available
     * @param {string} callsign - The raw callsign to lookup
     * @returns {Promise<Object>} - Enriched callsign data (or null on failure)
     */
    async getCallsignInfo(callsign) {
        if (!callsign || callsign.trim() === '') {
            return null;
        }

        const cleanCallsign = callsign.trim().toUpperCase();

        // Check cache first
        if (this.callsignCache.has(cleanCallsign)) {
            return this.callsignCache.get(cleanCallsign);
        }

        // Check if we already have a pending request for this callsign
        if (this.pendingRequests.has(cleanCallsign)) {
            return this.pendingRequests.get(cleanCallsign);
        }

        // Rate limiting: sleep only for the *remaining* portion of the interval.
        // (Fix: previously this waited the full interval regardless of how much
        // time had already elapsed since the last request.)
        const elapsed = Date.now() - this.lastRequestTime;
        if (elapsed < this.requestInterval) {
            await new Promise(resolve => setTimeout(resolve, this.requestInterval - elapsed));
        }

        // Create the API request and register it so concurrent callers share it
        const requestPromise = this.fetchCallsignInfo(cleanCallsign);
        this.pendingRequests.set(cleanCallsign, requestPromise);

        try {
            const result = await requestPromise;

            // Cache the result for future use
            if (result && result.callsign) {
                this.callsignCache.set(cleanCallsign, result.callsign);
            }

            return result ? result.callsign : null;
        } catch (error) {
            console.warn(`Failed to lookup callsign ${cleanCallsign}:`, error);
            return null;
        } finally {
            // Clean up pending request and stamp the rate limiter
            this.pendingRequests.delete(cleanCallsign);
            this.lastRequestTime = Date.now();
        }
    }

    /**
     * Fetch callsign information from the API
     * @param {string} callsign - The callsign to lookup
     * @returns {Promise<Object>} - API response
     * @throws {Error} - On non-2xx HTTP status
     */
    async fetchCallsignInfo(callsign) {
        const response = await fetch(`/api/callsign/${encodeURIComponent(callsign)}`);

        if (!response.ok) {
            throw new Error(`HTTP ${response.status}: ${response.statusText}`);
        }

        return await response.json();
    }

    /**
     * Generate rich HTML display for a callsign.
     * All dynamic values are HTML-escaped (see escapeHtml) because the result
     * is injected via innerHTML.
     * @param {Object} callsignInfo - Enriched callsign data from API
     * @param {string} originalCallsign - Original callsign if API data is null
     * @returns {string} - HTML string for display
     */
    generateCallsignDisplay(callsignInfo, originalCallsign = '') {
        if (!callsignInfo || !callsignInfo.is_valid) {
            // Fallback for invalid or missing callsign data
            if (originalCallsign) {
                return `<span class="callsign-display simple">${CallsignManager.escapeHtml(originalCallsign)}</span>`;
            }
            return '<span class="callsign-display no-data">N/A</span>';
        }

        const esc = CallsignManager.escapeHtml;
        const parts = [];

        // Airline code
        if (callsignInfo.airline_code) {
            parts.push(`<span class="airline-code">${esc(callsignInfo.airline_code)}</span>`);
        }

        // Flight number
        if (callsignInfo.flight_number) {
            parts.push(`<span class="flight-number">${esc(callsignInfo.flight_number)}</span>`);
        }

        // Airline name (if available)
        let airlineInfo = '';
        if (callsignInfo.airline_name) {
            airlineInfo = `<span class="airline-name" title="${esc(callsignInfo.airline_name)}">
                ${esc(callsignInfo.airline_name)}
            </span>`;

            // Add country if available
            if (callsignInfo.airline_country) {
                airlineInfo += ` <span class="airline-country">(${esc(callsignInfo.airline_country)})</span>`;
            }
        }

        return `
            <span class="callsign-display enriched">
                <span class="callsign-code">${parts.join(' ')}</span>
                ${airlineInfo ? `<span class="callsign-details">${airlineInfo}</span>` : ''}
            </span>
        `;
    }

    /**
     * Generate compact callsign display for table view
     * @param {Object} callsignInfo - Enriched callsign data
     * @param {string} originalCallsign - Original callsign fallback
     * @returns {string} - Compact HTML for table display
     */
    generateCompactCallsignDisplay(callsignInfo, originalCallsign = '') {
        if (!callsignInfo || !callsignInfo.is_valid) {
            // Escape even the fallback: raw broadcast callsigns are untrusted
            return originalCallsign ? CallsignManager.escapeHtml(originalCallsign) : 'N/A';
        }

        const esc = CallsignManager.escapeHtml;

        // For tables, use the display_name or format airline + flight
        if (callsignInfo.display_name) {
            return `<span class="callsign-compact" title="${esc(callsignInfo.airline_name || '')}">${esc(callsignInfo.display_name)}</span>`;
        }

        return `<span class="callsign-compact">${esc(callsignInfo.airline_code)} ${esc(callsignInfo.flight_number)}</span>`;
    }

    /**
     * Clear the callsign cache (useful for memory management)
     */
    clearCache() {
        this.callsignCache.clear();
        console.debug('Callsign cache cleared');
    }

    /**
     * Get cache statistics for debugging
     * @returns {Object} - Cache size and pending requests
     */
    getCacheStats() {
        return {
            cacheSize: this.callsignCache.size,
            pendingRequests: this.pendingRequests.size
        };
    }
}
|
||||||
686
cmd/skyview-data/main.go
Normal file
686
cmd/skyview-data/main.go
Normal file
|
|
@ -0,0 +1,686 @@
|
||||||
|
// Package main implements the SkyView data management utility.
|
||||||
|
//
|
||||||
|
// This tool provides simple commands for populating and updating the SkyView
|
||||||
|
// database with aviation data from various external sources while maintaining
|
||||||
|
// proper license compliance.
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"skyview/internal/database"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Shared configuration structures (should match main skyview)

// Config mirrors the main skyview application's config.json layout so this
// tool can parse the same file. Fields not used by this tool are still
// declared so the shared file round-trips cleanly.
type Config struct {
	Server   ServerConfig     `json:"server"`
	Sources  []SourceConfig   `json:"sources"`
	Settings Settings         `json:"settings"`
	Database *database.Config `json:"database,omitempty"` // nil => database.DefaultConfig() is used
	Callsign *CallsignConfig  `json:"callsign,omitempty"`
	Origin   OriginConfig     `json:"origin"`
}

// CallsignConfig controls callsign enrichment lookups.
type CallsignConfig struct {
	Enabled      bool                            `json:"enabled"`
	CacheHours   int                             `json:"cache_hours"`
	PrivacyMode  bool                            `json:"privacy_mode"`
	Sources      map[string]CallsignSourceConfig `json:"sources"`
	ExternalAPIs map[string]ExternalAPIConfig    `json:"external_apis,omitempty"`
}

// CallsignSourceConfig describes one enrichment data source, including
// license-consent bookkeeping (some sources require explicit acceptance).
type CallsignSourceConfig struct {
	Enabled          bool   `json:"enabled"`
	Priority         int    `json:"priority"` // lower value = preferred source
	License          string `json:"license"`
	RequiresConsent  bool   `json:"requires_consent,omitempty"`
	UserAcceptsTerms bool   `json:"user_accepts_terms,omitempty"`
}

// ExternalAPIConfig describes an optional external lookup API.
type ExternalAPIConfig struct {
	Enabled         bool `json:"enabled"`
	TimeoutSeconds  int  `json:"timeout_seconds,omitempty"`
	MaxRetries      int  `json:"max_retries,omitempty"`
	RequiresConsent bool `json:"requires_consent,omitempty"`
}

// OriginConfig is the receiver's reference position.
type OriginConfig struct {
	Latitude  float64 `json:"latitude"`
	Longitude float64 `json:"longitude"`
	Name      string  `json:"name,omitempty"`
}

// ServerConfig is the main application's HTTP bind address.
// NOTE(review): not referenced by the commands in this file — kept only for
// config-file compatibility; confirm before removing.
type ServerConfig struct {
	Host string `json:"host"`
	Port int    `json:"port"`
}

// SourceConfig describes one ADS-B feed entry from the shared config.
type SourceConfig struct {
	ID        string  `json:"id"`
	Name      string  `json:"name"`
	Host      string  `json:"host"`
	Port      int     `json:"port"`
	Format    string  `json:"format,omitempty"`
	Latitude  float64 `json:"latitude"`
	Longitude float64 `json:"longitude"`
	Altitude  float64 `json:"altitude"`
	Enabled   bool    `json:"enabled"`
}

// Settings holds runtime tuning values shared with the main application.
type Settings struct {
	HistoryLimit int `json:"history_limit"`
	StaleTimeout int `json:"stale_timeout"`
	UpdateRate   int `json:"update_rate"`
}

// Version information, injected at build time via -ldflags (see Makefile).
var (
	version = "dev"
	commit  = "unknown"
	date    = "unknown"
)
|
||||||
|
|
||||||
|
// main parses flags, loads the shared configuration, opens the database, and
// dispatches to the requested subcommand. All fatal errors exit via log.Fatalf
// so the tool is safe to run from cron.
func main() {
	var (
		configPath = flag.String("config", "config.json", "Configuration file path")
		dbPath     = flag.String("db", "", "Database file path (override config)")
		verbose    = flag.Bool("v", false, "Verbose output")
		force      = flag.Bool("force", false, "Force operation without prompts")
		showVer    = flag.Bool("version", false, "Show version information")
	)

	// Custom usage text listing the available subcommands.
	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, `SkyView Data Manager v%s

USAGE:
  skyview-data [OPTIONS] COMMAND [ARGS...]

COMMANDS:
  init                Initialize empty database
  list                List available data sources
  status              Show current database status
  update [SOURCE...]  Update data from sources (default: safe sources)
  import SOURCE       Import data from specific source
  clear SOURCE        Remove data from specific source
  reset               Clear all data and reset database
  optimize            Optimize database for storage efficiency

EXAMPLES:
  skyview-data init                 # Create empty database
  skyview-data update               # Update from safe (public domain) sources
  skyview-data update openflights   # Update OpenFlights data (requires license acceptance)
  skyview-data import ourairports   # Import OurAirports data
  skyview-data list                 # Show available sources
  skyview-data status               # Show database status
  skyview-data optimize             # Optimize database storage

OPTIONS:
`, version)
		flag.PrintDefaults()
	}

	flag.Parse()

	if *showVer {
		fmt.Printf("skyview-data version %s (commit %s, built %s)\n", version, commit, date)
		return
	}

	// A subcommand is mandatory.
	if flag.NArg() == 0 {
		flag.Usage()
		os.Exit(1)
	}

	command := flag.Arg(0)

	// Set up logging for cron-friendly operation
	if *verbose {
		log.SetFlags(log.LstdFlags | log.Lshortfile)
	} else {
		// For cron jobs, include timestamp but no file info
		log.SetFlags(log.LstdFlags)
	}

	// Load configuration (shared with the main skyview binary).
	config, err := loadConfig(*configPath)
	if err != nil {
		log.Fatalf("Configuration loading failed: %v", err)
	}

	// Initialize database connection using shared config; -db overrides the path.
	db, err := initDatabaseFromConfig(config, *dbPath)
	if err != nil {
		log.Fatalf("Database initialization failed: %v", err)
	}
	defer db.Close()

	// Execute command. Each cmd* helper returns an error checked below.
	switch command {
	case "init":
		err = cmdInit(db, *force)
	case "list":
		err = cmdList(db)
	case "status":
		err = cmdStatus(db)
	case "update":
		// Remaining positional args (after the command) name the sources.
		sources := flag.Args()[1:]
		err = cmdUpdate(db, sources, *force)
	case "import":
		if flag.NArg() < 2 {
			log.Fatal("import command requires a source name")
		}
		err = cmdImport(db, flag.Arg(1), *force)
	case "clear":
		if flag.NArg() < 2 {
			log.Fatal("clear command requires a source name")
		}
		err = cmdClear(db, flag.Arg(1), *force)
	case "reset":
		err = cmdReset(db, *force)
	case "optimize":
		err = cmdOptimize(db, *force)
	default:
		log.Fatalf("Unknown command: %s", command)
	}

	if err != nil {
		log.Fatalf("Command failed: %v", err)
	}
}
|
||||||
|
|
||||||
|
// initDatabase initializes the database connection with auto-creation,
// using the package default configuration with only the path overridden.
//
// NOTE(review): main() uses initDatabaseFromConfig instead; this helper looks
// unused within this file — confirm callers before removing.
//
// The caller owns the returned handle and must Close() it.
func initDatabase(dbPath string) (*database.Database, error) {
	config := database.DefaultConfig()
	config.Path = dbPath

	db, err := database.NewDatabase(config)
	if err != nil {
		return nil, fmt.Errorf("failed to create database: %v", err)
	}

	// Initialize creates the schema; close the handle if that step fails so
	// we never leak a half-open connection.
	if err := db.Initialize(); err != nil {
		db.Close()
		return nil, fmt.Errorf("failed to initialize database: %v", err)
	}

	return db, nil
}
|
||||||
|
|
||||||
|
// cmdInit initializes an empty database.
//
// The schema itself was already created when the db handle was initialized in
// main(); this command's job is to confirm that, refuse to clobber an existing
// populated database (unless force is set), and print next-step guidance.
func cmdInit(db *database.Database, force bool) error {
	dbPath := db.GetConfig().Path

	// Check if database already exists and has data
	if !force {
		if stats, err := db.GetHistoryManager().GetStatistics(); err == nil {
			// Statistics values are interface{}; total_records is expected as int.
			if totalRecords, ok := stats["total_records"].(int); ok && totalRecords > 0 {
				fmt.Printf("Database already exists with %d records at: %s\n", totalRecords, dbPath)
				fmt.Println("Use --force to reinitialize")
				return nil
			}
		}
	}

	fmt.Printf("Initializing SkyView database at: %s\n", dbPath)
	fmt.Println("✓ Database schema created")
	fmt.Println("✓ Empty tables ready for data import")
	fmt.Println()
	fmt.Println("Next steps:")
	fmt.Println("  skyview-data update    # Import safe (public domain) data")
	fmt.Println("  skyview-data list      # Show available data sources")
	fmt.Println("  skyview-data status    # Check database status")

	return nil
}
|
||||||
|
|
||||||
|
// cmdList shows available data sources.
//
// Sources that require explicit license acceptance (e.g. AGPL-licensed data)
// are marked with a warning icon; public-domain/MIT sources with a green dot.
func cmdList(db *database.Database) error {
	fmt.Println("Available Data Sources:")
	fmt.Println()

	sources := database.GetAvailableDataSources()
	for _, source := range sources {
		status := "🟢"
		if source.RequiresConsent {
			status = "⚠️ "
		}

		fmt.Printf("%s %s\n", status, source.Name)
		fmt.Printf("   License: %s\n", source.License)
		fmt.Printf("   URL: %s\n", source.URL)
		if source.RequiresConsent {
			fmt.Printf("   Note: Requires license acceptance\n")
		}
		fmt.Println()
	}

	fmt.Println("Legend:")
	fmt.Println("  🟢 = Safe to use automatically (Public Domain/MIT)")
	fmt.Println("  ⚠️  = Requires license acceptance (AGPL, etc.)")

	return nil
}
|
||||||
|
|
||||||
|
// cmdStatus shows current database status: file size/mtime, storage
// efficiency, loaded data sources, reference-data counts, flight-history
// statistics, and a small sample of airports.
//
// Several queries here are deliberately best-effort: their Scan errors are
// ignored so a partially populated database still produces a useful report.
// NOTE(review): scanning MAX(imported_at) into time.Time will fail when the
// data_sources table is empty (SQL NULL); the error is currently ignored,
// which leaves lastUpdate at its zero value — apparently the intended
// fallback, but worth confirming.
func cmdStatus(db *database.Database) error {
	fmt.Println("SkyView Database Status")
	fmt.Println("======================")

	dbPath := db.GetConfig().Path
	fmt.Printf("Database: %s\n", dbPath)

	// Check if file exists and get size
	if stat, err := os.Stat(dbPath); err == nil {
		fmt.Printf("Size: %.2f MB\n", float64(stat.Size())/(1024*1024))
		fmt.Printf("Modified: %s\n", stat.ModTime().Format(time.RFC3339))

		// Add database optimization stats (page usage / fragmentation)
		optimizer := database.NewOptimizationManager(db, db.GetConfig())
		if stats, err := optimizer.GetOptimizationStats(); err == nil {
			fmt.Printf("Efficiency: %.1f%% (%d used pages, %d free pages)\n",
				stats.Efficiency, stats.UsedPages, stats.FreePages)
			if stats.AutoVacuumEnabled {
				fmt.Printf("Auto-VACUUM: Enabled\n")
			}
		}
	}
	fmt.Println()

	// Show loaded data sources
	loader := database.NewDataLoader(db.GetConnection())
	loadedSources, err := loader.GetLoadedDataSources()
	if err != nil {
		return fmt.Errorf("failed to get loaded sources: %v", err)
	}

	if len(loadedSources) == 0 {
		fmt.Println("📭 No data sources loaded")
		fmt.Println("   Run 'skyview-data update' to populate with aviation data")
	} else {
		fmt.Printf("📦 Loaded Data Sources (%d):\n", len(loadedSources))
		for _, source := range loadedSources {
			fmt.Printf("   • %s (%s)\n", source.Name, source.License)
		}
	}
	fmt.Println()

	// Show statistics
	stats, err := db.GetHistoryManager().GetStatistics()
	if err != nil {
		return fmt.Errorf("failed to get statistics: %v", err)
	}

	// Show comprehensive reference data statistics.
	// Scan errors ignored: a missing table simply leaves the count at 0.
	var airportCount, airlineCount int
	db.GetConnection().QueryRow(`SELECT COUNT(*) FROM airports`).Scan(&airportCount)
	db.GetConnection().QueryRow(`SELECT COUNT(*) FROM airlines`).Scan(&airlineCount)

	// Get data source update information
	var lastUpdate time.Time
	var updateCount int
	err = db.GetConnection().QueryRow(`
		SELECT COUNT(*), MAX(imported_at)
		FROM data_sources
		WHERE imported_at IS NOT NULL
	`).Scan(&updateCount, &lastUpdate)

	fmt.Printf("📊 Database Statistics:\n")
	fmt.Printf("   Reference Data:\n")
	if airportCount > 0 {
		fmt.Printf("   • Airports: %d\n", airportCount)
	}
	if airlineCount > 0 {
		fmt.Printf("   • Airlines: %d\n", airlineCount)
	}
	if updateCount > 0 {
		fmt.Printf("   • Data Sources: %d imported\n", updateCount)
		if !lastUpdate.IsZero() {
			fmt.Printf("   • Last Updated: %s\n", lastUpdate.Format("2006-01-02 15:04:05"))
		}
	}

	// Flight history counters come back as map[string]interface{}; each is
	// printed only if present and of the expected type.
	fmt.Printf("   Flight History:\n")
	if totalRecords, ok := stats["total_records"].(int); ok {
		fmt.Printf("   • Aircraft Records: %d\n", totalRecords)
	}
	if uniqueAircraft, ok := stats["unique_aircraft"].(int); ok {
		fmt.Printf("   • Unique Aircraft: %d\n", uniqueAircraft)
	}
	if recentRecords, ok := stats["recent_records_24h"].(int); ok {
		fmt.Printf("   • Last 24h: %d records\n", recentRecords)
	}

	oldestRecord, hasOldest := stats["oldest_record"]
	newestRecord, hasNewest := stats["newest_record"]
	if hasOldest && hasNewest && oldestRecord != nil && newestRecord != nil {
		if oldest, ok := oldestRecord.(time.Time); ok {
			if newest, ok := newestRecord.(time.Time); ok {
				fmt.Printf("   • Flight Data Range: %s to %s\n",
					oldest.Format("2006-01-02"),
					newest.Format("2006-01-02"))
			}
		}
	}

	// Show airport data sample if available (best-effort; errors skipped)
	if airportCount > 0 {
		var sampleAirports []string
		rows, err := db.GetConnection().Query(`
			SELECT name || ' (' || COALESCE(icao_code, ident) || ')'
			FROM airports
			WHERE icao_code IS NOT NULL AND icao_code != ''
			ORDER BY name
			LIMIT 3
		`)
		if err == nil {
			for rows.Next() {
				var airport string
				if rows.Scan(&airport) == nil {
					sampleAirports = append(sampleAirports, airport)
				}
			}
			rows.Close()
			if len(sampleAirports) > 0 {
				fmt.Printf("   • Sample Airports: %s\n", strings.Join(sampleAirports, ", "))
			}
		}
	}

	return nil
}
|
||||||
|
|
||||||
|
// cmdUpdate updates data from specified sources (or safe sources by default).
//
// With no explicit sources, every source that does not require license
// consent is selected. Consent-requiring sources are skipped unless force is
// set, in which case the license is recorded as accepted before loading.
// Individual source failures are logged and skipped; the command only fails
// hard on setup errors.
func cmdUpdate(db *database.Database, sources []string, force bool) error {
	availableSources := database.GetAvailableDataSources()

	// If no sources specified, use safe (non-consent-required) sources.
	// Source names are matched case-insensitively with spaces stripped.
	if len(sources) == 0 {
		log.Println("Updating from safe data sources...")
		for _, source := range availableSources {
			if !source.RequiresConsent {
				sources = append(sources, strings.ToLower(strings.ReplaceAll(source.Name, " ", "")))
			}
		}

		if len(sources) == 0 {
			log.Println("No safe data sources available for automatic update")
			return nil
		}
		log.Printf("Found %d safe data sources to update", len(sources))
	}

	loader := database.NewDataLoader(db.GetConnection())

	for _, sourceName := range sources {
		// Find matching source (space-insensitive, case-insensitive).
		// NOTE(review): taking &available (the range copy) is safe here only
		// because the loop breaks immediately after — worth a named variable
		// if this is ever refactored (pre-Go-1.22 loop-variable semantics).
		var matchedSource *database.DataSource
		for _, available := range availableSources {
			if strings.EqualFold(strings.ReplaceAll(available.Name, " ", ""), sourceName) {
				matchedSource = &available
				break
			}
		}

		if matchedSource == nil {
			log.Printf("⚠️ Unknown source: %s", sourceName)
			continue
		}

		// Check for consent requirement
		if matchedSource.RequiresConsent && !force {
			log.Printf("Skipping %s: requires license acceptance (%s)", matchedSource.Name, matchedSource.License)
			log.Printf("Use --force to accept license terms, or 'skyview-data import %s' for interactive acceptance", sourceName)
			continue
		}

		// Set license acceptance for forced operations
		if force && matchedSource.RequiresConsent {
			matchedSource.UserAcceptedLicense = true
			log.Printf("Accepting license terms for %s (%s)", matchedSource.Name, matchedSource.License)
		}

		log.Printf("Loading %s...", matchedSource.Name)

		result, err := loader.LoadDataSource(*matchedSource)
		if err != nil {
			log.Printf("Failed to load %s: %v", matchedSource.Name, err)
			continue
		}

		log.Printf("Loaded %s: %d records in %v", matchedSource.Name, result.RecordsNew, result.Duration)

		// Summarize import errors, capped at the first three.
		if len(result.Errors) > 0 {
			log.Printf("  %d errors occurred during import (first few):", len(result.Errors))
			for i, errMsg := range result.Errors {
				if i >= 3 {
					break
				}
				log.Printf("    %s", errMsg)
			}
		}
	}

	log.Println("Update completed successfully")
	return nil
}
|
||||||
|
|
||||||
|
// cmdImport imports data from a specific source with interactive license
// acceptance.
//
// Unlike cmdUpdate, a consent-requiring source triggers an interactive y/N
// prompt showing the license and URL; force skips the prompt entirely.
// Returns an error for unknown sources or failed imports.
func cmdImport(db *database.Database, sourceName string, force bool) error {
	availableSources := database.GetAvailableDataSources()

	// Match the requested name case-insensitively with spaces stripped.
	var matchedSource *database.DataSource
	for _, available := range availableSources {
		if strings.EqualFold(strings.ReplaceAll(available.Name, " ", ""), sourceName) {
			matchedSource = &available
			break
		}
	}

	if matchedSource == nil {
		return fmt.Errorf("unknown data source: %s", sourceName)
	}

	// Handle license acceptance
	if matchedSource.RequiresConsent && !force {
		fmt.Printf("📄 License Information for %s\n", matchedSource.Name)
		fmt.Printf("   License: %s\n", matchedSource.License)
		fmt.Printf("   URL: %s\n", matchedSource.URL)
		fmt.Println()
		fmt.Printf("By importing this data, you agree to comply with the %s license terms.\n", matchedSource.License)

		if !askForConfirmation("Do you accept the license terms?") {
			fmt.Println("Import cancelled.")
			return nil
		}

		matchedSource.UserAcceptedLicense = true
	}

	fmt.Printf("📥 Importing %s...\n", matchedSource.Name)

	loader := database.NewDataLoader(db.GetConnection())
	result, err := loader.LoadDataSource(*matchedSource)
	if err != nil {
		return fmt.Errorf("import failed: %v", err)
	}

	fmt.Printf("✅ Import completed!\n")
	fmt.Printf("   Records: %d loaded, %d errors\n", result.RecordsNew, result.RecordsError)
	fmt.Printf("   Duration: %v\n", result.Duration)

	return nil
}
|
||||||
|
|
||||||
|
// cmdClear removes data from a specific source
|
||||||
|
func cmdClear(db *database.Database, sourceName string, force bool) error {
|
||||||
|
if !force && !askForConfirmation(fmt.Sprintf("Clear all data from source '%s'?", sourceName)) {
|
||||||
|
fmt.Println("Operation cancelled.")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
loader := database.NewDataLoader(db.GetConnection())
|
||||||
|
err := loader.ClearDataSource(sourceName)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("clear failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("✅ Cleared data from source: %s\n", sourceName)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// cmdReset clears all data and resets the database.
//
// Currently a stub: after confirmation it reports that reset is not yet
// implemented and returns an error so callers/cron jobs see a failure.
func cmdReset(db *database.Database, force bool) error {
	if !force {
		fmt.Println("⚠️ This will remove ALL data from the database!")
		if !askForConfirmation("Are you sure you want to reset the database?") {
			fmt.Println("Reset cancelled.")
			return nil
		}
	}

	// This would require implementing a database reset function
	fmt.Println("❌ Database reset not yet implemented")
	return fmt.Errorf("reset functionality not implemented")
}
|
||||||
|
|
||||||
|
// askForConfirmation asks the user for yes/no confirmation on stdin.
// Only "y" or "yes" (any case, surrounding whitespace ignored) count as yes;
// anything else — including empty input or EOF — is treated as no, making
// "no" the safe default.
func askForConfirmation(question string) bool {
	fmt.Printf("%s (y/N): ", question)

	var answer string
	fmt.Scanln(&answer)

	switch strings.ToLower(strings.TrimSpace(answer)) {
	case "y", "yes":
		return true
	default:
		return false
	}
}
|
||||||
|
|
||||||
|
// loadConfig loads the shared configuration file
|
||||||
|
func loadConfig(configPath string) (*Config, error) {
|
||||||
|
data, err := os.ReadFile(configPath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to read config file %s: %w", configPath, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var config Config
|
||||||
|
if err := json.Unmarshal(data, &config); err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to parse config file %s: %w", configPath, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &config, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// initDatabaseFromConfig initializes database using shared configuration.
//
// Precedence for the database path: dbPathOverride (the -db flag) beats the
// config file's value, and an empty path is resolved to a platform default
// via database.ResolveDatabasePath. The caller must Close() the returned
// handle.
//
// NOTE(review): when config.Database is non-nil, dbConfig aliases it, so the
// override/resolution below mutates the caller's Config — harmless for this
// CLI's single use, but confirm before reusing this helper elsewhere.
func initDatabaseFromConfig(config *Config, dbPathOverride string) (*database.Database, error) {
	var dbConfig *database.Config

	if config.Database != nil {
		dbConfig = config.Database
	} else {
		dbConfig = database.DefaultConfig()
	}

	// Allow command-line override of database path
	if dbPathOverride != "" {
		dbConfig.Path = dbPathOverride
	}

	// Resolve database path if empty (ResolveDatabasePath supplies the default)
	if dbConfig.Path == "" {
		resolvedPath, err := database.ResolveDatabasePath(dbConfig.Path)
		if err != nil {
			return nil, fmt.Errorf("failed to resolve database path: %w", err)
		}
		dbConfig.Path = resolvedPath
	}

	// Create and initialize database
	db, err := database.NewDatabase(dbConfig)
	if err != nil {
		return nil, fmt.Errorf("failed to create database: %w", err)
	}

	// Close on failed Initialize so we never leak a half-open connection.
	if err := db.Initialize(); err != nil {
		db.Close()
		return nil, fmt.Errorf("failed to initialize database: %w", err)
	}

	return db, nil
}
|
||||||
|
|
||||||
|
// cmdOptimize optimizes the database for storage efficiency
|
||||||
|
func cmdOptimize(db *database.Database, force bool) error {
|
||||||
|
fmt.Println("Database Storage Optimization")
|
||||||
|
fmt.Println("============================")
|
||||||
|
|
||||||
|
// We need to get the database path from the config
|
||||||
|
// For now, let's create a simple optimization manager
|
||||||
|
config := &database.Config{
|
||||||
|
Path: "./dev-skyview.db", // Default path - this should be configurable
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create optimization manager
|
||||||
|
optimizer := database.NewOptimizationManager(db, config)
|
||||||
|
|
||||||
|
// Get current stats
|
||||||
|
fmt.Println("📊 Current Database Statistics:")
|
||||||
|
stats, err := optimizer.GetOptimizationStats()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to get database stats: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf(" • Size: %.1f MB\n", float64(stats.DatabaseSize)/(1024*1024))
|
||||||
|
fmt.Printf(" • Page Size: %d bytes\n", stats.PageSize)
|
||||||
|
fmt.Printf(" • Total Pages: %d\n", stats.PageCount)
|
||||||
|
fmt.Printf(" • Used Pages: %d\n", stats.UsedPages)
|
||||||
|
fmt.Printf(" • Free Pages: %d\n", stats.FreePages)
|
||||||
|
fmt.Printf(" • Efficiency: %.1f%%\n", stats.Efficiency)
|
||||||
|
fmt.Printf(" • Auto VACUUM: %v\n", stats.AutoVacuumEnabled)
|
||||||
|
|
||||||
|
// Check if optimization is needed
|
||||||
|
needsOptimization := stats.FreePages > 0 || stats.Efficiency < 95.0
|
||||||
|
|
||||||
|
if !needsOptimization && !force {
|
||||||
|
fmt.Println("✅ Database is already well optimized!")
|
||||||
|
fmt.Println(" Use --force to run optimization anyway")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Perform optimizations
|
||||||
|
if force && !needsOptimization {
|
||||||
|
fmt.Println("\n🔧 Force optimization requested:")
|
||||||
|
} else {
|
||||||
|
fmt.Println("\n🔧 Applying Optimizations:")
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := optimizer.VacuumDatabase(); err != nil {
|
||||||
|
return fmt.Errorf("VACUUM failed: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := optimizer.OptimizeDatabase(); err != nil {
|
||||||
|
return fmt.Errorf("optimization failed: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Show final stats
|
||||||
|
fmt.Println("\n📈 Final Statistics:")
|
||||||
|
finalStats, err := optimizer.GetOptimizationStats()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to get final stats: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf(" • Size: %.1f MB\n", float64(finalStats.DatabaseSize)/(1024*1024))
|
||||||
|
fmt.Printf(" • Efficiency: %.1f%%\n", finalStats.Efficiency)
|
||||||
|
fmt.Printf(" • Free Pages: %d\n", finalStats.FreePages)
|
||||||
|
|
||||||
|
if stats.DatabaseSize > finalStats.DatabaseSize {
|
||||||
|
saved := stats.DatabaseSize - finalStats.DatabaseSize
|
||||||
|
fmt.Printf(" • Space Saved: %.1f MB\n", float64(saved)/(1024*1024))
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("\n✅ Database optimization completed!")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
|
@ -44,5 +44,31 @@
|
||||||
"history_limit": 1000,
|
"history_limit": 1000,
|
||||||
"stale_timeout": 60,
|
"stale_timeout": 60,
|
||||||
"update_rate": 1
|
"update_rate": 1
|
||||||
|
},
|
||||||
|
"database": {
|
||||||
|
"path": "",
|
||||||
|
"max_history_days": 7,
|
||||||
|
"backup_on_upgrade": true,
|
||||||
|
"max_open_conns": 10,
|
||||||
|
"max_idle_conns": 5
|
||||||
|
},
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"cache_hours": 24,
|
||||||
|
"privacy_mode": false,
|
||||||
|
"sources": {
|
||||||
|
"openflights_airlines": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 1
|
||||||
|
},
|
||||||
|
"openflights_airports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 2
|
||||||
|
},
|
||||||
|
"ourairports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 3
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
38
debian/DEBIAN/postinst
vendored
38
debian/DEBIAN/postinst
vendored
|
|
@ -18,6 +18,21 @@ case "$1" in
|
||||||
chown skyview-adsb:skyview-adsb /var/lib/skyview-adsb /var/log/skyview-adsb >/dev/null 2>&1 || true
|
chown skyview-adsb:skyview-adsb /var/lib/skyview-adsb /var/log/skyview-adsb >/dev/null 2>&1 || true
|
||||||
chmod 755 /var/lib/skyview-adsb /var/log/skyview-adsb >/dev/null 2>&1 || true
|
chmod 755 /var/lib/skyview-adsb /var/log/skyview-adsb >/dev/null 2>&1 || true
|
||||||
|
|
||||||
|
# Create database directory for skyview user (not skyview-adsb)
|
||||||
|
mkdir -p /var/lib/skyview >/dev/null 2>&1 || true
|
||||||
|
if getent passwd skyview >/dev/null 2>&1; then
|
||||||
|
chown skyview:skyview /var/lib/skyview >/dev/null 2>&1 || true
|
||||||
|
else
|
||||||
|
# Create skyview user for database management
|
||||||
|
if ! getent group skyview >/dev/null 2>&1; then
|
||||||
|
addgroup --system --quiet skyview
|
||||||
|
fi
|
||||||
|
adduser --system --ingroup skyview --home /var/lib/skyview \
|
||||||
|
--no-create-home --disabled-password --shell /bin/false --quiet skyview
|
||||||
|
chown skyview:skyview /var/lib/skyview >/dev/null 2>&1 || true
|
||||||
|
fi
|
||||||
|
chmod 755 /var/lib/skyview >/dev/null 2>&1 || true
|
||||||
|
|
||||||
# Set permissions on config files
|
# Set permissions on config files
|
||||||
if [ -f /etc/skyview-adsb/config.json ]; then
|
if [ -f /etc/skyview-adsb/config.json ]; then
|
||||||
chown root:skyview-adsb /etc/skyview-adsb/config.json >/dev/null 2>&1 || true
|
chown root:skyview-adsb /etc/skyview-adsb/config.json >/dev/null 2>&1 || true
|
||||||
|
|
@ -25,14 +40,33 @@ case "$1" in
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
|
||||||
# Handle systemd service
|
# Handle systemd services
|
||||||
systemctl daemon-reload >/dev/null 2>&1 || true
|
systemctl daemon-reload >/dev/null 2>&1 || true
|
||||||
|
|
||||||
# Check if service was previously enabled
|
# Check if main service was previously enabled
|
||||||
if systemctl is-enabled skyview-adsb >/dev/null 2>&1; then
|
if systemctl is-enabled skyview-adsb >/dev/null 2>&1; then
|
||||||
# Service was enabled, restart it
|
# Service was enabled, restart it
|
||||||
systemctl restart skyview-adsb >/dev/null 2>&1 || true
|
systemctl restart skyview-adsb >/dev/null 2>&1 || true
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Only restart database timer if it was already enabled
|
||||||
|
if systemctl is-enabled skyview-database-update.timer >/dev/null 2>&1; then
|
||||||
|
systemctl restart skyview-database-update.timer >/dev/null 2>&1 || true
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Initialize database on first install (but don't auto-enable timer)
|
||||||
|
if [ ! -f /var/lib/skyview/skyview.db ]; then
|
||||||
|
echo "Initializing SkyView database..."
|
||||||
|
sudo -u skyview /usr/bin/skyview-data update >/dev/null 2>&1 || true
|
||||||
|
echo "Database initialized with safe (public domain) data."
|
||||||
|
echo ""
|
||||||
|
echo "To enable automatic weekly updates:"
|
||||||
|
echo " systemctl enable --now skyview-database-update.timer"
|
||||||
|
echo ""
|
||||||
|
echo "To import additional data sources:"
|
||||||
|
echo " skyview-data list"
|
||||||
|
echo " skyview-data import <source>"
|
||||||
|
fi
|
||||||
;;
|
;;
|
||||||
esac
|
esac
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -8,6 +8,9 @@ Wants=network.target
|
||||||
Type=simple
|
Type=simple
|
||||||
User=skyview-adsb
|
User=skyview-adsb
|
||||||
Group=skyview-adsb
|
Group=skyview-adsb
|
||||||
|
# Update database before starting main service
|
||||||
|
ExecStartPre=/usr/bin/skyview-data -config /etc/skyview-adsb/config.json update
|
||||||
|
TimeoutStartSec=300
|
||||||
ExecStart=/usr/bin/skyview -config /etc/skyview-adsb/config.json
|
ExecStart=/usr/bin/skyview -config /etc/skyview-adsb/config.json
|
||||||
WorkingDirectory=/var/lib/skyview-adsb
|
WorkingDirectory=/var/lib/skyview-adsb
|
||||||
StandardOutput=journal
|
StandardOutput=journal
|
||||||
|
|
|
||||||
33
debian/lib/systemd/system/skyview-database-update.service
vendored
Normal file
33
debian/lib/systemd/system/skyview-database-update.service
vendored
Normal file
|
|
@ -0,0 +1,33 @@
|
||||||
|
[Unit]
|
||||||
|
Description=SkyView Database Update
|
||||||
|
Documentation=man:skyview-data(1)
|
||||||
|
After=network-online.target
|
||||||
|
Wants=network-online.target
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=oneshot
|
||||||
|
User=skyview-adsb
|
||||||
|
Group=skyview-adsb
|
||||||
|
ExecStart=/usr/bin/skyview-data update
|
||||||
|
StandardOutput=journal
|
||||||
|
StandardError=journal
|
||||||
|
|
||||||
|
# Security hardening
|
||||||
|
NoNewPrivileges=true
|
||||||
|
PrivateTmp=true
|
||||||
|
ProtectSystem=strict
|
||||||
|
ProtectHome=true
|
||||||
|
ReadWritePaths=/var/lib/skyview-adsb /tmp
|
||||||
|
ProtectKernelTunables=true
|
||||||
|
ProtectKernelModules=true
|
||||||
|
ProtectControlGroups=true
|
||||||
|
RestrictRealtime=true
|
||||||
|
RestrictSUIDSGID=true
|
||||||
|
|
||||||
|
# Resource limits
|
||||||
|
MemoryMax=256M
|
||||||
|
TasksMax=50
|
||||||
|
TimeoutStartSec=300
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
17
debian/lib/systemd/system/skyview-database-update.timer
vendored
Normal file
17
debian/lib/systemd/system/skyview-database-update.timer
vendored
Normal file
|
|
@ -0,0 +1,17 @@
|
||||||
|
[Unit]
|
||||||
|
Description=SkyView Database Update Timer
|
||||||
|
Documentation=man:skyview-data(1)
|
||||||
|
Requires=skyview-database-update.service
|
||||||
|
|
||||||
|
[Timer]
|
||||||
|
# Run weekly on Sunday at 3 AM
|
||||||
|
OnCalendar=Sun 03:00
|
||||||
|
# Randomize start time within 1 hour to avoid thundering herd
|
||||||
|
RandomizedDelaySec=3600
|
||||||
|
# Start immediately if system was down during scheduled time
|
||||||
|
Persistent=true
|
||||||
|
# Don't start if system is on battery (laptops)
|
||||||
|
ConditionACPower=true
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=timers.target
|
||||||
708
debian/usr/share/doc/skyview-adsb/CONFIGURATION.md
vendored
Normal file
708
debian/usr/share/doc/skyview-adsb/CONFIGURATION.md
vendored
Normal file
|
|
@ -0,0 +1,708 @@
|
||||||
|
# SkyView Configuration Guide
|
||||||
|
|
||||||
|
This document provides comprehensive configuration options for SkyView, including server settings, data sources, database management, and external aviation data integration.
|
||||||
|
|
||||||
|
## Configuration File Format
|
||||||
|
|
||||||
|
SkyView uses JSON configuration files. The default locations are:
|
||||||
|
- **System service**: `/etc/skyview-adsb/config.json`
|
||||||
|
- **User mode**: `~/.config/skyview/config.json`
|
||||||
|
- **Current directory**: `./config.json`
|
||||||
|
- **Custom path**: Specify with `-config path/to/config.json`
|
||||||
|
|
||||||
|
## Complete Configuration Example
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"server": {
|
||||||
|
"host": "",
|
||||||
|
"port": 8080
|
||||||
|
},
|
||||||
|
"sources": [
|
||||||
|
{
|
||||||
|
"id": "primary",
|
||||||
|
"name": "Primary Site",
|
||||||
|
"host": "localhost",
|
||||||
|
"port": 30005,
|
||||||
|
"format": "beast",
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"altitude": 50.0,
|
||||||
|
"enabled": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"origin": {
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"name": "Control Tower"
|
||||||
|
},
|
||||||
|
"settings": {
|
||||||
|
"history_limit": 1000,
|
||||||
|
"stale_timeout": 60,
|
||||||
|
"update_rate": 1
|
||||||
|
},
|
||||||
|
"database": {
|
||||||
|
"path": "",
|
||||||
|
"max_history_days": 7,
|
||||||
|
"backup_on_upgrade": true,
|
||||||
|
"max_open_conns": 10,
|
||||||
|
"max_idle_conns": 5
|
||||||
|
},
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"cache_hours": 24,
|
||||||
|
"privacy_mode": false,
|
||||||
|
"sources": {
|
||||||
|
"openflights_airlines": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 1
|
||||||
|
},
|
||||||
|
"openflights_airports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 2
|
||||||
|
},
|
||||||
|
"ourairports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 3
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration Sections
|
||||||
|
|
||||||
|
### Server Configuration
|
||||||
|
|
||||||
|
Controls the web server and API endpoints.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"server": {
|
||||||
|
"host": "",
|
||||||
|
"port": 8080
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Options
|
||||||
|
|
||||||
|
- **`host`** (string): Interface to bind to
|
||||||
|
- `""` or `"0.0.0.0"` = All interfaces (default)
|
||||||
|
- `"127.0.0.1"` = Localhost only (IPv4)
|
||||||
|
- `"::1"` = Localhost only (IPv6)
|
||||||
|
- `"192.168.1.100"` = Specific interface
|
||||||
|
|
||||||
|
- **`port`** (integer): TCP port for web interface
|
||||||
|
- Default: `8080`
|
||||||
|
- Valid range: `1-65535`
|
||||||
|
- Ports below 1024 require root privileges
|
||||||
|
|
||||||
|
### ADS-B Data Sources
|
||||||
|
|
||||||
|
Configures connections to dump1090/readsb receivers.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"sources": [
|
||||||
|
{
|
||||||
|
"id": "primary",
|
||||||
|
"name": "Primary Site",
|
||||||
|
"host": "localhost",
|
||||||
|
"port": 30005,
|
||||||
|
"format": "beast",
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"altitude": 50.0,
|
||||||
|
"enabled": true
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Source Options
|
||||||
|
|
||||||
|
- **`id`** (string, required): Unique identifier for this source
|
||||||
|
- **`name`** (string, required): Human-readable name displayed in UI
|
||||||
|
- **`host`** (string, required): Hostname or IP address of receiver
|
||||||
|
- **`port`** (integer, required): TCP port of receiver
|
||||||
|
- **`format`** (string, optional): Data format
|
||||||
|
- `"beast"` = Beast binary format (port 30005, default)
|
||||||
|
- `"vrs"` = VRS JSON format (port 33005)
|
||||||
|
- **`latitude`** (number, required): Receiver latitude in decimal degrees
|
||||||
|
- **`longitude`** (number, required): Receiver longitude in decimal degrees
|
||||||
|
- **`altitude`** (number, optional): Receiver altitude in meters above sea level
|
||||||
|
- **`enabled`** (boolean): Enable/disable this source (default: `true`)
|
||||||
|
|
||||||
|
#### Multiple Sources Example
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"sources": [
|
||||||
|
{
|
||||||
|
"id": "site1",
|
||||||
|
"name": "North Site",
|
||||||
|
"host": "192.168.1.100",
|
||||||
|
"port": 30005,
|
||||||
|
"latitude": 51.50,
|
||||||
|
"longitude": -0.46,
|
||||||
|
"enabled": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "site2",
|
||||||
|
"name": "South Site (VRS)",
|
||||||
|
"host": "192.168.1.101",
|
||||||
|
"port": 33005,
|
||||||
|
"format": "vrs",
|
||||||
|
"latitude": 51.44,
|
||||||
|
"longitude": -0.46,
|
||||||
|
"enabled": true
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Map Origin Configuration
|
||||||
|
|
||||||
|
Sets the default map center and reference point for the web interface. This is **different** from the ADS-B receiver locations defined in `sources`.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"origin": {
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"name": "Control Tower"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Purpose and Usage
|
||||||
|
|
||||||
|
The **`origin`** defines where the map centers when users first load the web interface:
|
||||||
|
|
||||||
|
- **Map Center**: Initial view focus when loading the web interface
|
||||||
|
- **Reference Point**: Visual "home" location for navigation
|
||||||
|
- **User Experience**: Where operators expect to see coverage area
|
||||||
|
- **Reset Target**: Where "Reset to Origin" button returns the map view
|
||||||
|
|
||||||
|
This is **separate from** the `sources` coordinates, which define:
|
||||||
|
- Physical ADS-B receiver locations for signal processing
|
||||||
|
- Multi-source data fusion calculations
|
||||||
|
- Coverage area computation
|
||||||
|
- Signal strength weighting
|
||||||
|
|
||||||
|
#### Options
|
||||||
|
|
||||||
|
- **`latitude`** (number, required): Center latitude in decimal degrees
|
||||||
|
- **`longitude`** (number, required): Center longitude in decimal degrees
|
||||||
|
- **`name`** (string, optional): Display name for the origin point (shown in UI)
|
||||||
|
|
||||||
|
#### Example: Multi-Site Configuration
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"origin": {
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"name": "London Control Center"
|
||||||
|
},
|
||||||
|
"sources": [
|
||||||
|
{
|
||||||
|
"id": "north",
|
||||||
|
"name": "North Site",
|
||||||
|
"latitude": 51.5200,
|
||||||
|
"longitude": -0.4600
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "south",
|
||||||
|
"name": "South Site",
|
||||||
|
"latitude": 51.4200,
|
||||||
|
"longitude": -0.4600
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
In this configuration:
|
||||||
|
- **Map centers** on the Control Center for optimal viewing
|
||||||
|
- **Two receivers** located north and south provide coverage
|
||||||
|
- **Users see** the control area as the focal point
|
||||||
|
- **System uses** both receiver locations for signal processing
|
||||||
|
|
||||||
|
#### Single-Site Simplification
|
||||||
|
|
||||||
|
For single receiver deployments, origin and source coordinates are typically the same:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"origin": {
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"name": "Primary Site"
|
||||||
|
},
|
||||||
|
"sources": [
|
||||||
|
{
|
||||||
|
"id": "primary",
|
||||||
|
"name": "Primary Receiver",
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### General Settings
|
||||||
|
|
||||||
|
Global application behavior settings.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"settings": {
|
||||||
|
"history_limit": 1000,
|
||||||
|
"stale_timeout": 60,
|
||||||
|
"update_rate": 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Options
|
||||||
|
|
||||||
|
- **`history_limit`** (integer): Maximum aircraft position history points per aircraft
|
||||||
|
- Default: `1000`
|
||||||
|
- Higher values = longer trails, more memory usage
|
||||||
|
- `0` = No history limit
|
||||||
|
|
||||||
|
- **`stale_timeout`** (integer): Seconds before aircraft is considered stale/inactive
|
||||||
|
- Default: `60` seconds
|
||||||
|
- Range: `10-600` seconds
|
||||||
|
|
||||||
|
- **`update_rate`** (integer): WebSocket update rate in seconds
|
||||||
|
- Default: `1` second
|
||||||
|
- Range: `1-10` seconds
|
||||||
|
- Lower values = more responsive, higher bandwidth
|
||||||
|
|
||||||
|
### Database Configuration
|
||||||
|
|
||||||
|
Controls SQLite database storage and performance.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"database": {
|
||||||
|
"path": "",
|
||||||
|
"max_history_days": 7,
|
||||||
|
"backup_on_upgrade": true,
|
||||||
|
"max_open_conns": 10,
|
||||||
|
"max_idle_conns": 5
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Options
|
||||||
|
|
||||||
|
- **`path`** (string): Database file path
|
||||||
|
- `""` = Auto-resolve to system appropriate location
|
||||||
|
- Custom path: `"/var/lib/skyview-adsb/skyview.db"`
|
||||||
|
- Relative path: `"./data/skyview.db"`
|
||||||
|
|
||||||
|
- **`max_history_days`** (integer): Aircraft history retention in days
|
||||||
|
- Default: `7` days
|
||||||
|
- `0` = Keep all history (unlimited)
|
||||||
|
- Range: `1-365` days
|
||||||
|
|
||||||
|
- **`backup_on_upgrade`** (boolean): Create backup before schema upgrades
|
||||||
|
- Default: `true`
|
||||||
|
- Recommended to keep enabled for safety
|
||||||
|
|
||||||
|
- **`max_open_conns`** (integer): Maximum concurrent database connections
|
||||||
|
- Default: `10`
|
||||||
|
- Range: `1-100`
|
||||||
|
|
||||||
|
- **`max_idle_conns`** (integer): Maximum idle database connections in pool
|
||||||
|
- Default: `5`
|
||||||
|
- Range: `1-50`
|
||||||
|
- Should be ≤ max_open_conns
|
||||||
|
|
||||||
|
## External Aviation Data Sources
|
||||||
|
|
||||||
|
SkyView can enhance aircraft data using external aviation databases. This section configures callsign enhancement and airline/airport lookup functionality.
|
||||||
|
|
||||||
|
### Callsign Enhancement Configuration
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"cache_hours": 24,
|
||||||
|
"privacy_mode": false,
|
||||||
|
"sources": {
|
||||||
|
"openflights_airlines": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 1
|
||||||
|
},
|
||||||
|
"openflights_airports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 2
|
||||||
|
},
|
||||||
|
"ourairports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 3
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Main Callsign Options
|
||||||
|
|
||||||
|
- **`enabled`** (boolean): Enable callsign enhancement features
|
||||||
|
- Default: `true`
|
||||||
|
- Set to `false` to disable all callsign lookups
|
||||||
|
|
||||||
|
- **`cache_hours`** (integer): Hours to cache lookup results
|
||||||
|
- Default: `24` hours
|
||||||
|
- Range: `1-168` hours (1 hour to 1 week)
|
||||||
|
|
||||||
|
- **`privacy_mode`** (boolean): Disable all external data requests
|
||||||
|
- Default: `false`
|
||||||
|
- `true` = Local-only operation (no network requests)
|
||||||
|
- `false` = Allow external data loading
|
||||||
|
|
||||||
|
### Available External Data Sources
|
||||||
|
|
||||||
|
SkyView supports three external aviation data sources:
|
||||||
|
|
||||||
|
#### 1. OpenFlights Airlines Database
|
||||||
|
|
||||||
|
- **Content**: Global airline information with ICAO/IATA codes, callsigns, and country data
|
||||||
|
- **Records**: ~6,162 airlines worldwide
|
||||||
|
- **License**: AGPL-3.0 (runtime consumption allowed)
|
||||||
|
- **Source**: https://openflights.org/data.html
|
||||||
|
- **Updates**: Downloads latest data automatically
|
||||||
|
|
||||||
|
```json
|
||||||
|
"openflights_airlines": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 1
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 2. OpenFlights Airports Database
|
||||||
|
|
||||||
|
- **Content**: Global airport data with coordinates, codes, and basic metadata
|
||||||
|
- **Records**: ~7,698 airports worldwide
|
||||||
|
- **License**: AGPL-3.0 (runtime consumption allowed)
|
||||||
|
- **Source**: https://openflights.org/data.html
|
||||||
|
- **Updates**: Downloads latest data automatically
|
||||||
|
|
||||||
|
```json
|
||||||
|
"openflights_airports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 2
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 3. OurAirports Database
|
||||||
|
|
||||||
|
- **Content**: Comprehensive airport database with detailed geographic and operational metadata
|
||||||
|
- **Records**: ~83,557 airports worldwide (includes small airfields)
|
||||||
|
- **License**: Public Domain (CC0)
|
||||||
|
- **Source**: https://ourairports.com/data/
|
||||||
|
- **Updates**: Downloads latest data automatically
|
||||||
|
|
||||||
|
```json
|
||||||
|
"ourairports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 3
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Source Configuration Options
|
||||||
|
|
||||||
|
- **`enabled`** (boolean): Enable/disable this specific data source
|
||||||
|
- **`priority`** (integer): Processing priority (lower number = higher priority)
|
||||||
|
|
||||||
|
**Note**: License information and consent requirements are handled automatically by the system. All currently available data sources are safe for automatic loading without explicit consent.
|
||||||
|
|
||||||
|
### Data Loading Performance
|
||||||
|
|
||||||
|
When all sources are enabled, expect the following performance:
|
||||||
|
|
||||||
|
- **OpenFlights Airlines**: 6,162 records in ~350ms
|
||||||
|
- **OpenFlights Airports**: 7,698 records in ~640ms
|
||||||
|
- **OurAirports**: 83,557 records in ~2.2s
|
||||||
|
- **Total**: 97,417 records in ~3.2s
|
||||||
|
|
||||||
|
## Privacy and Security Settings
|
||||||
|
|
||||||
|
### Privacy Mode
|
||||||
|
|
||||||
|
Enable privacy mode to disable all external data requests:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"callsign": {
|
||||||
|
"privacy_mode": true,
|
||||||
|
"sources": {
|
||||||
|
"openflights_airlines": {
|
||||||
|
"enabled": false
|
||||||
|
},
|
||||||
|
"openflights_airports": {
|
||||||
|
"enabled": false
|
||||||
|
},
|
||||||
|
"ourairports": {
|
||||||
|
"enabled": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Privacy Mode Features
|
||||||
|
|
||||||
|
- **No External Requests**: Completely disables all external data loading
|
||||||
|
- **Local-Only Operation**: Uses only embedded data and local cache
|
||||||
|
- **Air-Gapped Compatible**: Suitable for isolated networks
|
||||||
|
- **Compliance**: Meets strict privacy requirements
|
||||||
|
|
||||||
|
### Selective Source Control
|
||||||
|
|
||||||
|
Enable only specific data sources:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"callsign": {
|
||||||
|
"sources": {
|
||||||
|
"openflights_airlines": {
|
||||||
|
"enabled": true
|
||||||
|
},
|
||||||
|
"openflights_airports": {
|
||||||
|
"enabled": false
|
||||||
|
},
|
||||||
|
"ourairports": {
|
||||||
|
"enabled": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Database Management Commands
|
||||||
|
|
||||||
|
### Updating External Data Sources
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Update all enabled external data sources
|
||||||
|
skyview-data -config /path/to/config.json update
|
||||||
|
|
||||||
|
# List available data sources
|
||||||
|
skyview-data -config /path/to/config.json list
|
||||||
|
|
||||||
|
# Check database status and loaded sources
|
||||||
|
skyview-data -config /path/to/config.json status
|
||||||
|
```
|
||||||
|
|
||||||
|
### Database Optimization
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Optimize database storage efficiency
|
||||||
|
skyview-data -config /path/to/config.json optimize
|
||||||
|
|
||||||
|
# Check optimization statistics only
|
||||||
|
skyview-data -config /path/to/config.json optimize --stats-only
|
||||||
|
|
||||||
|
# Force optimization without prompts
|
||||||
|
skyview-data -config /path/to/config.json optimize --force
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration Validation
|
||||||
|
|
||||||
|
### Validating Configuration
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Test configuration file syntax
|
||||||
|
skyview -config config.json -test
|
||||||
|
|
||||||
|
# Verify data source connectivity
|
||||||
|
skyview-data -config config.json list
|
||||||
|
```
|
||||||
|
|
||||||
|
### Common Configuration Errors
|
||||||
|
|
||||||
|
#### JSON Syntax Errors
|
||||||
|
```
|
||||||
|
Error: invalid character '}' looking for beginning of object key string
|
||||||
|
```
|
||||||
|
**Solution**: Check for trailing commas, missing quotes, or bracket mismatches.
|
||||||
|
|
||||||
|
#### Invalid Data Types
|
||||||
|
```
|
||||||
|
Error: json: cannot unmarshal string into Go struct field
|
||||||
|
```
|
||||||
|
**Solution**: Ensure numbers are not quoted, booleans use true/false, etc.
|
||||||
|
|
||||||
|
#### Missing Required Fields
|
||||||
|
```
|
||||||
|
Error: source 'primary' missing required field: latitude
|
||||||
|
```
|
||||||
|
**Solution**: Add all required fields for each configured source.
|
||||||
|
|
||||||
|
### Minimal Configuration
|
||||||
|
|
||||||
|
For basic operation without external data sources:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"server": {
|
||||||
|
"host": "",
|
||||||
|
"port": 8080
|
||||||
|
},
|
||||||
|
"sources": [
|
||||||
|
{
|
||||||
|
"id": "primary",
|
||||||
|
"name": "Local Receiver",
|
||||||
|
"host": "localhost",
|
||||||
|
"port": 30005,
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"enabled": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"settings": {
|
||||||
|
"history_limit": 1000,
|
||||||
|
"stale_timeout": 60,
|
||||||
|
"update_rate": 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Full-Featured Configuration
|
||||||
|
|
||||||
|
For complete functionality with all external data sources:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"server": {
|
||||||
|
"host": "",
|
||||||
|
"port": 8080
|
||||||
|
},
|
||||||
|
"sources": [
|
||||||
|
{
|
||||||
|
"id": "primary",
|
||||||
|
"name": "Primary Site",
|
||||||
|
"host": "localhost",
|
||||||
|
"port": 30005,
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"altitude": 50.0,
|
||||||
|
"enabled": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"origin": {
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"name": "Control Tower"
|
||||||
|
},
|
||||||
|
"settings": {
|
||||||
|
"history_limit": 4000,
|
||||||
|
"stale_timeout": 60,
|
||||||
|
"update_rate": 1
|
||||||
|
},
|
||||||
|
"database": {
|
||||||
|
"path": "",
|
||||||
|
"max_history_days": 7,
|
||||||
|
"backup_on_upgrade": true,
|
||||||
|
"max_open_conns": 10,
|
||||||
|
"max_idle_conns": 5
|
||||||
|
},
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"cache_hours": 24,
|
||||||
|
"privacy_mode": false,
|
||||||
|
"sources": {
|
||||||
|
"openflights_airlines": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 1
|
||||||
|
},
|
||||||
|
"openflights_airports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 2
|
||||||
|
},
|
||||||
|
"ourairports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 3
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This configuration enables all SkyView features including multi-source ADS-B data fusion, comprehensive aviation database integration, and database optimization.
|
||||||
|
|
||||||
|
## Environment-Specific Examples
|
||||||
|
|
||||||
|
### Production System Service
|
||||||
|
|
||||||
|
Configuration for systemd service deployment:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"server": {
|
||||||
|
"host": "0.0.0.0",
|
||||||
|
"port": 8080
|
||||||
|
},
|
||||||
|
"database": {
|
||||||
|
"path": "/var/lib/skyview-adsb/skyview.db",
|
||||||
|
"max_history_days": 30,
|
||||||
|
"backup_on_upgrade": true
|
||||||
|
},
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"privacy_mode": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Development/Testing
|
||||||
|
|
||||||
|
Configuration for development use:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"server": {
|
||||||
|
"host": "127.0.0.1",
|
||||||
|
"port": 3000
|
||||||
|
},
|
||||||
|
"database": {
|
||||||
|
"path": "./dev-skyview.db",
|
||||||
|
"max_history_days": 1
|
||||||
|
},
|
||||||
|
"settings": {
|
||||||
|
"history_limit": 100,
|
||||||
|
"update_rate": 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Air-Gapped/Secure Environment
|
||||||
|
|
||||||
|
Configuration for isolated networks:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"privacy_mode": true,
|
||||||
|
"sources": {
|
||||||
|
"openflights_airlines": {
|
||||||
|
"enabled": false
|
||||||
|
},
|
||||||
|
"openflights_airports": {
|
||||||
|
"enabled": false
|
||||||
|
},
|
||||||
|
"ourairports": {
|
||||||
|
"enabled": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
729
debian/usr/share/doc/skyview-adsb/DATABASE.md
vendored
Normal file
729
debian/usr/share/doc/skyview-adsb/DATABASE.md
vendored
Normal file
|
|
@ -0,0 +1,729 @@
|
||||||
|
# SkyView Database Architecture
|
||||||
|
|
||||||
|
This document describes SkyView's SQLite database architecture, migration system, and integration approach for persistent data storage.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
SkyView uses a single SQLite database to store:
|
||||||
|
- **Historic aircraft data**: Position history, message counts, signal strength
|
||||||
|
- **Callsign lookup data**: Cached airline/airport information from external APIs
|
||||||
|
- **Embedded aviation data**: OpenFlights airline and airport databases
|
||||||
|
|
||||||
|
## Database Design Principles
|
||||||
|
|
||||||
|
### Embedded Architecture
|
||||||
|
- Single SQLite file for all persistent data
|
||||||
|
- No external database dependencies
|
||||||
|
- Self-contained deployment with embedded schemas
|
||||||
|
- Backward compatibility through versioned migrations
|
||||||
|
|
||||||
|
### Performance Optimization
|
||||||
|
- Strategic indexing for time-series aircraft data
|
||||||
|
- Efficient lookups for callsign enhancement
|
||||||
|
- Configurable data retention policies
|
||||||
|
- Query optimization for real-time operations
|
||||||
|
|
||||||
|
### Data Safety
|
||||||
|
- Atomic migration transactions
|
||||||
|
- Pre-migration backups for destructive changes
|
||||||
|
- Data loss warnings for schema changes
|
||||||
|
- Rollback capabilities where possible
|
||||||
|
|
||||||
|
## Database Schema
|
||||||
|
|
||||||
|
### Core Tables
|
||||||
|
|
||||||
|
#### `schema_info`
|
||||||
|
Tracks database version and applied migrations:
|
||||||
|
```sql
|
||||||
|
CREATE TABLE schema_info (
|
||||||
|
version INTEGER PRIMARY KEY,
|
||||||
|
applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
description TEXT,
|
||||||
|
checksum TEXT
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### `aircraft_history`
|
||||||
|
Stores time-series aircraft position and message data:
|
||||||
|
```sql
|
||||||
|
CREATE TABLE aircraft_history (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
icao TEXT NOT NULL,
|
||||||
|
timestamp TIMESTAMP NOT NULL,
|
||||||
|
latitude REAL,
|
||||||
|
longitude REAL,
|
||||||
|
altitude INTEGER,
|
||||||
|
speed INTEGER,
|
||||||
|
track INTEGER,
|
||||||
|
vertical_rate INTEGER,
|
||||||
|
squawk TEXT,
|
||||||
|
callsign TEXT,
|
||||||
|
source_id TEXT NOT NULL,
|
||||||
|
signal_strength REAL
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Indexes:**
|
||||||
|
- `idx_aircraft_history_icao_time`: Fast queries by aircraft and time range
|
||||||
|
- `idx_aircraft_history_timestamp`: Time-based cleanup and queries
|
||||||
|
- `idx_aircraft_history_callsign`: Callsign-based searches
|
||||||
|
|
||||||
|
#### `airlines`
|
||||||
|
Multi-source airline database with unified schema:
|
||||||
|
```sql
|
||||||
|
CREATE TABLE airlines (
|
||||||
|
id INTEGER PRIMARY KEY,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
alias TEXT,
|
||||||
|
iata_code TEXT,
|
||||||
|
icao_code TEXT,
|
||||||
|
callsign TEXT,
|
||||||
|
country TEXT,
|
||||||
|
country_code TEXT,
|
||||||
|
active BOOLEAN DEFAULT 1,
|
||||||
|
data_source TEXT NOT NULL DEFAULT 'unknown',
|
||||||
|
source_id TEXT,
|
||||||
|
imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Indexes:**
|
||||||
|
- `idx_airlines_icao_code`: ICAO code lookup (primary for callsign enhancement)
|
||||||
|
- `idx_airlines_iata_code`: IATA code lookup
|
||||||
|
- `idx_airlines_callsign`: Radio callsign lookup
|
||||||
|
- `idx_airlines_country_code`: Country-based filtering
|
||||||
|
- `idx_airlines_active`: Active airlines filtering
|
||||||
|
- `idx_airlines_source`: Data source tracking
|
||||||
|
|
||||||
|
#### `airports`
|
||||||
|
Multi-source airport database with comprehensive metadata:
|
||||||
|
```sql
CREATE TABLE airports (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    ident TEXT,
    type TEXT,
    city TEXT,
    municipality TEXT,
    region TEXT,
    country TEXT,
    country_code TEXT,
    continent TEXT,
    iata_code TEXT,
    icao_code TEXT,
    local_code TEXT,
    gps_code TEXT,
    latitude REAL,
    longitude REAL,
    elevation_ft INTEGER,
    scheduled_service BOOLEAN DEFAULT 0,
    home_link TEXT,
    wikipedia_link TEXT,
    keywords TEXT,
    timezone_offset REAL,
    timezone TEXT,
    dst_type TEXT,
    data_source TEXT NOT NULL DEFAULT 'unknown',
    source_id TEXT,
    imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
```
|
||||||
|
|
||||||
|
**Indexes:**
|
||||||
|
- `idx_airports_icao_code`: ICAO code lookup
|
||||||
|
- `idx_airports_iata_code`: IATA code lookup
|
||||||
|
- `idx_airports_ident`: Airport identifier lookup
|
||||||
|
- `idx_airports_country_code`: Country-based filtering
|
||||||
|
- `idx_airports_type`: Airport type filtering
|
||||||
|
- `idx_airports_coords`: Geographic coordinate queries
|
||||||
|
- `idx_airports_source`: Data source tracking
|
||||||
|
|
||||||
|
#### `callsign_cache`
|
||||||
|
Caches external API lookups and local enrichment for callsign enhancement:
|
||||||
|
```sql
CREATE TABLE callsign_cache (
    callsign TEXT PRIMARY KEY,
    airline_icao TEXT,
    airline_iata TEXT,
    airline_name TEXT,
    airline_country TEXT,
    flight_number TEXT,
    origin_iata TEXT,        -- Departure airport IATA code
    destination_iata TEXT,   -- Arrival airport IATA code
    aircraft_type TEXT,
    route TEXT,              -- Full route description
    status TEXT,             -- Flight status (scheduled, delayed, etc.)
    source TEXT NOT NULL DEFAULT 'local',
    cached_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    expires_at TIMESTAMP NOT NULL
);
```
|
||||||
|
|
||||||
|
**Route Information Fields:**
|
||||||
|
- **`origin_iata`**: IATA code of departure airport (e.g., "JFK" for New York JFK)
|
||||||
|
- **`destination_iata`**: IATA code of arrival airport (e.g., "LAX" for Los Angeles)
|
||||||
|
- **`route`**: Human-readable route description (e.g., "JFK-LAX" or "New York to Los Angeles")
|
||||||
|
- **`status`**: Current flight status when available from external APIs
|
||||||
|
|
||||||
|
These fields enable enhanced flight tracking with origin-destination pairs and route visualization.
|
||||||
|
|
||||||
|
**Indexes:**
|
||||||
|
- `idx_callsign_cache_expires`: Efficient cache cleanup
|
||||||
|
- `idx_callsign_cache_airline`: Airline-based queries
|
||||||
|
|
||||||
|
#### `data_sources`
|
||||||
|
Tracks loaded external data sources and their metadata:
|
||||||
|
```sql
CREATE TABLE data_sources (
    name TEXT PRIMARY KEY,
    license TEXT NOT NULL,
    url TEXT,
    version TEXT,
    imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    record_count INTEGER DEFAULT 0,
    user_accepted_license BOOLEAN DEFAULT 0
);
```
|
||||||
|
|
||||||
|
## Database Location Strategy
|
||||||
|
|
||||||
|
### Path Resolution Order
|
||||||
|
1. **Explicit configuration**: `database.path` in config file
|
||||||
|
2. **System service**: `/var/lib/skyview/skyview.db`
|
||||||
|
3. **User mode**: `~/.local/share/skyview/skyview.db`
|
||||||
|
4. **Fallback**: `./skyview.db` in current directory
|
||||||
|
|
||||||
|
### Directory Permissions
|
||||||
|
- System: `root:root` with `755` permissions for `/var/lib/skyview/`
|
||||||
|
- User: User-owned directories with standard permissions
|
||||||
|
- Service: `skyview:skyview` user/group for system service
|
||||||
|
|
||||||
|
## Migration System
|
||||||
|
|
||||||
|
### Migration Structure
|
||||||
|
```go
type Migration struct {
    Version     int    // Sequential version number
    Description string // Human-readable description
    Up          string // SQL for applying migration
    Down        string // SQL for rollback (optional)
    DataLoss    bool   // Warning flag for destructive changes
}
```
|
||||||
|
|
||||||
|
### Migration Process
|
||||||
|
1. **Version Check**: Compare current schema version with available migrations
|
||||||
|
2. **Backup**: Create automatic backup before destructive changes
|
||||||
|
3. **Transaction**: Wrap each migration in atomic transaction
|
||||||
|
4. **Validation**: Verify schema integrity after migration
|
||||||
|
5. **Logging**: Record successful migrations in `schema_info`
|
||||||
|
|
||||||
|
### Data Loss Protection
|
||||||
|
- Migrations marked with `DataLoss: true` require explicit user consent
|
||||||
|
- Automatic backups created before destructive operations
|
||||||
|
- Warning messages displayed during upgrade process
|
||||||
|
- Rollback SQL provided where possible
|
||||||
|
|
||||||
|
### Example Migration Sequence
|
||||||
|
```go
var migrations = []Migration{
    {
        Version:     1,
        Description: "Initial schema with aircraft history",
        Up:          createInitialSchema,
        DataLoss:    false,
    },
    {
        Version:     2,
        Description: "Add OpenFlights airline and airport data",
        Up:          addAviationTables,
        DataLoss:    false,
    },
    {
        Version:     3,
        Description: "Add callsign lookup cache",
        Up:          addCallsignCache,
        DataLoss:    false,
    },
}
```
|
||||||
|
|
||||||
|
## Data Sources and Loading
|
||||||
|
|
||||||
|
SkyView supports multiple aviation data sources with automatic conflict resolution and license compliance.
|
||||||
|
|
||||||
|
### Supported Data Sources
|
||||||
|
|
||||||
|
#### OpenFlights Airlines Database
|
||||||
|
- **Source**: https://openflights.org/data.html
|
||||||
|
- **License**: Open Database License (ODbL) 1.0
|
||||||
|
- **Content**: Global airline data with ICAO/IATA codes, callsigns, and country information
|
||||||
|
- **Records**: ~6,162 airlines
|
||||||
|
- **Update Method**: Runtime download (no license confirmation required)
|
||||||
|
|
||||||
|
#### OpenFlights Airports Database
|
||||||
|
- **Source**: https://openflights.org/data.html
|
||||||
|
- **License**: Open Database License (ODbL) 1.0
|
||||||
|
- **Content**: Global airport data with coordinates, codes, and metadata
|
||||||
|
- **Records**: ~7,698 airports
|
||||||
|
- **Update Method**: Runtime download
|
||||||
|
|
||||||
|
#### OurAirports Database
|
||||||
|
- **Source**: https://ourairports.com/data/
|
||||||
|
- **License**: Creative Commons Zero (CC0) 1.0
|
||||||
|
- **Content**: Comprehensive airport database with detailed metadata
|
||||||
|
- **Records**: ~83,557 airports
|
||||||
|
- **Update Method**: Runtime download
|
||||||
|
|
||||||
|
### Data Loading System
|
||||||
|
|
||||||
|
#### Intelligent Conflict Resolution
|
||||||
|
The data loading system uses **INSERT OR REPLACE** upserts to handle overlapping data:
|
||||||
|
|
||||||
|
```sql
INSERT OR REPLACE INTO airlines (id, name, alias, iata_code, icao_code, callsign, country, active, data_source)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
```
|
||||||
|
|
||||||
|
This ensures that:
|
||||||
|
- Duplicate records are automatically updated rather than causing errors
|
||||||
|
- Later data sources can override earlier ones
|
||||||
|
- Database integrity is maintained during bulk loads
|
||||||
|
|
||||||
|
#### Loading Process
|
||||||
|
1. **Source Validation**: Verify data source accessibility and format
|
||||||
|
2. **Incremental Processing**: Process data in chunks to manage memory
|
||||||
|
3. **Error Handling**: Log and continue on individual record errors
|
||||||
|
4. **Statistics Reporting**: Track records processed, added, and errors
|
||||||
|
5. **Source Tracking**: Record metadata about each loaded source
|
||||||
|
|
||||||
|
#### Performance Characteristics
|
||||||
|
- **OpenFlights Airlines**: ~6,162 records in ~363ms
|
||||||
|
- **OpenFlights Airports**: ~7,698 records in ~200ms
|
||||||
|
- **OurAirports**: ~83,557 records in ~980ms
|
||||||
|
- **Error Rate**: <0.1% under normal conditions
|
||||||
|
|
||||||
|
## Configuration Integration
|
||||||
|
|
||||||
|
### Database Configuration
|
||||||
|
```json
{
  "database": {
    "path": "/var/lib/skyview-adsb/skyview.db",
    "max_history_days": 7,
    "backup_on_upgrade": true,
    "vacuum_interval": "24h",
    "page_size": 4096
  },
  "callsign": {
    "enabled": true,
    "cache_hours": 24,
    "external_apis": true,
    "privacy_mode": false
  }
}
```
|
||||||
|
|
||||||
|
### Configuration Fields
|
||||||
|
|
||||||
|
#### `database`
|
||||||
|
- **`path`**: Database file location (empty = auto-resolve)
|
||||||
|
- **`max_history_days`**: Retention policy for aircraft history (0 = unlimited)
|
||||||
|
- **`backup_on_upgrade`**: Create backup before schema migrations
|
||||||
|
|
||||||
|
#### `callsign`
|
||||||
|
- **`enabled`**: Enable callsign enhancement features
|
||||||
|
- **`cache_hours`**: TTL for cached external API results
|
||||||
|
- **`privacy_mode`**: Disable all external data requests
|
||||||
|
- **`sources`**: Independent control for each data source
|
||||||
|
|
||||||
|
### Enhanced Configuration Example
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"cache_hours": 24,
|
||||||
|
"privacy_mode": false,
|
||||||
|
"sources": {
|
||||||
|
"openflights_embedded": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 1,
|
||||||
|
"license": "AGPL-3.0"
|
||||||
|
},
|
||||||
|
"faa_registry": {
|
||||||
|
"enabled": false,
|
||||||
|
"priority": 2,
|
||||||
|
"update_frequency": "weekly",
|
||||||
|
"license": "public_domain"
|
||||||
|
},
|
||||||
|
"opensky_api": {
|
||||||
|
"enabled": false,
|
||||||
|
"priority": 3,
|
||||||
|
"timeout_seconds": 5,
|
||||||
|
"max_retries": 2,
|
||||||
|
"requires_consent": true,
|
||||||
|
"license_warning": "Commercial use requires OpenSky Network consent",
|
||||||
|
"user_accepts_terms": false
|
||||||
|
},
|
||||||
|
"custom_database": {
|
||||||
|
"enabled": false,
|
||||||
|
"priority": 4,
|
||||||
|
"path": "",
|
||||||
|
"license": "user_verified"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"fallback_chain": ["openflights_embedded", "faa_registry", "opensky_api", "custom_database"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Individual Source Configuration Options
|
||||||
|
- **`enabled`**: Enable/disable this specific source
|
||||||
|
- **`priority`**: Processing order (lower numbers = higher priority)
|
||||||
|
- **`license`**: License type for compliance tracking
|
||||||
|
- **`requires_consent`**: Whether source requires explicit user consent
|
||||||
|
- **`user_accepts_terms`**: User acknowledgment of licensing terms
|
||||||
|
- **`timeout_seconds`**: Per-source timeout configuration
|
||||||
|
- **`max_retries`**: Per-source retry limits
|
||||||
|
- **`update_frequency`**: For downloadable sources (daily/weekly/monthly)
|
||||||
|
|
||||||
|
## Debian Package Integration
|
||||||
|
|
||||||
|
### Package Structure
|
||||||
|
```
|
||||||
|
/var/lib/skyview/ # Database directory
|
||||||
|
/etc/skyview/config.json # Default configuration
|
||||||
|
/usr/bin/skyview # Main application
|
||||||
|
/usr/share/skyview/ # Embedded resources
|
||||||
|
```
|
||||||
|
|
||||||
|
### Installation Process
|
||||||
|
1. **`postinst`**: Create directories, user accounts, permissions
|
||||||
|
2. **First Run**: Database initialization and migration on startup
|
||||||
|
3. **Upgrades**: Automatic schema migration with backup
|
||||||
|
4. **Service**: Systemd integration with proper database access
|
||||||
|
|
||||||
|
### Service User
|
||||||
|
- User: `skyview-adsb`
|
||||||
|
- Home: `/var/lib/skyview-adsb`
|
||||||
|
- Shell: `/bin/false` (service account)
|
||||||
|
- Database: Read/write access to `/var/lib/skyview-adsb/`
|
||||||
|
|
||||||
|
### Automatic Database Updates
|
||||||
|
The systemd service configuration includes automatic database updates on startup:
|
||||||
|
|
||||||
|
```ini
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
User=skyview-adsb
|
||||||
|
Group=skyview-adsb
|
||||||
|
# Update database before starting main service
|
||||||
|
ExecStartPre=/usr/bin/skyview-data -config /etc/skyview-adsb/config.json update
|
||||||
|
TimeoutStartSec=300
|
||||||
|
ExecStart=/usr/bin/skyview -config /etc/skyview-adsb/config.json
|
||||||
|
```
|
||||||
|
|
||||||
|
This ensures aviation data sources are refreshed before each service start, complementing the weekly timer-based updates.
|
||||||
|
|
||||||
|
## Data Retention and Cleanup
|
||||||
|
|
||||||
|
### Automatic Cleanup
|
||||||
|
- **Aircraft History**: Configurable retention period (`max_history_days`)
|
||||||
|
- **Cache Expiration**: TTL-based cleanup of external API cache
|
||||||
|
- **Optimization**: Periodic VACUUM operations for storage efficiency
|
||||||
|
|
||||||
|
### Manual Maintenance
|
||||||
|
```sql
|
||||||
|
-- Clean old aircraft history (example: 7 days)
|
||||||
|
DELETE FROM aircraft_history
|
||||||
|
WHERE timestamp < datetime('now', '-7 days');
|
||||||
|
|
||||||
|
-- Clean expired cache entries
|
||||||
|
DELETE FROM callsign_cache
|
||||||
|
WHERE expires_at < datetime('now');
|
||||||
|
|
||||||
|
-- Optimize database storage
|
||||||
|
VACUUM;
|
||||||
|
```
|
||||||
|
|
||||||
|
## Database Optimization
|
||||||
|
|
||||||
|
SkyView includes a comprehensive database optimization system that automatically manages storage efficiency and performance.
|
||||||
|
|
||||||
|
### Optimization Features
|
||||||
|
|
||||||
|
#### Automatic VACUUM Operations
|
||||||
|
- **Full VACUUM**: Rebuilds database to reclaim deleted space
|
||||||
|
- **Incremental VACUUM**: Gradual space reclamation with minimal performance impact
|
||||||
|
- **Scheduled Maintenance**: Configurable intervals for automatic optimization
|
||||||
|
- **Size Reporting**: Before/after statistics with space savings metrics
|
||||||
|
|
||||||
|
#### Storage Optimization
|
||||||
|
- **Page Size Optimization**: Configurable SQLite page size for optimal performance
|
||||||
|
- **Auto-Vacuum Configuration**: Enables incremental space reclamation
|
||||||
|
- **Statistics Updates**: ANALYZE operations for query plan optimization
|
||||||
|
- **Efficiency Monitoring**: Real-time storage efficiency reporting
|
||||||
|
|
||||||
|
### Using the Optimization System
|
||||||
|
|
||||||
|
#### Command Line Interface
|
||||||
|
```bash
|
||||||
|
# Run comprehensive database optimization
|
||||||
|
skyview-data optimize
|
||||||
|
|
||||||
|
# Run with force flag to skip confirmation prompts
|
||||||
|
skyview-data optimize --force
|
||||||
|
|
||||||
|
# Check current optimization statistics
|
||||||
|
skyview-data optimize --stats-only
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Optimization Output Example
|
||||||
|
```
|
||||||
|
Optimizing database for storage efficiency...
|
||||||
|
✓ Auto VACUUM: Enable incremental auto-vacuum
|
||||||
|
✓ Incremental VACUUM: Reclaim free pages incrementally
|
||||||
|
✓ Optimize: Update SQLite query planner statistics
|
||||||
|
✓ Analyze: Update table statistics for better query plans
|
||||||
|
|
||||||
|
VACUUM completed in 1.2s: 275.3 MB → 263.1 MB (saved 12.2 MB, 4.4%)
|
||||||
|
|
||||||
|
Database optimization completed successfully.
|
||||||
|
Storage efficiency: 96.6% (263.1 MB used of 272.4 MB allocated)
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Configuration Options
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"database": {
|
||||||
|
"vacuum_interval": "24h",
|
||||||
|
"page_size": 4096,
|
||||||
|
"enable_compression": true,
|
||||||
|
"compression_level": 6
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Optimization Statistics
|
||||||
|
|
||||||
|
The optimization system provides detailed metrics about database performance:
|
||||||
|
|
||||||
|
#### Available Statistics
|
||||||
|
- **Database Size**: Total file size in bytes
|
||||||
|
- **Page Statistics**: Page size, count, and utilization
|
||||||
|
- **Storage Efficiency**: Percentage of allocated space actually used
|
||||||
|
- **Free Space**: Amount of reclaimable space available
|
||||||
|
- **Auto-Vacuum Status**: Current auto-vacuum configuration
|
||||||
|
- **Last Optimization**: Timestamp of most recent optimization
|
||||||
|
|
||||||
|
#### Programmatic Access
|
||||||
|
```go
|
||||||
|
// Get current optimization statistics
|
||||||
|
optimizer := NewOptimizationManager(db, config)
|
||||||
|
stats, err := optimizer.GetOptimizationStats()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal("Failed to get stats:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Database efficiency: %.1f%%\n", stats.Efficiency)
|
||||||
|
fmt.Printf("Storage used: %.1f MB\n", float64(stats.DatabaseSize)/(1024*1024))
|
||||||
|
```
|
||||||
|
|
||||||
|
## Performance Considerations
|
||||||
|
|
||||||
|
### Query Optimization
|
||||||
|
- Time-range queries use `idx_aircraft_history_icao_time`
|
||||||
|
- Callsign lookups prioritize local cache over external APIs
|
||||||
|
- Bulk operations use transactions for consistency
|
||||||
|
|
||||||
|
### Storage Efficiency
|
||||||
|
- Configurable history limits prevent unbounded growth
|
||||||
|
- Automatic VACUUM operations with optimization reporting
|
||||||
|
- Compressed timestamps and efficient data types
|
||||||
|
- Page size optimization for storage efficiency
|
||||||
|
- Auto-vacuum configuration for incremental space reclamation
|
||||||
|
|
||||||
|
### Memory Usage
|
||||||
|
- WAL mode for concurrent read/write access
|
||||||
|
- Connection pooling for multiple goroutines
|
||||||
|
- Prepared statements for repeated queries
|
||||||
|
|
||||||
|
## Privacy and Security
|
||||||
|
|
||||||
|
### Privacy Mode
|
||||||
|
SkyView includes comprehensive privacy controls through the `privacy_mode` configuration option:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"privacy_mode": true,
|
||||||
|
"external_apis": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Privacy Mode Features
|
||||||
|
- **No External Calls**: Completely disables all external API requests
|
||||||
|
- **Local-Only Lookups**: Uses only embedded OpenFlights database for callsign enhancement
|
||||||
|
- **No Data Transmission**: Aircraft data never leaves the local system
|
||||||
|
- **Compliance**: Suitable for sensitive environments requiring air-gapped operation
|
||||||
|
|
||||||
|
#### Privacy Mode Behavior
|
||||||
|
| Feature | Privacy Mode ON | Privacy Mode OFF |
|
||||||
|
|---------|----------------|------------------|
|
||||||
|
| External API calls | ❌ Disabled | ✅ Configurable |
|
||||||
|
| OpenFlights lookup | ✅ Enabled | ✅ Enabled |
|
||||||
|
| Callsign caching | ✅ Local only | ✅ Full caching |
|
||||||
|
| Data transmission | ❌ None | ⚠️ API calls only |
|
||||||
|
|
||||||
|
#### Use Cases for Privacy Mode
|
||||||
|
- **Military installations**: No external data transmission allowed
|
||||||
|
- **Air-gapped networks**: No internet connectivity available
|
||||||
|
- **Corporate policies**: External API usage prohibited
|
||||||
|
- **Personal privacy**: User preference for local-only operation
|
||||||
|
|
||||||
|
### Security Considerations
|
||||||
|
|
||||||
|
#### File Permissions
|
||||||
|
- Database files readable only by skyview user/group
|
||||||
|
- Configuration files protected from unauthorized access
|
||||||
|
- Backup files inherit secure permissions
|
||||||
|
|
||||||
|
#### Data Protection
|
||||||
|
- Local SQLite database with file-system level security
|
||||||
|
- No cloud storage or external database dependencies
|
||||||
|
- All aviation data processed and stored locally
|
||||||
|
|
||||||
|
#### Network Security
|
||||||
|
- External API calls (when enabled) use HTTPS only
|
||||||
|
- No persistent connections to external services
|
||||||
|
- Optional certificate validation for API endpoints
|
||||||
|
|
||||||
|
### Data Integrity
|
||||||
|
- Foreign key constraints where applicable
|
||||||
|
- Transaction isolation for concurrent operations
|
||||||
|
- Checksums for migration verification
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Common Issues
|
||||||
|
|
||||||
|
#### Database Locked
|
||||||
|
```
|
||||||
|
Error: database is locked
|
||||||
|
```
|
||||||
|
**Solution**: Stop SkyView service, check for stale lock files, restart
|
||||||
|
|
||||||
|
#### Migration Failures
|
||||||
|
```
|
||||||
|
Error: migration 3 failed: table already exists
|
||||||
|
```
|
||||||
|
**Solution**: Check schema version, restore from backup, retry migration
|
||||||
|
|
||||||
|
#### Permission Denied
|
||||||
|
```
|
||||||
|
Error: unable to open database file
|
||||||
|
```
|
||||||
|
**Solution**: Verify file permissions, check directory ownership, ensure disk space
|
||||||
|
|
||||||
|
### Diagnostic Commands
|
||||||
|
```bash
|
||||||
|
# Check database integrity
|
||||||
|
sqlite3 /var/lib/skyview/skyview.db "PRAGMA integrity_check;"
|
||||||
|
|
||||||
|
# View schema version
|
||||||
|
sqlite3 /var/lib/skyview/skyview.db "SELECT * FROM schema_info;"
|
||||||
|
|
||||||
|
# Database statistics
|
||||||
|
sqlite3 /var/lib/skyview/skyview.db ".dbinfo"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Testing and Quality Assurance
|
||||||
|
|
||||||
|
SkyView includes comprehensive test coverage for all database functionality to ensure reliability and data integrity.
|
||||||
|
|
||||||
|
### Test Coverage Areas
|
||||||
|
|
||||||
|
#### Core Database Functionality
|
||||||
|
- **Database Creation and Initialization**: Connection management, configuration handling
|
||||||
|
- **Migration System**: Schema versioning, upgrade/downgrade operations
|
||||||
|
- **Connection Pooling**: Concurrent access, connection lifecycle management
|
||||||
|
- **SQLite Pragma Settings**: WAL mode, foreign keys, performance optimizations
|
||||||
|
|
||||||
|
#### Data Loading and Management
|
||||||
|
- **Multi-Source Loading**: OpenFlights, OurAirports data integration
|
||||||
|
- **Conflict Resolution**: Upsert operations, duplicate handling
|
||||||
|
- **Error Handling**: Network failures, malformed data recovery
|
||||||
|
- **Performance Validation**: Loading speed, memory usage optimization
|
||||||
|
|
||||||
|
#### Callsign Enhancement System
|
||||||
|
- **Parsing Logic**: Callsign validation, airline code extraction
|
||||||
|
- **Database Integration**: Local lookups, caching operations
|
||||||
|
- **Search Functionality**: Airline filtering, country-based queries
|
||||||
|
- **Cache Management**: TTL handling, cleanup operations
|
||||||
|
|
||||||
|
#### Optimization System
|
||||||
|
- **VACUUM Operations**: Space reclamation, performance monitoring
|
||||||
|
- **Page Size Optimization**: Configuration validation, storage efficiency
|
||||||
|
- **Statistics Generation**: Metrics accuracy, reporting consistency
|
||||||
|
- **Maintenance Scheduling**: Automated optimization, interval management
|
||||||
|
|
||||||
|
### Test Infrastructure
|
||||||
|
|
||||||
|
#### Automated Test Setup
|
||||||
|
```go
|
||||||
|
// setupTestDatabase creates isolated test environment
|
||||||
|
func setupTestDatabase(t *testing.T) (*Database, func()) {
|
||||||
|
tempFile, _ := os.CreateTemp("", "test_skyview_*.db")
|
||||||
|
config := &Config{Path: tempFile.Name()}
|
||||||
|
db, _ := NewDatabase(config)
|
||||||
|
db.Initialize() // Run all migrations
|
||||||
|
|
||||||
|
cleanup := func() {
|
||||||
|
db.Close()
|
||||||
|
os.Remove(tempFile.Name())
|
||||||
|
}
|
||||||
|
return db, cleanup
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Network-Safe Testing
|
||||||
|
Tests gracefully handle network connectivity issues:
|
||||||
|
- Skip tests requiring external data sources when offline
|
||||||
|
- Provide meaningful error messages for connectivity failures
|
||||||
|
- Use local test data when external sources are unavailable
|
||||||
|
|
||||||
|
### Running Tests
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Run all database tests
|
||||||
|
go test -v ./internal/database/...
|
||||||
|
|
||||||
|
# Run tests in short mode (skip long-running network tests)
|
||||||
|
go test -v -short ./internal/database/...
|
||||||
|
|
||||||
|
# Run specific test categories
|
||||||
|
go test -v -run="TestDatabase" ./internal/database/...
|
||||||
|
go test -v -run="TestOptimization" ./internal/database/...
|
||||||
|
go test -v -run="TestCallsign" ./internal/database/...
|
||||||
|
```
|
||||||
|
|
||||||
|
## Future Enhancements
|
||||||
|
|
||||||
|
### Planned Features
|
||||||
|
- **Compression**: Time-series compression for long-term storage
|
||||||
|
- **Partitioning**: Date-based partitioning for large datasets
|
||||||
|
- **Replication**: Read replica support for high-availability setups
|
||||||
|
- **Analytics**: Built-in reporting and statistics tables
|
||||||
|
- **Enhanced Route Data**: Integration with additional flight tracking APIs
|
||||||
|
- **Geographic Indexing**: Spatial queries for airport proximity searches
|
||||||
|
|
||||||
|
### Migration Path
|
||||||
|
- All enhancements will use versioned migrations
|
||||||
|
- Backward compatibility maintained for existing installations
|
||||||
|
- Data preservation prioritized over schema optimization
|
||||||
|
- Comprehensive testing required for all schema changes
|
||||||
181
debian/usr/share/man/man1/skyview-data.1
vendored
Normal file
181
debian/usr/share/man/man1/skyview-data.1
vendored
Normal file
|
|
@ -0,0 +1,181 @@
|
||||||
|
.TH skyview-data 1 "January 2025" "SkyView Database Manager"
|
||||||
|
.SH NAME
|
||||||
|
skyview-data \- SkyView aviation database management utility
|
||||||
|
|
||||||
|
.SH SYNOPSIS
|
||||||
|
.B skyview-data
|
||||||
|
[\fIOPTIONS\fR] \fICOMMAND\fR [\fIARGS\fR...]
|
||||||
|
|
||||||
|
.SH DESCRIPTION
|
||||||
|
.B skyview-data
|
||||||
|
manages the SkyView aviation database, allowing users to import airline and airport data from various external sources while maintaining license compliance.
|
||||||
|
|
||||||
|
The tool automatically creates and migrates the database schema, downloads data from public and licensed sources, and provides status monitoring for the aviation database used by SkyView for callsign enhancement.
|
||||||
|
|
||||||
|
.SH OPTIONS
|
||||||
|
.TP
|
||||||
|
.BR \-db " \fIPATH\fR"
|
||||||
|
Database file path (auto-detected if empty)
|
||||||
|
.TP
|
||||||
|
.BR \-v ", " \-\-verbose
|
||||||
|
Verbose output
|
||||||
|
.TP
|
||||||
|
.BR \-\-force
|
||||||
|
Force operation without prompts
|
||||||
|
.TP
|
||||||
|
.BR \-\-version
|
||||||
|
Show version information
|
||||||
|
|
||||||
|
.SH COMMANDS
|
||||||
|
.TP
|
||||||
|
.B init
|
||||||
|
Initialize empty database with schema
|
||||||
|
.TP
|
||||||
|
.B list
|
||||||
|
List available data sources with license information
|
||||||
|
.TP
|
||||||
|
.B status
|
||||||
|
Show current database status and statistics
|
||||||
|
.TP
|
||||||
|
.B update [\fISOURCE\fR...]
|
||||||
|
Update data from specified sources, or all safe sources if none specified
|
||||||
|
.TP
|
||||||
|
.B import \fISOURCE\fR
|
||||||
|
Import data from a specific source with license acceptance
|
||||||
|
.TP
|
||||||
|
.B clear \fISOURCE\fR
|
||||||
|
Remove all data from the specified source
|
||||||
|
.TP
|
||||||
|
.B reset
|
||||||
|
Clear all data and reset database (destructive)
|
||||||
|
|
||||||
|
.SH DATA SOURCES
|
||||||
|
.SS Safe Sources (Public Domain)
|
||||||
|
These sources are automatically included in
|
||||||
|
.B update
|
||||||
|
operations:
|
||||||
|
.TP
|
||||||
|
.B ourairports
|
||||||
|
Public domain airport database from OurAirports.com
|
||||||
|
.TP
|
||||||
|
.B faa-registry
|
||||||
|
US aircraft registration database (FAA, public domain)
|
||||||
|
|
||||||
|
.SS License-Required Sources
|
||||||
|
These sources require explicit license acceptance:
|
||||||
|
.TP
|
||||||
|
.B openflights
|
||||||
|
Comprehensive airline and airport database (AGPL-3.0 license)
|
||||||
|
|
||||||
|
.SH EXAMPLES
|
||||||
|
.TP
|
||||||
|
Initialize database and import safe data:
|
||||||
|
.EX
|
||||||
|
skyview-data init
|
||||||
|
skyview-data update
|
||||||
|
.EE
|
||||||
|
.TP
|
||||||
|
Import OpenFlights data with license acceptance:
|
||||||
|
.EX
|
||||||
|
skyview-data import openflights
|
||||||
|
.EE
|
||||||
|
.TP
|
||||||
|
Check database status:
|
||||||
|
.EX
|
||||||
|
skyview-data status
|
||||||
|
.EE
|
||||||
|
.TP
|
||||||
|
Set up automatic updates via systemd timer:
|
||||||
|
.EX
|
||||||
|
systemctl enable skyview-database-update.timer
|
||||||
|
systemctl start skyview-database-update.timer
|
||||||
|
.EE
|
||||||
|
|
||||||
|
.SH CRON AUTOMATION
|
||||||
|
For automated updates,
|
||||||
|
.B skyview-data update
|
||||||
|
is designed to work seamlessly with cron:
|
||||||
|
|
||||||
|
.EX
|
||||||
|
# Update weekly on Sunday at 3 AM
|
||||||
|
0 3 * * 0 /usr/bin/skyview-data update
|
||||||
|
.EE
|
||||||
|
|
||||||
|
The command automatically:
|
||||||
|
.RS
|
||||||
|
.IP \(bu 2
|
||||||
|
Creates the database if it doesn't exist
|
||||||
|
.IP \(bu 2
|
||||||
|
Updates only safe (public domain) sources
|
||||||
|
.IP \(bu 2
|
||||||
|
Provides proper exit codes for monitoring
|
||||||
|
.IP \(bu 2
|
||||||
|
Logs to standard output with timestamps
|
||||||
|
.RE
|
||||||
|
|
||||||
|
.SH SYSTEMD INTEGRATION
|
||||||
|
The Debian package includes systemd timer integration:
|
||||||
|
|
||||||
|
.EX
|
||||||
|
# Enable automatic weekly updates
|
||||||
|
systemctl enable skyview-database-update.timer
|
||||||
|
systemctl start skyview-database-update.timer
|
||||||
|
|
||||||
|
# Check timer status
|
||||||
|
systemctl status skyview-database-update.timer
|
||||||
|
|
||||||
|
# View update logs
|
||||||
|
journalctl -u skyview-database-update.service
|
||||||
|
.EE
|
||||||
|
|
||||||
|
.SH FILES
|
||||||
|
.TP
|
||||||
|
.I /var/lib/skyview/skyview.db
|
||||||
|
System-wide database location
|
||||||
|
.TP
|
||||||
|
.I ~/.local/share/skyview/skyview.db
|
||||||
|
User-specific database location
|
||||||
|
.TP
|
||||||
|
.I /var/log/skyview/
|
||||||
|
Log directory for database operations
|
||||||
|
|
||||||
|
.SH EXIT STATUS
|
||||||
|
.TP
|
||||||
|
.B 0
|
||||||
|
Success
|
||||||
|
.TP
|
||||||
|
.B 1
|
||||||
|
General error or command failure
|
||||||
|
.TP
|
||||||
|
.B 2
|
||||||
|
Invalid arguments or usage
|
||||||
|
|
||||||
|
.SH SECURITY
|
||||||
|
All external data downloads use HTTPS. No sensitive information is transmitted. The tool processes only publicly available aviation data.
|
||||||
|
|
||||||
|
License-required sources require explicit user acceptance before import.
|
||||||
|
|
||||||
|
.SH LICENSE COMPLIANCE
|
||||||
|
.B skyview-data
|
||||||
|
maintains strict license separation:
|
||||||
|
.RS
|
||||||
|
.IP \(bu 2
|
||||||
|
SkyView binary contains no external data (MIT license maintained)
|
||||||
|
.IP \(bu 2
|
||||||
|
Each data source tracks its license and user acceptance
|
||||||
|
.IP \(bu 2
|
||||||
|
Users choose which sources to import based on license compatibility
|
||||||
|
.IP \(bu 2
|
||||||
|
Automatic updates only include public domain sources
|
||||||
|
.RE
|
||||||
|
|
||||||
|
.SH SEE ALSO
|
||||||
|
.BR skyview (1),
|
||||||
|
.BR systemctl (1),
|
||||||
|
.BR crontab (5)
|
||||||
|
|
||||||
|
.SH AUTHOR
|
||||||
|
SkyView is developed as an open-source ADS-B aircraft tracking system.
|
||||||
|
|
||||||
|
.SH REPORTING BUGS
|
||||||
|
Report bugs and issues at the project repository.
|
||||||
|
|
@ -107,11 +107,35 @@ SkyView is a high-performance, multi-source ADS-B aircraft tracking system built
|
||||||
- Aircraft state management and lifecycle tracking
|
- Aircraft state management and lifecycle tracking
|
||||||
- Historical data collection (position, altitude, speed, signal trails)
|
- Historical data collection (position, altitude, speed, signal trails)
|
||||||
- Automatic stale aircraft cleanup
|
- Automatic stale aircraft cleanup
|
||||||
|
- SQLite database integration for persistent storage
|
||||||
|
|
||||||
**Files**:
|
**Files**:
|
||||||
- `merger.go`: Multi-source data fusion engine
|
- `merger.go`: Multi-source data fusion engine
|
||||||
|
|
||||||
### 4. ICAO Country Database (`internal/icao/`)
|
### 4. Database System (`internal/database/`)
|
||||||
|
|
||||||
|
**Purpose**: Provides persistent storage for historical aircraft data and callsign enhancement data
|
||||||
|
|
||||||
|
**Key Features**:
|
||||||
|
- SQLite-based storage with versioned schema migrations
|
||||||
|
- Aircraft position history with configurable retention
|
||||||
|
- Embedded OpenFlights airline and airport databases
|
||||||
|
- Callsign lookup cache for external API results
|
||||||
|
- Privacy mode for air-gapped operation
|
||||||
|
- Automatic database maintenance and cleanup
|
||||||
|
|
||||||
|
**Files**:
|
||||||
|
- `database.go`: Core database operations and schema management
|
||||||
|
- `migrations.go`: Database schema versioning and migration system
|
||||||
|
- `callsign.go`: Callsign enhancement and cache management
|
||||||
|
|
||||||
|
**Storage Components**:
|
||||||
|
- **Aircraft History**: Time-series position data with source attribution
|
||||||
|
- **OpenFlights Data**: Embedded airline/airport reference data
|
||||||
|
- **Callsign Cache**: External API lookup results with TTL
|
||||||
|
- **Schema Versioning**: Migration tracking and rollback support
|
||||||
|
|
||||||
|
### 5. ICAO Country Database (`internal/icao/`)
|
||||||
|
|
||||||
**Purpose**: Provides comprehensive ICAO address to country mapping
|
**Purpose**: Provides comprehensive ICAO address to country mapping
|
||||||
|
|
||||||
|
|
@ -125,7 +149,7 @@ SkyView is a high-performance, multi-source ADS-B aircraft tracking system built
|
||||||
**Files**:
|
**Files**:
|
||||||
- `database.go`: In-memory ICAO allocation database with binary search
|
- `database.go`: In-memory ICAO allocation database with binary search
|
||||||
|
|
||||||
### 5. HTTP/WebSocket Server (`internal/server/`)
|
### 6. HTTP/WebSocket Server (`internal/server/`)
|
||||||
|
|
||||||
**Purpose**: Serves web interface and provides low-latency data streaming
|
**Purpose**: Serves web interface and provides low-latency data streaming
|
||||||
|
|
||||||
|
|
@ -138,7 +162,7 @@ SkyView is a high-performance, multi-source ADS-B aircraft tracking system built
|
||||||
**Files**:
|
**Files**:
|
||||||
- `server.go`: HTTP server and WebSocket handler
|
- `server.go`: HTTP server and WebSocket handler
|
||||||
|
|
||||||
### 6. Web Frontend (`assets/static/`)
|
### 7. Web Frontend (`assets/static/`)
|
||||||
|
|
||||||
**Purpose**: Interactive web interface for aircraft tracking and visualization
|
**Purpose**: Interactive web interface for aircraft tracking and visualization
|
||||||
|
|
||||||
|
|
@ -220,6 +244,17 @@ SkyView is a high-performance, multi-source ADS-B aircraft tracking system built
|
||||||
"latitude": 51.4700, // Map center point
|
"latitude": 51.4700, // Map center point
|
||||||
"longitude": -0.4600,
|
"longitude": -0.4600,
|
||||||
"name": "Origin Name"
|
"name": "Origin Name"
|
||||||
|
},
|
||||||
|
"database": {
|
||||||
|
"path": "", // Auto-resolved: /var/lib/skyview/skyview.db
|
||||||
|
"max_history_days": 7, // Data retention period
|
||||||
|
"backup_on_upgrade": true // Backup before migrations
|
||||||
|
},
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true, // Enable callsign enhancement
|
||||||
|
"cache_hours": 24, // External API cache TTL
|
||||||
|
"external_apis": true, // Allow external API calls
|
||||||
|
"privacy_mode": false // Disable external data transmission
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
@ -233,7 +268,8 @@ SkyView is a high-performance, multi-source ADS-B aircraft tracking system built
|
||||||
- **Non-blocking I/O**: Asynchronous network operations
|
- **Non-blocking I/O**: Asynchronous network operations
|
||||||
|
|
||||||
### Memory Management
|
### Memory Management
|
||||||
- **Bounded History**: Configurable limits on historical data storage
|
- **Database Storage**: Persistent history reduces memory usage
|
||||||
|
- **Configurable Retention**: Database cleanup based on age and limits
|
||||||
- **Automatic Cleanup**: Stale aircraft removal to prevent memory leaks
|
- **Automatic Cleanup**: Stale aircraft removal to prevent memory leaks
|
||||||
- **Efficient Data Structures**: Maps for O(1) aircraft lookups
|
- **Efficient Data Structures**: Maps for O(1) aircraft lookups
|
||||||
- **Embedded Assets**: Static files bundled in binary
|
- **Embedded Assets**: Static files bundled in binary
|
||||||
|
|
@ -260,7 +296,8 @@ SkyView is a high-performance, multi-source ADS-B aircraft tracking system built
|
||||||
### Data Privacy
|
### Data Privacy
|
||||||
- **Public ADS-B Data**: Only processes publicly broadcast aircraft data
|
- **Public ADS-B Data**: Only processes publicly broadcast aircraft data
|
||||||
- **No Personal Information**: Aircraft tracking only, no passenger data
|
- **No Personal Information**: Aircraft tracking only, no passenger data
|
||||||
- **Local Processing**: No data transmitted to external services
|
- **Privacy Mode**: Complete offline operation with external APIs disabled
|
||||||
|
- **Local Processing**: All data processed and stored locally
|
||||||
- **Historical Limits**: Configurable data retention periods
|
- **Historical Limits**: Configurable data retention periods
|
||||||
|
|
||||||
## External Resources
|
## External Resources
|
||||||
|
|
|
||||||
708
docs/CONFIGURATION.md
Normal file
708
docs/CONFIGURATION.md
Normal file
|
|
@ -0,0 +1,708 @@
|
||||||
|
# SkyView Configuration Guide
|
||||||
|
|
||||||
|
This document provides comprehensive configuration options for SkyView, including server settings, data sources, database management, and external aviation data integration.
|
||||||
|
|
||||||
|
## Configuration File Format
|
||||||
|
|
||||||
|
SkyView uses JSON configuration files. The default locations are:
|
||||||
|
- **System service**: `/etc/skyview-adsb/config.json`
|
||||||
|
- **User mode**: `~/.config/skyview/config.json`
|
||||||
|
- **Current directory**: `./config.json`
|
||||||
|
- **Custom path**: Specify with `-config path/to/config.json`
|
||||||
|
|
||||||
|
## Complete Configuration Example
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"server": {
|
||||||
|
"host": "",
|
||||||
|
"port": 8080
|
||||||
|
},
|
||||||
|
"sources": [
|
||||||
|
{
|
||||||
|
"id": "primary",
|
||||||
|
"name": "Primary Site",
|
||||||
|
"host": "localhost",
|
||||||
|
"port": 30005,
|
||||||
|
"format": "beast",
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"altitude": 50.0,
|
||||||
|
"enabled": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"origin": {
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"name": "Control Tower"
|
||||||
|
},
|
||||||
|
"settings": {
|
||||||
|
"history_limit": 1000,
|
||||||
|
"stale_timeout": 60,
|
||||||
|
"update_rate": 1
|
||||||
|
},
|
||||||
|
"database": {
|
||||||
|
"path": "",
|
||||||
|
"max_history_days": 7,
|
||||||
|
"backup_on_upgrade": true,
|
||||||
|
"max_open_conns": 10,
|
||||||
|
"max_idle_conns": 5
|
||||||
|
},
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"cache_hours": 24,
|
||||||
|
"privacy_mode": false,
|
||||||
|
"sources": {
|
||||||
|
"openflights_airlines": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 1
|
||||||
|
},
|
||||||
|
"openflights_airports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 2
|
||||||
|
},
|
||||||
|
"ourairports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 3
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration Sections
|
||||||
|
|
||||||
|
### Server Configuration
|
||||||
|
|
||||||
|
Controls the web server and API endpoints.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"server": {
|
||||||
|
"host": "",
|
||||||
|
"port": 8080
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Options
|
||||||
|
|
||||||
|
- **`host`** (string): Interface to bind to
|
||||||
|
- `""` or `"0.0.0.0"` = All interfaces (default)
|
||||||
|
- `"127.0.0.1"` = Localhost only (IPv4)
|
||||||
|
- `"::1"` = Localhost only (IPv6)
|
||||||
|
- `"192.168.1.100"` = Specific interface
|
||||||
|
|
||||||
|
- **`port`** (integer): TCP port for web interface
|
||||||
|
- Default: `8080`
|
||||||
|
- Valid range: `1-65535`
|
||||||
|
- Ports below 1024 require root privileges
|
||||||
|
|
||||||
|
### ADS-B Data Sources
|
||||||
|
|
||||||
|
Configures connections to dump1090/readsb receivers.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"sources": [
|
||||||
|
{
|
||||||
|
"id": "primary",
|
||||||
|
"name": "Primary Site",
|
||||||
|
"host": "localhost",
|
||||||
|
"port": 30005,
|
||||||
|
"format": "beast",
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"altitude": 50.0,
|
||||||
|
"enabled": true
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Source Options
|
||||||
|
|
||||||
|
- **`id`** (string, required): Unique identifier for this source
|
||||||
|
- **`name`** (string, required): Human-readable name displayed in UI
|
||||||
|
- **`host`** (string, required): Hostname or IP address of receiver
|
||||||
|
- **`port`** (integer, required): TCP port of receiver
|
||||||
|
- **`format`** (string, optional): Data format
|
||||||
|
- `"beast"` = Beast binary format (port 30005, default)
|
||||||
|
- `"vrs"` = VRS JSON format (port 33005)
|
||||||
|
- **`latitude`** (number, required): Receiver latitude in decimal degrees
|
||||||
|
- **`longitude`** (number, required): Receiver longitude in decimal degrees
|
||||||
|
- **`altitude`** (number, optional): Receiver altitude in meters above sea level
|
||||||
|
- **`enabled`** (boolean): Enable/disable this source (default: `true`)
|
||||||
|
|
||||||
|
#### Multiple Sources Example
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"sources": [
|
||||||
|
{
|
||||||
|
"id": "site1",
|
||||||
|
"name": "North Site",
|
||||||
|
"host": "192.168.1.100",
|
||||||
|
"port": 30005,
|
||||||
|
"latitude": 51.50,
|
||||||
|
"longitude": -0.46,
|
||||||
|
"enabled": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "site2",
|
||||||
|
"name": "South Site (VRS)",
|
||||||
|
"host": "192.168.1.101",
|
||||||
|
"port": 33005,
|
||||||
|
"format": "vrs",
|
||||||
|
"latitude": 51.44,
|
||||||
|
"longitude": -0.46,
|
||||||
|
"enabled": true
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Map Origin Configuration
|
||||||
|
|
||||||
|
Sets the default map center and reference point for the web interface. This is **different** from the ADS-B receiver locations defined in `sources`.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"origin": {
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"name": "Control Tower"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Purpose and Usage
|
||||||
|
|
||||||
|
The **`origin`** defines where the map centers when users first load the web interface:
|
||||||
|
|
||||||
|
- **Map Center**: Initial view focus when loading the web interface
|
||||||
|
- **Reference Point**: Visual "home" location for navigation
|
||||||
|
- **User Experience**: Where operators expect to see coverage area
|
||||||
|
- **Reset Target**: Where "Reset to Origin" button returns the map view
|
||||||
|
|
||||||
|
This is **separate from** the `sources` coordinates, which define:
|
||||||
|
- Physical ADS-B receiver locations for signal processing
|
||||||
|
- Multi-source data fusion calculations
|
||||||
|
- Coverage area computation
|
||||||
|
- Signal strength weighting
|
||||||
|
|
||||||
|
#### Options
|
||||||
|
|
||||||
|
- **`latitude`** (number, required): Center latitude in decimal degrees
|
||||||
|
- **`longitude`** (number, required): Center longitude in decimal degrees
|
||||||
|
- **`name`** (string, optional): Display name for the origin point (shown in UI)
|
||||||
|
|
||||||
|
#### Example: Multi-Site Configuration
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"origin": {
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"name": "London Control Center"
|
||||||
|
},
|
||||||
|
"sources": [
|
||||||
|
{
|
||||||
|
"id": "north",
|
||||||
|
"name": "North Site",
|
||||||
|
"latitude": 51.5200,
|
||||||
|
"longitude": -0.4600
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "south",
|
||||||
|
"name": "South Site",
|
||||||
|
"latitude": 51.4200,
|
||||||
|
"longitude": -0.4600
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
In this configuration:
|
||||||
|
- **Map centers** on the Control Center for optimal viewing
|
||||||
|
- **Two receivers** located north and south provide coverage
|
||||||
|
- **Users see** the control area as the focal point
|
||||||
|
- **System uses** both receiver locations for signal processing
|
||||||
|
|
||||||
|
#### Single-Site Simplification
|
||||||
|
|
||||||
|
For single receiver deployments, origin and source coordinates are typically the same:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"origin": {
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"name": "Primary Site"
|
||||||
|
},
|
||||||
|
"sources": [
|
||||||
|
{
|
||||||
|
"id": "primary",
|
||||||
|
"name": "Primary Receiver",
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### General Settings
|
||||||
|
|
||||||
|
Global application behavior settings.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"settings": {
|
||||||
|
"history_limit": 1000,
|
||||||
|
"stale_timeout": 60,
|
||||||
|
"update_rate": 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Options
|
||||||
|
|
||||||
|
- **`history_limit`** (integer): Maximum aircraft position history points per aircraft
|
||||||
|
- Default: `1000`
|
||||||
|
- Higher values = longer trails, more memory usage
|
||||||
|
- `0` = No history limit
|
||||||
|
|
||||||
|
- **`stale_timeout`** (integer): Seconds before aircraft is considered stale/inactive
|
||||||
|
- Default: `60` seconds
|
||||||
|
- Range: `10-600` seconds
|
||||||
|
|
||||||
|
- **`update_rate`** (integer): WebSocket update rate in seconds
|
||||||
|
- Default: `1` second
|
||||||
|
- Range: `1-10` seconds
|
||||||
|
- Lower values = more responsive, higher bandwidth
|
||||||
|
|
||||||
|
### Database Configuration
|
||||||
|
|
||||||
|
Controls SQLite database storage and performance.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"database": {
|
||||||
|
"path": "",
|
||||||
|
"max_history_days": 7,
|
||||||
|
"backup_on_upgrade": true,
|
||||||
|
"max_open_conns": 10,
|
||||||
|
"max_idle_conns": 5
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Options
|
||||||
|
|
||||||
|
- **`path`** (string): Database file path
|
||||||
|
- `""` = Auto-resolve to system appropriate location
|
||||||
|
- Custom path: `"/var/lib/skyview-adsb/skyview.db"`
|
||||||
|
- Relative path: `"./data/skyview.db"`
|
||||||
|
|
||||||
|
- **`max_history_days`** (integer): Aircraft history retention in days
|
||||||
|
- Default: `7` days
|
||||||
|
- `0` = Keep all history (unlimited)
|
||||||
|
- Range: `1-365` days
|
||||||
|
|
||||||
|
- **`backup_on_upgrade`** (boolean): Create backup before schema upgrades
|
||||||
|
- Default: `true`
|
||||||
|
- Recommended to keep enabled for safety
|
||||||
|
|
||||||
|
- **`max_open_conns`** (integer): Maximum concurrent database connections
|
||||||
|
- Default: `10`
|
||||||
|
- Range: `1-100`
|
||||||
|
|
||||||
|
- **`max_idle_conns`** (integer): Maximum idle database connections in pool
|
||||||
|
- Default: `5`
|
||||||
|
- Range: `1-50`
|
||||||
|
  - Must not exceed `max_open_conns`
|
||||||
|
|
||||||
|
## External Aviation Data Sources
|
||||||
|
|
||||||
|
SkyView can enhance aircraft data using external aviation databases. This section configures callsign enhancement and airline/airport lookup functionality.
|
||||||
|
|
||||||
|
### Callsign Enhancement Configuration
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"cache_hours": 24,
|
||||||
|
"privacy_mode": false,
|
||||||
|
"sources": {
|
||||||
|
"openflights_airlines": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 1
|
||||||
|
},
|
||||||
|
"openflights_airports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 2
|
||||||
|
},
|
||||||
|
"ourairports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 3
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Main Callsign Options
|
||||||
|
|
||||||
|
- **`enabled`** (boolean): Enable callsign enhancement features
|
||||||
|
- Default: `true`
|
||||||
|
- Set to `false` to disable all callsign lookups
|
||||||
|
|
||||||
|
- **`cache_hours`** (integer): Hours to cache lookup results
|
||||||
|
- Default: `24` hours
|
||||||
|
- Range: `1-168` hours (1 hour to 1 week)
|
||||||
|
|
||||||
|
- **`privacy_mode`** (boolean): Disable all external data requests
|
||||||
|
- Default: `false`
|
||||||
|
- `true` = Local-only operation (no network requests)
|
||||||
|
- `false` = Allow external data loading
|
||||||
|
|
||||||
|
### Available External Data Sources
|
||||||
|
|
||||||
|
SkyView supports three external aviation data sources:
|
||||||
|
|
||||||
|
#### 1. OpenFlights Airlines Database
|
||||||
|
|
||||||
|
- **Content**: Global airline information with ICAO/IATA codes, callsigns, and country data
|
||||||
|
- **Records**: ~6,162 airlines worldwide
|
||||||
|
- **License**: AGPL-3.0 (runtime consumption allowed)
|
||||||
|
- **Source**: https://openflights.org/data.html
|
||||||
|
- **Updates**: Downloads latest data automatically
|
||||||
|
|
||||||
|
```json
|
||||||
|
"openflights_airlines": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 1
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 2. OpenFlights Airports Database
|
||||||
|
|
||||||
|
- **Content**: Global airport data with coordinates, codes, and basic metadata
|
||||||
|
- **Records**: ~7,698 airports worldwide
|
||||||
|
- **License**: AGPL-3.0 (runtime consumption allowed)
|
||||||
|
- **Source**: https://openflights.org/data.html
|
||||||
|
- **Updates**: Downloads latest data automatically
|
||||||
|
|
||||||
|
```json
|
||||||
|
"openflights_airports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 2
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 3. OurAirports Database
|
||||||
|
|
||||||
|
- **Content**: Comprehensive airport database with detailed geographic and operational metadata
|
||||||
|
- **Records**: ~83,557 airports worldwide (includes small airfields)
|
||||||
|
- **License**: Public Domain (CC0)
|
||||||
|
- **Source**: https://ourairports.com/data/
|
||||||
|
- **Updates**: Downloads latest data automatically
|
||||||
|
|
||||||
|
```json
|
||||||
|
"ourairports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 3
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Source Configuration Options
|
||||||
|
|
||||||
|
- **`enabled`** (boolean): Enable/disable this specific data source
|
||||||
|
- **`priority`** (integer): Processing priority (lower number = higher priority)
|
||||||
|
|
||||||
|
**Note**: License information and consent requirements are handled automatically by the system. All currently available data sources are safe for automatic loading without explicit consent.
|
||||||
|
|
||||||
|
### Data Loading Performance
|
||||||
|
|
||||||
|
When all sources are enabled, expect the following performance:
|
||||||
|
|
||||||
|
- **OpenFlights Airlines**: 6,162 records in ~350ms
|
||||||
|
- **OpenFlights Airports**: 7,698 records in ~640ms
|
||||||
|
- **OurAirports**: 83,557 records in ~2.2s
|
||||||
|
- **Total**: 97,417 records in ~3.2s
|
||||||
|
|
||||||
|
## Privacy and Security Settings
|
||||||
|
|
||||||
|
### Privacy Mode
|
||||||
|
|
||||||
|
Enable privacy mode to disable all external data requests:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"callsign": {
|
||||||
|
"privacy_mode": true,
|
||||||
|
"sources": {
|
||||||
|
"openflights_airlines": {
|
||||||
|
"enabled": false
|
||||||
|
},
|
||||||
|
"openflights_airports": {
|
||||||
|
"enabled": false
|
||||||
|
},
|
||||||
|
"ourairports": {
|
||||||
|
"enabled": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Privacy Mode Features
|
||||||
|
|
||||||
|
- **No External Requests**: Completely disables all external data loading
|
||||||
|
- **Local-Only Operation**: Uses only embedded data and local cache
|
||||||
|
- **Air-Gapped Compatible**: Suitable for isolated networks
|
||||||
|
- **Compliance**: Meets strict privacy requirements
|
||||||
|
|
||||||
|
### Selective Source Control
|
||||||
|
|
||||||
|
Enable only specific data sources:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"callsign": {
|
||||||
|
"sources": {
|
||||||
|
"openflights_airlines": {
|
||||||
|
"enabled": true
|
||||||
|
},
|
||||||
|
"openflights_airports": {
|
||||||
|
"enabled": false
|
||||||
|
},
|
||||||
|
"ourairports": {
|
||||||
|
"enabled": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Database Management Commands
|
||||||
|
|
||||||
|
### Updating External Data Sources
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Update all enabled external data sources
|
||||||
|
skyview-data -config /path/to/config.json update
|
||||||
|
|
||||||
|
# List available data sources
|
||||||
|
skyview-data -config /path/to/config.json list
|
||||||
|
|
||||||
|
# Check database status and loaded sources
|
||||||
|
skyview-data -config /path/to/config.json status
|
||||||
|
```
|
||||||
|
|
||||||
|
### Database Optimization
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Optimize database storage efficiency
|
||||||
|
skyview-data -config /path/to/config.json optimize
|
||||||
|
|
||||||
|
# Check optimization statistics only
|
||||||
|
skyview-data -config /path/to/config.json optimize --stats-only
|
||||||
|
|
||||||
|
# Force optimization without prompts
|
||||||
|
skyview-data -config /path/to/config.json optimize --force
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration Validation
|
||||||
|
|
||||||
|
### Validating Configuration
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Test configuration file syntax
|
||||||
|
skyview -config config.json -test
|
||||||
|
|
||||||
|
# Verify data source connectivity
|
||||||
|
skyview-data -config config.json list
|
||||||
|
```
|
||||||
|
|
||||||
|
### Common Configuration Errors
|
||||||
|
|
||||||
|
#### JSON Syntax Errors
|
||||||
|
```
|
||||||
|
Error: invalid character '}' looking for beginning of object key string
|
||||||
|
```
|
||||||
|
**Solution**: Check for trailing commas, missing quotes, or bracket mismatches.
|
||||||
|
|
||||||
|
#### Invalid Data Types
|
||||||
|
```
|
||||||
|
Error: json: cannot unmarshal string into Go struct field
|
||||||
|
```
|
||||||
|
**Solution**: Ensure numbers are not quoted, booleans use true/false, etc.
|
||||||
|
|
||||||
|
#### Missing Required Fields
|
||||||
|
```
|
||||||
|
Error: source 'primary' missing required field: latitude
|
||||||
|
```
|
||||||
|
**Solution**: Add all required fields for each configured source.
|
||||||
|
|
||||||
|
### Minimal Configuration
|
||||||
|
|
||||||
|
For basic operation without external data sources:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"server": {
|
||||||
|
"host": "",
|
||||||
|
"port": 8080
|
||||||
|
},
|
||||||
|
"sources": [
|
||||||
|
{
|
||||||
|
"id": "primary",
|
||||||
|
"name": "Local Receiver",
|
||||||
|
"host": "localhost",
|
||||||
|
"port": 30005,
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"enabled": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"settings": {
|
||||||
|
"history_limit": 1000,
|
||||||
|
"stale_timeout": 60,
|
||||||
|
"update_rate": 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Full-Featured Configuration
|
||||||
|
|
||||||
|
For complete functionality with all external data sources:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"server": {
|
||||||
|
"host": "",
|
||||||
|
"port": 8080
|
||||||
|
},
|
||||||
|
"sources": [
|
||||||
|
{
|
||||||
|
"id": "primary",
|
||||||
|
"name": "Primary Site",
|
||||||
|
"host": "localhost",
|
||||||
|
"port": 30005,
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"altitude": 50.0,
|
||||||
|
"enabled": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"origin": {
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"name": "Control Tower"
|
||||||
|
},
|
||||||
|
"settings": {
|
||||||
|
"history_limit": 4000,
|
||||||
|
"stale_timeout": 60,
|
||||||
|
"update_rate": 1
|
||||||
|
},
|
||||||
|
"database": {
|
||||||
|
"path": "",
|
||||||
|
"max_history_days": 7,
|
||||||
|
"backup_on_upgrade": true,
|
||||||
|
"max_open_conns": 10,
|
||||||
|
"max_idle_conns": 5
|
||||||
|
},
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"cache_hours": 24,
|
||||||
|
"privacy_mode": false,
|
||||||
|
"sources": {
|
||||||
|
"openflights_airlines": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 1
|
||||||
|
},
|
||||||
|
"openflights_airports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 2
|
||||||
|
},
|
||||||
|
"ourairports": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 3
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This configuration enables all SkyView features including multi-source ADS-B data fusion, comprehensive aviation database integration, and database optimization.
|
||||||
|
|
||||||
|
## Environment-Specific Examples
|
||||||
|
|
||||||
|
### Production System Service
|
||||||
|
|
||||||
|
Configuration for systemd service deployment:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"server": {
|
||||||
|
"host": "0.0.0.0",
|
||||||
|
"port": 8080
|
||||||
|
},
|
||||||
|
"database": {
|
||||||
|
"path": "/var/lib/skyview-adsb/skyview.db",
|
||||||
|
"max_history_days": 30,
|
||||||
|
"backup_on_upgrade": true
|
||||||
|
},
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"privacy_mode": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Development/Testing
|
||||||
|
|
||||||
|
Configuration for development use:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"server": {
|
||||||
|
"host": "127.0.0.1",
|
||||||
|
"port": 3000
|
||||||
|
},
|
||||||
|
"database": {
|
||||||
|
"path": "./dev-skyview.db",
|
||||||
|
"max_history_days": 1
|
||||||
|
},
|
||||||
|
"settings": {
|
||||||
|
"history_limit": 100,
|
||||||
|
"update_rate": 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Air-Gapped/Secure Environment
|
||||||
|
|
||||||
|
Configuration for isolated networks:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"privacy_mode": true,
|
||||||
|
"sources": {
|
||||||
|
"openflights_airlines": {
|
||||||
|
"enabled": false
|
||||||
|
},
|
||||||
|
"openflights_airports": {
|
||||||
|
"enabled": false
|
||||||
|
},
|
||||||
|
"ourairports": {
|
||||||
|
"enabled": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
259
docs/CRON_SETUP.md
Normal file
259
docs/CRON_SETUP.md
Normal file
|
|
@ -0,0 +1,259 @@
|
||||||
|
# SkyView Database Auto-Update with Cron
|
||||||
|
|
||||||
|
This guide explains how to set up automatic database updates for SkyView using cron jobs.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
SkyView can automatically update its aviation database from public domain sources using cron jobs. This ensures your aircraft callsign data stays current without manual intervention.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- ✅ **Auto-initialization**: Creates empty database if it doesn't exist
|
||||||
|
- ✅ **Safe sources only**: Updates only public domain data (no license issues)
|
||||||
|
- ✅ **Cron-friendly**: Proper logging and exit codes for automated execution
|
||||||
|
- ✅ **Lock file protection**: Prevents concurrent updates
|
||||||
|
- ✅ **Error handling**: Graceful failure handling with logging
|
||||||
|
|
||||||
|
## Quick Setup
|
||||||
|
|
||||||
|
### 1. Command Line Tool
|
||||||
|
|
||||||
|
The `skyview-data` command is designed to work perfectly with cron:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Auto-initialize database and update safe sources
|
||||||
|
skyview-data update
|
||||||
|
|
||||||
|
# Check what's loaded
|
||||||
|
skyview-data status
|
||||||
|
|
||||||
|
# List available sources
|
||||||
|
skyview-data list
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Cron Job Examples
|
||||||
|
|
||||||
|
#### Daily Update (Recommended)
|
||||||
|
```bash
|
||||||
|
# Add to crontab: crontab -e
|
||||||
|
# Update database daily at 3 AM
|
||||||
|
0 3 * * * /usr/bin/skyview-data update >>/var/log/skyview-update.log 2>&1
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Weekly Update
|
||||||
|
```bash
|
||||||
|
# Update database weekly on Sunday at 2 AM
|
||||||
|
0 2 * * 0 /usr/bin/skyview-data update >>/var/log/skyview-update.log 2>&1
|
||||||
|
```
|
||||||
|
|
||||||
|
#### With Helper Script (Debian Package)
|
||||||
|
```bash
|
||||||
|
# Use the provided update script
|
||||||
|
0 3 * * * /usr/share/skyview/scripts/update-database.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. System Service User
|
||||||
|
|
||||||
|
For Debian package installations, use the skyview service user:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Edit skyview user's crontab
|
||||||
|
sudo crontab -u skyview -e
|
||||||
|
|
||||||
|
# Add daily update
|
||||||
|
0 3 * * * /usr/bin/skyview-data update
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
### Database Location
|
||||||
|
|
||||||
|
The tool automatically detects the database location:
|
||||||
|
- **System service**: `/var/lib/skyview/skyview.db`
|
||||||
|
- **User install**: `~/.local/share/skyview/skyview.db`
|
||||||
|
- **Current directory**: `./skyview.db`
|
||||||
|
|
||||||
|
### Custom Database Path
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Specify custom database location
|
||||||
|
skyview-data -db /custom/path/skyview.db update
|
||||||
|
```
|
||||||
|
|
||||||
|
### Logging
|
||||||
|
|
||||||
|
For cron jobs, redirect output to log files:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Basic logging
|
||||||
|
skyview-data update >> /var/log/skyview-update.log 2>&1
|
||||||
|
|
||||||
|
# With timestamps (using helper script)
|
||||||
|
/usr/share/skyview/scripts/update-database.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
## Data Sources
|
||||||
|
|
||||||
|
### Safe Sources (Auto-Updated)
|
||||||
|
|
||||||
|
These sources are automatically included in `skyview-data update`:
|
||||||
|
|
||||||
|
- **OurAirports**: Public domain airport data
|
||||||
|
- **FAA Registry**: US aircraft registration (public domain)
|
||||||
|
- *(Additional safe sources as they become available)*
|
||||||
|
|
||||||
|
### License-Required Sources
|
||||||
|
|
||||||
|
These require explicit acceptance and are NOT included in automatic updates:
|
||||||
|
|
||||||
|
- **OpenFlights**: AGPL-3.0 licensed airline/airport data
|
||||||
|
|
||||||
|
To include license-required sources:
|
||||||
|
```bash
|
||||||
|
# Interactive acceptance
|
||||||
|
skyview-data import openflights
|
||||||
|
|
||||||
|
# Force acceptance (for automation)
|
||||||
|
skyview-data import openflights --force
|
||||||
|
```
|
||||||
|
|
||||||
|
## Monitoring
|
||||||
|
|
||||||
|
### Check Update Status
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# View database status
|
||||||
|
skyview-data status
|
||||||
|
|
||||||
|
# Example output:
|
||||||
|
# SkyView Database Status
|
||||||
|
# ======================
|
||||||
|
# Database: /var/lib/skyview/skyview.db
|
||||||
|
# Size: 15.4 MB
|
||||||
|
# Modified: 2025-01-15T03:00:12Z
|
||||||
|
#
|
||||||
|
# 📦 Loaded Data Sources (2):
|
||||||
|
# • OurAirports (Public Domain)
|
||||||
|
# • FAA Registry (Public Domain)
|
||||||
|
#
|
||||||
|
# 📊 Statistics:
|
||||||
|
# Aircraft History: 1,234 records
|
||||||
|
# Unique Aircraft: 567
|
||||||
|
# Last 24h: 89 records
|
||||||
|
```
|
||||||
|
|
||||||
|
### Log Monitoring
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# View recent updates
|
||||||
|
tail -f /var/log/skyview-update.log
|
||||||
|
|
||||||
|
# Check for errors
|
||||||
|
grep ERROR /var/log/skyview-update.log
|
||||||
|
```
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Common Issues
|
||||||
|
|
||||||
|
#### Database Creation Fails (Permissions)
|
||||||
|
```
|
||||||
|
ERROR: failed to create database: no write permission
|
||||||
|
```
|
||||||
|
**Solution**: Ensure the skyview user has write access to `/var/lib/skyview/`
|
||||||
|
|
||||||
|
#### Network Errors
|
||||||
|
```
|
||||||
|
ERROR: failed to download data: connection timeout
|
||||||
|
```
|
||||||
|
**Solution**: Check internet connectivity and firewall settings
|
||||||
|
|
||||||
|
#### Lock File Issues
|
||||||
|
```
|
||||||
|
ERROR: Another instance is already running
|
||||||
|
```
|
||||||
|
**Solution**: Wait for current update to finish, or remove stale lock file
|
||||||
|
|
||||||
|
### Manual Debugging
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Verbose output
|
||||||
|
skyview-data -v update
|
||||||
|
|
||||||
|
# Force update (skips locks)
|
||||||
|
skyview-data update --force
|
||||||
|
|
||||||
|
# Reset database
|
||||||
|
skyview-data reset --force
|
||||||
|
```
|
||||||
|
|
||||||
|
## Advanced Configuration
|
||||||
|
|
||||||
|
### Custom Update Script
|
||||||
|
|
||||||
|
Create your own update script with custom logic:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
#!/bin/bash
|
||||||
|
# custom-update.sh
|
||||||
|
|
||||||
|
# Only update if database is older than 7 days
|
||||||
|
if [ "$(find /var/lib/skyview/skyview.db -mtime +7)" ]; then
|
||||||
|
skyview-data update
|
||||||
|
systemctl reload skyview # Reload SkyView after update
|
||||||
|
fi
|
||||||
|
```
|
||||||
|
|
||||||
|
### Integration with SkyView Service
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Reload SkyView after database updates
|
||||||
|
skyview-data update && systemctl reload skyview
|
||||||
|
```
|
||||||
|
|
||||||
|
### Backup Before Updates
|
||||||
|
|
||||||
|
```bash
|
||||||
|
#!/bin/bash
|
||||||
|
# backup-and-update.sh
|
||||||
|
|
||||||
|
DB_PATH="/var/lib/skyview/skyview.db"
|
||||||
|
BACKUP_DIR="/var/backups/skyview"
|
||||||
|
|
||||||
|
# Create backup
|
||||||
|
mkdir -p "$BACKUP_DIR"
|
||||||
|
cp "$DB_PATH" "$BACKUP_DIR/skyview-$(date +%Y%m%d).db"
|
||||||
|
|
||||||
|
# Keep only backups from the last 7 days
|
||||||
|
find "$BACKUP_DIR" -name "skyview-*.db" -type f -mtime +7 -delete
|
||||||
|
|
||||||
|
# Update database
|
||||||
|
skyview-data update
|
||||||
|
```
|
||||||
|
|
||||||
|
## Security Considerations
|
||||||
|
|
||||||
|
### File Permissions
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Secure database directory
|
||||||
|
sudo chown skyview:skyview /var/lib/skyview
|
||||||
|
sudo chmod 755 /var/lib/skyview
|
||||||
|
sudo chmod 644 /var/lib/skyview/skyview.db
|
||||||
|
```
|
||||||
|
|
||||||
|
### Network Security
|
||||||
|
|
||||||
|
- Updates only download from trusted sources (GitHub, government sites)
|
||||||
|
- All downloads use HTTPS
|
||||||
|
- No sensitive data is transmitted
|
||||||
|
- Local processing only
|
||||||
|
|
||||||
|
### Resource Limits
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Limit resources in cron (optional)
|
||||||
|
0 3 * * * timeout 30m nice -n 10 skyview-data update
|
||||||
|
```
|
||||||
|
|
||||||
|
This setup ensures your SkyView installation maintains up-to-date aviation data automatically while respecting all license requirements and security best practices.
|
||||||
729
docs/DATABASE.md
Normal file
729
docs/DATABASE.md
Normal file
|
|
@ -0,0 +1,729 @@
|
||||||
|
# SkyView Database Architecture
|
||||||
|
|
||||||
|
This document describes SkyView's SQLite database architecture, migration system, and integration approach for persistent data storage.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
SkyView uses a single SQLite database to store:
|
||||||
|
- **Historic aircraft data**: Position history, message counts, signal strength
|
||||||
|
- **Callsign lookup data**: Cached airline/airport information from external APIs
|
||||||
|
- **Embedded aviation data**: OpenFlights airline and airport databases
|
||||||
|
|
||||||
|
## Database Design Principles
|
||||||
|
|
||||||
|
### Embedded Architecture
|
||||||
|
- Single SQLite file for all persistent data
|
||||||
|
- No external database dependencies
|
||||||
|
- Self-contained deployment with embedded schemas
|
||||||
|
- Backward compatibility through versioned migrations
|
||||||
|
|
||||||
|
### Performance Optimization
|
||||||
|
- Strategic indexing for time-series aircraft data
|
||||||
|
- Efficient lookups for callsign enhancement
|
||||||
|
- Configurable data retention policies
|
||||||
|
- Query optimization for real-time operations
|
||||||
|
|
||||||
|
### Data Safety
|
||||||
|
- Atomic migration transactions
|
||||||
|
- Pre-migration backups for destructive changes
|
||||||
|
- Data loss warnings for schema changes
|
||||||
|
- Rollback capabilities where possible
|
||||||
|
|
||||||
|
## Database Schema
|
||||||
|
|
||||||
|
### Core Tables
|
||||||
|
|
||||||
|
#### `schema_info`
|
||||||
|
Tracks database version and applied migrations:
|
||||||
|
```sql
|
||||||
|
CREATE TABLE schema_info (
|
||||||
|
version INTEGER PRIMARY KEY,
|
||||||
|
applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
description TEXT,
|
||||||
|
checksum TEXT
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### `aircraft_history`
|
||||||
|
Stores time-series aircraft position and message data:
|
||||||
|
```sql
|
||||||
|
CREATE TABLE aircraft_history (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
icao TEXT NOT NULL,
|
||||||
|
timestamp TIMESTAMP NOT NULL,
|
||||||
|
latitude REAL,
|
||||||
|
longitude REAL,
|
||||||
|
altitude INTEGER,
|
||||||
|
speed INTEGER,
|
||||||
|
track INTEGER,
|
||||||
|
vertical_rate INTEGER,
|
||||||
|
squawk TEXT,
|
||||||
|
callsign TEXT,
|
||||||
|
source_id TEXT NOT NULL,
|
||||||
|
signal_strength REAL
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Indexes:**
|
||||||
|
- `idx_aircraft_history_icao_time`: Fast queries by aircraft and time range
|
||||||
|
- `idx_aircraft_history_timestamp`: Time-based cleanup and queries
|
||||||
|
- `idx_aircraft_history_callsign`: Callsign-based searches
|
||||||
|
|
||||||
|
#### `airlines`
|
||||||
|
Multi-source airline database with unified schema:
|
||||||
|
```sql
|
||||||
|
CREATE TABLE airlines (
|
||||||
|
id INTEGER PRIMARY KEY,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
alias TEXT,
|
||||||
|
iata_code TEXT,
|
||||||
|
icao_code TEXT,
|
||||||
|
callsign TEXT,
|
||||||
|
country TEXT,
|
||||||
|
country_code TEXT,
|
||||||
|
active BOOLEAN DEFAULT 1,
|
||||||
|
data_source TEXT NOT NULL DEFAULT 'unknown',
|
||||||
|
source_id TEXT,
|
||||||
|
imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Indexes:**
|
||||||
|
- `idx_airlines_icao_code`: ICAO code lookup (primary for callsign enhancement)
|
||||||
|
- `idx_airlines_iata_code`: IATA code lookup
|
||||||
|
- `idx_airlines_callsign`: Radio callsign lookup
|
||||||
|
- `idx_airlines_country_code`: Country-based filtering
|
||||||
|
- `idx_airlines_active`: Active airlines filtering
|
||||||
|
- `idx_airlines_source`: Data source tracking
|
||||||
|
|
||||||
|
#### `airports`
|
||||||
|
Multi-source airport database with comprehensive metadata:
|
||||||
|
```sql
|
||||||
|
CREATE TABLE airports (
|
||||||
|
id INTEGER PRIMARY KEY,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
ident TEXT,
|
||||||
|
type TEXT,
|
||||||
|
city TEXT,
|
||||||
|
municipality TEXT,
|
||||||
|
region TEXT,
|
||||||
|
country TEXT,
|
||||||
|
country_code TEXT,
|
||||||
|
continent TEXT,
|
||||||
|
iata_code TEXT,
|
||||||
|
icao_code TEXT,
|
||||||
|
local_code TEXT,
|
||||||
|
gps_code TEXT,
|
||||||
|
latitude REAL,
|
||||||
|
longitude REAL,
|
||||||
|
elevation_ft INTEGER,
|
||||||
|
scheduled_service BOOLEAN DEFAULT 0,
|
||||||
|
home_link TEXT,
|
||||||
|
wikipedia_link TEXT,
|
||||||
|
keywords TEXT,
|
||||||
|
timezone_offset REAL,
|
||||||
|
timezone TEXT,
|
||||||
|
dst_type TEXT,
|
||||||
|
data_source TEXT NOT NULL DEFAULT 'unknown',
|
||||||
|
source_id TEXT,
|
||||||
|
imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Indexes:**
|
||||||
|
- `idx_airports_icao_code`: ICAO code lookup
|
||||||
|
- `idx_airports_iata_code`: IATA code lookup
|
||||||
|
- `idx_airports_ident`: Airport identifier lookup
|
||||||
|
- `idx_airports_country_code`: Country-based filtering
|
||||||
|
- `idx_airports_type`: Airport type filtering
|
||||||
|
- `idx_airports_coords`: Geographic coordinate queries
|
||||||
|
- `idx_airports_source`: Data source tracking
|
||||||
|
|
||||||
|
#### `callsign_cache`
|
||||||
|
Caches external API lookups and local enrichment for callsign enhancement:
|
||||||
|
```sql
|
||||||
|
CREATE TABLE callsign_cache (
|
||||||
|
callsign TEXT PRIMARY KEY,
|
||||||
|
airline_icao TEXT,
|
||||||
|
airline_iata TEXT,
|
||||||
|
airline_name TEXT,
|
||||||
|
airline_country TEXT,
|
||||||
|
flight_number TEXT,
|
||||||
|
origin_iata TEXT, -- Departure airport IATA code
|
||||||
|
destination_iata TEXT, -- Arrival airport IATA code
|
||||||
|
aircraft_type TEXT,
|
||||||
|
route TEXT, -- Full route description
|
||||||
|
status TEXT, -- Flight status (scheduled, delayed, etc.)
|
||||||
|
source TEXT NOT NULL DEFAULT 'local',
|
||||||
|
cached_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
expires_at TIMESTAMP NOT NULL
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Route Information Fields:**
|
||||||
|
- **`origin_iata`**: IATA code of departure airport (e.g., "JFK" for New York JFK)
|
||||||
|
- **`destination_iata`**: IATA code of arrival airport (e.g., "LAX" for Los Angeles)
|
||||||
|
- **`route`**: Human-readable route description (e.g., "JFK-LAX" or "New York to Los Angeles")
|
||||||
|
- **`status`**: Current flight status when available from external APIs
|
||||||
|
|
||||||
|
These fields enable enhanced flight tracking with origin-destination pairs and route visualization.
|
||||||
|
|
||||||
|
**Indexes:**
|
||||||
|
- `idx_callsign_cache_expires`: Efficient cache cleanup
|
||||||
|
- `idx_callsign_cache_airline`: Airline-based queries
|
||||||
|
|
||||||
|
#### `data_sources`
|
||||||
|
Tracks loaded external data sources and their metadata:
|
||||||
|
```sql
|
||||||
|
CREATE TABLE data_sources (
|
||||||
|
name TEXT PRIMARY KEY,
|
||||||
|
license TEXT NOT NULL,
|
||||||
|
url TEXT,
|
||||||
|
version TEXT,
|
||||||
|
imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
record_count INTEGER DEFAULT 0,
|
||||||
|
user_accepted_license BOOLEAN DEFAULT 0
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
## Database Location Strategy
|
||||||
|
|
||||||
|
### Path Resolution Order
|
||||||
|
1. **Explicit configuration**: `database.path` in config file
|
||||||
|
2. **System service**: `/var/lib/skyview/skyview.db`
|
||||||
|
3. **User mode**: `~/.local/share/skyview/skyview.db`
|
||||||
|
4. **Fallback**: `./skyview.db` in current directory
|
||||||
|
|
||||||
|
### Directory Permissions
|
||||||
|
- System: `root:root` with `755` permissions for `/var/lib/skyview/`
|
||||||
|
- User: User-owned directories with standard permissions
|
||||||
|
- Service: `skyview:skyview` user/group for system service
|
||||||
|
|
||||||
|
## Migration System
|
||||||
|
|
||||||
|
### Migration Structure
|
||||||
|
```go
|
||||||
|
type Migration struct {
|
||||||
|
Version int // Sequential version number
|
||||||
|
Description string // Human-readable description
|
||||||
|
Up string // SQL for applying migration
|
||||||
|
Down string // SQL for rollback (optional)
|
||||||
|
DataLoss bool // Warning flag for destructive changes
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Migration Process
|
||||||
|
1. **Version Check**: Compare current schema version with available migrations
|
||||||
|
2. **Backup**: Create automatic backup before destructive changes
|
||||||
|
3. **Transaction**: Wrap each migration in atomic transaction
|
||||||
|
4. **Validation**: Verify schema integrity after migration
|
||||||
|
5. **Logging**: Record successful migrations in `schema_info`
|
||||||
|
|
||||||
|
### Data Loss Protection
|
||||||
|
- Migrations marked with `DataLoss: true` require explicit user consent
|
||||||
|
- Automatic backups created before destructive operations
|
||||||
|
- Warning messages displayed during upgrade process
|
||||||
|
- Rollback SQL provided where possible
|
||||||
|
|
||||||
|
### Example Migration Sequence
|
||||||
|
```go
|
||||||
|
var migrations = []Migration{
|
||||||
|
{
|
||||||
|
Version: 1,
|
||||||
|
Description: "Initial schema with aircraft history",
|
||||||
|
Up: createInitialSchema,
|
||||||
|
DataLoss: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Version: 2,
|
||||||
|
Description: "Add OpenFlights airline and airport data",
|
||||||
|
Up: addAviationTables,
|
||||||
|
DataLoss: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Version: 3,
|
||||||
|
Description: "Add callsign lookup cache",
|
||||||
|
Up: addCallsignCache,
|
||||||
|
DataLoss: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Data Sources and Loading
|
||||||
|
|
||||||
|
SkyView supports multiple aviation data sources with automatic conflict resolution and license compliance.
|
||||||
|
|
||||||
|
### Supported Data Sources
|
||||||
|
|
||||||
|
#### OpenFlights Airlines Database
|
||||||
|
- **Source**: https://openflights.org/data.html
|
||||||
|
- **License**: Open Database License (ODbL) 1.0
|
||||||
|
- **Content**: Global airline data with ICAO/IATA codes, callsigns, and country information
|
||||||
|
- **Records**: ~6,162 airlines
|
||||||
|
- **Update Method**: Runtime download (no license confirmation required)
|
||||||
|
|
||||||
|
#### OpenFlights Airports Database
|
||||||
|
- **Source**: https://openflights.org/data.html
|
||||||
|
- **License**: Open Database License (ODbL) 1.0
|
||||||
|
- **Content**: Global airport data with coordinates, codes, and metadata
|
||||||
|
- **Records**: ~7,698 airports
|
||||||
|
- **Update Method**: Runtime download
|
||||||
|
|
||||||
|
#### OurAirports Database
|
||||||
|
- **Source**: https://ourairports.com/data/
|
||||||
|
- **License**: Creative Commons Zero (CC0) 1.0
|
||||||
|
- **Content**: Comprehensive airport database with detailed metadata
|
||||||
|
- **Records**: ~83,557 airports
|
||||||
|
- **Update Method**: Runtime download
|
||||||
|
|
||||||
|
### Data Loading System
|
||||||
|
|
||||||
|
#### Intelligent Conflict Resolution
|
||||||
|
The data loading system uses **INSERT OR REPLACE** upserts to handle overlapping data:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
INSERT OR REPLACE INTO airlines (id, name, alias, iata_code, icao_code, callsign, country, active, data_source)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
|
```
|
||||||
|
|
||||||
|
This ensures that:
|
||||||
|
- Duplicate records are automatically updated rather than causing errors
|
||||||
|
- Later data sources can override earlier ones
|
||||||
|
- Database integrity is maintained during bulk loads
|
||||||
|
|
||||||
|
#### Loading Process
|
||||||
|
1. **Source Validation**: Verify data source accessibility and format
|
||||||
|
2. **Incremental Processing**: Process data in chunks to manage memory
|
||||||
|
3. **Error Handling**: Log and continue on individual record errors
|
||||||
|
4. **Statistics Reporting**: Track records processed, added, and errors
|
||||||
|
5. **Source Tracking**: Record metadata about each loaded source
|
||||||
|
|
||||||
|
#### Performance Characteristics
|
||||||
|
- **OpenFlights Airlines**: ~6,162 records in ~363ms
|
||||||
|
- **OpenFlights Airports**: ~7,698 records in ~200ms
|
||||||
|
- **OurAirports**: ~83,557 records in ~980ms
|
||||||
|
- **Error Rate**: <0.1% under normal conditions
|
||||||
|
|
||||||
|
## Configuration Integration
|
||||||
|
|
||||||
|
### Database Configuration
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"database": {
|
||||||
|
"path": "/var/lib/skyview-adsb/skyview.db",
|
||||||
|
"max_history_days": 7,
|
||||||
|
"backup_on_upgrade": true,
|
||||||
|
"vacuum_interval": "24h",
|
||||||
|
"page_size": 4096
|
||||||
|
},
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"cache_hours": 24,
|
||||||
|
"external_apis": true,
|
||||||
|
"privacy_mode": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Configuration Fields
|
||||||
|
|
||||||
|
#### `database`
|
||||||
|
- **`path`**: Database file location (empty = auto-resolve)
|
||||||
|
- **`max_history_days`**: Retention policy for aircraft history (0 = unlimited)
|
||||||
|
- **`backup_on_upgrade`**: Create backup before schema migrations
|
||||||
|
|
||||||
|
#### `callsign`
|
||||||
|
- **`enabled`**: Enable callsign enhancement features
|
||||||
|
- **`cache_hours`**: TTL for cached external API results
|
||||||
|
- **`privacy_mode`**: Disable all external data requests
|
||||||
|
- **`sources`**: Independent control for each data source
|
||||||
|
|
||||||
|
### Enhanced Configuration Example
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"cache_hours": 24,
|
||||||
|
"privacy_mode": false,
|
||||||
|
"sources": {
|
||||||
|
"openflights_embedded": {
|
||||||
|
"enabled": true,
|
||||||
|
"priority": 1,
|
||||||
|
"license": "AGPL-3.0"
|
||||||
|
},
|
||||||
|
"faa_registry": {
|
||||||
|
"enabled": false,
|
||||||
|
"priority": 2,
|
||||||
|
"update_frequency": "weekly",
|
||||||
|
"license": "public_domain"
|
||||||
|
},
|
||||||
|
"opensky_api": {
|
||||||
|
"enabled": false,
|
||||||
|
"priority": 3,
|
||||||
|
"timeout_seconds": 5,
|
||||||
|
"max_retries": 2,
|
||||||
|
"requires_consent": true,
|
||||||
|
"license_warning": "Commercial use requires OpenSky Network consent",
|
||||||
|
"user_accepts_terms": false
|
||||||
|
},
|
||||||
|
"custom_database": {
|
||||||
|
"enabled": false,
|
||||||
|
"priority": 4,
|
||||||
|
"path": "",
|
||||||
|
"license": "user_verified"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"fallback_chain": ["openflights_embedded", "faa_registry", "opensky_api", "custom_database"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Individual Source Configuration Options
|
||||||
|
- **`enabled`**: Enable/disable this specific source
|
||||||
|
- **`priority`**: Processing order (lower numbers = higher priority)
|
||||||
|
- **`license`**: License type for compliance tracking
|
||||||
|
- **`requires_consent`**: Whether source requires explicit user consent
|
||||||
|
- **`user_accepts_terms`**: User acknowledgment of licensing terms
|
||||||
|
- **`timeout_seconds`**: Per-source timeout configuration
|
||||||
|
- **`max_retries`**: Per-source retry limits
|
||||||
|
- **`update_frequency`**: For downloadable sources (daily/weekly/monthly)
|
||||||
|
|
||||||
|
## Debian Package Integration
|
||||||
|
|
||||||
|
### Package Structure
|
||||||
|
```
|
||||||
|
/var/lib/skyview/ # Database directory
|
||||||
|
/etc/skyview/config.json # Default configuration
|
||||||
|
/usr/bin/skyview # Main application
|
||||||
|
/usr/share/skyview/ # Embedded resources
|
||||||
|
```
|
||||||
|
|
||||||
|
### Installation Process
|
||||||
|
1. **`postinst`**: Create directories, user accounts, permissions
|
||||||
|
2. **First Run**: Database initialization and migration on startup
|
||||||
|
3. **Upgrades**: Automatic schema migration with backup
|
||||||
|
4. **Service**: Systemd integration with proper database access
|
||||||
|
|
||||||
|
### Service User
|
||||||
|
- User: `skyview-adsb`
|
||||||
|
- Home: `/var/lib/skyview-adsb`
|
||||||
|
- Shell: `/bin/false` (service account)
|
||||||
|
- Database: Read/write access to `/var/lib/skyview-adsb/`
|
||||||
|
|
||||||
|
### Automatic Database Updates
|
||||||
|
The systemd service configuration includes automatic database updates on startup:
|
||||||
|
|
||||||
|
```ini
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
User=skyview-adsb
|
||||||
|
Group=skyview-adsb
|
||||||
|
# Update database before starting main service
|
||||||
|
ExecStartPre=/usr/bin/skyview-data -config /etc/skyview-adsb/config.json update
|
||||||
|
TimeoutStartSec=300
|
||||||
|
ExecStart=/usr/bin/skyview -config /etc/skyview-adsb/config.json
|
||||||
|
```
|
||||||
|
|
||||||
|
This ensures aviation data sources are refreshed before each service start, complementing the weekly timer-based updates.
|
||||||
|
|
||||||
|
## Data Retention and Cleanup
|
||||||
|
|
||||||
|
### Automatic Cleanup
|
||||||
|
- **Aircraft History**: Configurable retention period (`max_history_days`)
|
||||||
|
- **Cache Expiration**: TTL-based cleanup of external API cache
|
||||||
|
- **Optimization**: Periodic VACUUM operations for storage efficiency
|
||||||
|
|
||||||
|
### Manual Maintenance
|
||||||
|
```sql
|
||||||
|
-- Clean old aircraft history (example: 7 days)
|
||||||
|
DELETE FROM aircraft_history
|
||||||
|
WHERE timestamp < datetime('now', '-7 days');
|
||||||
|
|
||||||
|
-- Clean expired cache entries
|
||||||
|
DELETE FROM callsign_cache
|
||||||
|
WHERE expires_at < datetime('now');
|
||||||
|
|
||||||
|
-- Optimize database storage
|
||||||
|
VACUUM;
|
||||||
|
```
|
||||||
|
|
||||||
|
## Database Optimization
|
||||||
|
|
||||||
|
SkyView includes a comprehensive database optimization system that automatically manages storage efficiency and performance.
|
||||||
|
|
||||||
|
### Optimization Features
|
||||||
|
|
||||||
|
#### Automatic VACUUM Operations
|
||||||
|
- **Full VACUUM**: Rebuilds database to reclaim deleted space
|
||||||
|
- **Incremental VACUUM**: Gradual space reclamation with minimal performance impact
|
||||||
|
- **Scheduled Maintenance**: Configurable intervals for automatic optimization
|
||||||
|
- **Size Reporting**: Before/after statistics with space savings metrics
|
||||||
|
|
||||||
|
#### Storage Optimization
|
||||||
|
- **Page Size Optimization**: Configurable SQLite page size for optimal performance
|
||||||
|
- **Auto-Vacuum Configuration**: Enables incremental space reclamation
|
||||||
|
- **Statistics Updates**: ANALYZE operations for query plan optimization
|
||||||
|
- **Efficiency Monitoring**: Real-time storage efficiency reporting
|
||||||
|
|
||||||
|
### Using the Optimization System
|
||||||
|
|
||||||
|
#### Command Line Interface
|
||||||
|
```bash
|
||||||
|
# Run comprehensive database optimization
|
||||||
|
skyview-data optimize
|
||||||
|
|
||||||
|
# Run with force flag to skip confirmation prompts
|
||||||
|
skyview-data optimize --force
|
||||||
|
|
||||||
|
# Check current optimization statistics
|
||||||
|
skyview-data optimize --stats-only
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Optimization Output Example
|
||||||
|
```
|
||||||
|
Optimizing database for storage efficiency...
|
||||||
|
✓ Auto VACUUM: Enable incremental auto-vacuum
|
||||||
|
✓ Incremental VACUUM: Reclaim free pages incrementally
|
||||||
|
✓ Optimize: Update SQLite query planner statistics
|
||||||
|
✓ Analyze: Update table statistics for better query plans
|
||||||
|
|
||||||
|
VACUUM completed in 1.2s: 275.3 MB → 263.1 MB (saved 12.2 MB, 4.4%)
|
||||||
|
|
||||||
|
Database optimization completed successfully.
|
||||||
|
Storage efficiency: 96.6% (263.1 MB used of 272.4 MB allocated)
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Configuration Options
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"database": {
|
||||||
|
"vacuum_interval": "24h",
|
||||||
|
"page_size": 4096,
|
||||||
|
"enable_compression": true,
|
||||||
|
"compression_level": 6
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Optimization Statistics
|
||||||
|
|
||||||
|
The optimization system provides detailed metrics about database performance:
|
||||||
|
|
||||||
|
#### Available Statistics
|
||||||
|
- **Database Size**: Total file size in bytes
|
||||||
|
- **Page Statistics**: Page size, count, and utilization
|
||||||
|
- **Storage Efficiency**: Percentage of allocated space actually used
|
||||||
|
- **Free Space**: Amount of reclaimable space available
|
||||||
|
- **Auto-Vacuum Status**: Current auto-vacuum configuration
|
||||||
|
- **Last Optimization**: Timestamp of most recent optimization
|
||||||
|
|
||||||
|
#### Programmatic Access
|
||||||
|
```go
|
||||||
|
// Get current optimization statistics
|
||||||
|
optimizer := NewOptimizationManager(db, config)
|
||||||
|
stats, err := optimizer.GetOptimizationStats()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal("Failed to get stats:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Database efficiency: %.1f%%\n", stats.Efficiency)
|
||||||
|
fmt.Printf("Storage used: %.1f MB\n", float64(stats.DatabaseSize)/(1024*1024))
|
||||||
|
```
|
||||||
|
|
||||||
|
## Performance Considerations
|
||||||
|
|
||||||
|
### Query Optimization
|
||||||
|
- Time-range queries use `idx_aircraft_history_icao_time`
|
||||||
|
- Callsign lookups prioritize local cache over external APIs
|
||||||
|
- Bulk operations use transactions for consistency
|
||||||
|
|
||||||
|
### Storage Efficiency
|
||||||
|
- Configurable history limits prevent unbounded growth
|
||||||
|
- Automatic VACUUM operations with optimization reporting
|
||||||
|
- Compressed timestamps and efficient data types
|
||||||
|
- Page size optimization for storage efficiency
|
||||||
|
- Auto-vacuum configuration for incremental space reclamation
|
||||||
|
|
||||||
|
### Memory Usage
|
||||||
|
- WAL mode for concurrent read/write access
|
||||||
|
- Connection pooling for multiple goroutines
|
||||||
|
- Prepared statements for repeated queries
|
||||||
|
|
||||||
|
## Privacy and Security
|
||||||
|
|
||||||
|
### Privacy Mode
|
||||||
|
SkyView includes comprehensive privacy controls through the `privacy_mode` configuration option:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"privacy_mode": true,
|
||||||
|
"external_apis": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Privacy Mode Features
|
||||||
|
- **No External Calls**: Completely disables all external API requests
|
||||||
|
- **Local-Only Lookups**: Uses only embedded OpenFlights database for callsign enhancement
|
||||||
|
- **No Data Transmission**: Aircraft data never leaves the local system
|
||||||
|
- **Compliance**: Suitable for sensitive environments requiring air-gapped operation
|
||||||
|
|
||||||
|
#### Privacy Mode Behavior
|
||||||
|
| Feature | Privacy Mode ON | Privacy Mode OFF |
|
||||||
|
|---------|----------------|------------------|
|
||||||
|
| External API calls | ❌ Disabled | ✅ Configurable |
|
||||||
|
| OpenFlights lookup | ✅ Enabled | ✅ Enabled |
|
||||||
|
| Callsign caching | ✅ Local only | ✅ Full caching |
|
||||||
|
| Data transmission | ❌ None | ⚠️ API calls only |
|
||||||
|
|
||||||
|
#### Use Cases for Privacy Mode
|
||||||
|
- **Military installations**: No external data transmission allowed
|
||||||
|
- **Air-gapped networks**: No internet connectivity available
|
||||||
|
- **Corporate policies**: External API usage prohibited
|
||||||
|
- **Personal privacy**: User preference for local-only operation
|
||||||
|
|
||||||
|
### Security Considerations
|
||||||
|
|
||||||
|
#### File Permissions
|
||||||
|
- Database files readable only by skyview user/group
|
||||||
|
- Configuration files protected from unauthorized access
|
||||||
|
- Backup files inherit secure permissions
|
||||||
|
|
||||||
|
#### Data Protection
|
||||||
|
- Local SQLite database with file-system level security
|
||||||
|
- No cloud storage or external database dependencies
|
||||||
|
- All aviation data processed and stored locally
|
||||||
|
|
||||||
|
#### Network Security
|
||||||
|
- External API calls (when enabled) use HTTPS only
|
||||||
|
- No persistent connections to external services
|
||||||
|
- Optional certificate validation for API endpoints
|
||||||
|
|
||||||
|
### Data Integrity
|
||||||
|
- Foreign key constraints where applicable
|
||||||
|
- Transaction isolation for concurrent operations
|
||||||
|
- Checksums for migration verification
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Common Issues
|
||||||
|
|
||||||
|
#### Database Locked
|
||||||
|
```
|
||||||
|
Error: database is locked
|
||||||
|
```
|
||||||
|
**Solution**: Stop SkyView service, check for stale lock files, restart
|
||||||
|
|
||||||
|
#### Migration Failures
|
||||||
|
```
|
||||||
|
Error: migration 3 failed: table already exists
|
||||||
|
```
|
||||||
|
**Solution**: Check schema version, restore from backup, retry migration
|
||||||
|
|
||||||
|
#### Permission Denied
|
||||||
|
```
|
||||||
|
Error: unable to open database file
|
||||||
|
```
|
||||||
|
**Solution**: Verify file permissions, check directory ownership, ensure disk space
|
||||||
|
|
||||||
|
### Diagnostic Commands
|
||||||
|
```bash
|
||||||
|
# Check database integrity
|
||||||
|
sqlite3 /var/lib/skyview/skyview.db "PRAGMA integrity_check;"
|
||||||
|
|
||||||
|
# View schema version
|
||||||
|
sqlite3 /var/lib/skyview/skyview.db "SELECT * FROM schema_info;"
|
||||||
|
|
||||||
|
# Database statistics
|
||||||
|
sqlite3 /var/lib/skyview/skyview.db ".dbinfo"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Testing and Quality Assurance
|
||||||
|
|
||||||
|
SkyView includes comprehensive test coverage for all database functionality to ensure reliability and data integrity.
|
||||||
|
|
||||||
|
### Test Coverage Areas
|
||||||
|
|
||||||
|
#### Core Database Functionality
|
||||||
|
- **Database Creation and Initialization**: Connection management, configuration handling
|
||||||
|
- **Migration System**: Schema versioning, upgrade/downgrade operations
|
||||||
|
- **Connection Pooling**: Concurrent access, connection lifecycle management
|
||||||
|
- **SQLite Pragma Settings**: WAL mode, foreign keys, performance optimizations
|
||||||
|
|
||||||
|
#### Data Loading and Management
|
||||||
|
- **Multi-Source Loading**: OpenFlights, OurAirports data integration
|
||||||
|
- **Conflict Resolution**: Upsert operations, duplicate handling
|
||||||
|
- **Error Handling**: Network failures, malformed data recovery
|
||||||
|
- **Performance Validation**: Loading speed, memory usage optimization
|
||||||
|
|
||||||
|
#### Callsign Enhancement System
|
||||||
|
- **Parsing Logic**: Callsign validation, airline code extraction
|
||||||
|
- **Database Integration**: Local lookups, caching operations
|
||||||
|
- **Search Functionality**: Airline filtering, country-based queries
|
||||||
|
- **Cache Management**: TTL handling, cleanup operations
|
||||||
|
|
||||||
|
#### Optimization System
|
||||||
|
- **VACUUM Operations**: Space reclamation, performance monitoring
|
||||||
|
- **Page Size Optimization**: Configuration validation, storage efficiency
|
||||||
|
- **Statistics Generation**: Metrics accuracy, reporting consistency
|
||||||
|
- **Maintenance Scheduling**: Automated optimization, interval management
|
||||||
|
|
||||||
|
### Test Infrastructure
|
||||||
|
|
||||||
|
#### Automated Test Setup
|
||||||
|
```go
|
||||||
|
// setupTestDatabase creates isolated test environment
|
||||||
|
func setupTestDatabase(t *testing.T) (*Database, func()) {
|
||||||
|
tempFile, _ := os.CreateTemp("", "test_skyview_*.db")
|
||||||
|
config := &Config{Path: tempFile.Name()}
|
||||||
|
db, _ := NewDatabase(config)
|
||||||
|
db.Initialize() // Run all migrations
|
||||||
|
|
||||||
|
cleanup := func() {
|
||||||
|
db.Close()
|
||||||
|
os.Remove(tempFile.Name())
|
||||||
|
}
|
||||||
|
return db, cleanup
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Network-Safe Testing
|
||||||
|
Tests gracefully handle network connectivity issues:
|
||||||
|
- Skip tests requiring external data sources when offline
|
||||||
|
- Provide meaningful error messages for connectivity failures
|
||||||
|
- Use local test data when external sources are unavailable
|
||||||
|
|
||||||
|
### Running Tests
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Run all database tests
|
||||||
|
go test -v ./internal/database/...
|
||||||
|
|
||||||
|
# Run tests in short mode (skip long-running network tests)
|
||||||
|
go test -v -short ./internal/database/...
|
||||||
|
|
||||||
|
# Run specific test categories
|
||||||
|
go test -v -run="TestDatabase" ./internal/database/...
|
||||||
|
go test -v -run="TestOptimization" ./internal/database/...
|
||||||
|
go test -v -run="TestCallsign" ./internal/database/...
|
||||||
|
```
|
||||||
|
|
||||||
|
## Future Enhancements
|
||||||
|
|
||||||
|
### Planned Features
|
||||||
|
- **Compression**: Time-series compression for long-term storage
|
||||||
|
- **Partitioning**: Date-based partitioning for large datasets
|
||||||
|
- **Replication**: Read replica support for high-availability setups
|
||||||
|
- **Analytics**: Built-in reporting and statistics tables
|
||||||
|
- **Enhanced Route Data**: Integration with additional flight tracking APIs
|
||||||
|
- **Geographic Indexing**: Spatial queries for airport proximity searches
|
||||||
|
|
||||||
|
### Migration Path
|
||||||
|
- All enhancements will use versioned migrations
|
||||||
|
- Backward compatibility maintained for existing installations
|
||||||
|
- Data preservation prioritized over schema optimization
|
||||||
|
- Comprehensive testing required for all schema changes
|
||||||
340
docs/MIGRATION_GUIDE.md
Normal file
340
docs/MIGRATION_GUIDE.md
Normal file
|
|
@ -0,0 +1,340 @@
|
||||||
|
# SkyView Database Migration Guide
|
||||||
|
|
||||||
|
This guide covers the transition from SkyView's in-memory data storage to persistent SQLite database storage, introduced in version 0.1.0.
|
||||||
|
|
||||||
|
## Overview of Changes
|
||||||
|
|
||||||
|
### What's New
|
||||||
|
- **Persistent Storage**: Aircraft position history survives restarts
|
||||||
|
- **Callsign Enhancement**: Enriched aircraft information with airline/airport data
|
||||||
|
- **Embedded Aviation Database**: OpenFlights airline and airport data included
|
||||||
|
- **Configurable Retention**: Control how long historical data is kept
|
||||||
|
- **Privacy Controls**: Comprehensive privacy mode for sensitive environments
|
||||||
|
|
||||||
|
### What's Changed
|
||||||
|
- **Memory Usage**: Reduced memory footprint for aircraft tracking
|
||||||
|
- **Startup Time**: Slightly longer initial startup due to database initialization
|
||||||
|
- **Configuration**: New database and callsign configuration sections
|
||||||
|
- **File Structure**: New database file created in system or user directories
|
||||||
|
|
||||||
|
## Pre-Migration Checklist
|
||||||
|
|
||||||
|
### System Requirements
|
||||||
|
- **Disk Space**: Minimum 100MB available for database and backups
|
||||||
|
- **Permissions**: Write access to `/var/lib/skyview/` (system) or `~/.local/share/skyview/` (user)
|
||||||
|
- **SQLite**: No additional installation required (embedded in SkyView)
|
||||||
|
|
||||||
|
### Current Data
|
||||||
|
⚠️ **Important**: In-memory aircraft data from previous versions cannot be preserved during migration. Historical tracking will start fresh after the upgrade.
|
||||||
|
|
||||||
|
## Migration Process
|
||||||
|
|
||||||
|
### Automatic Migration (Recommended)
|
||||||
|
|
||||||
|
#### For Debian Package Users
|
||||||
|
```bash
|
||||||
|
# Update to new version
|
||||||
|
sudo apt update && sudo apt upgrade skyview-adsb
|
||||||
|
|
||||||
|
# Service will automatically restart and initialize database
|
||||||
|
sudo systemctl status skyview
|
||||||
|
```
|
||||||
|
|
||||||
|
#### For Manual Installation Users
|
||||||
|
```bash
|
||||||
|
# Stop current SkyView instance
|
||||||
|
sudo systemctl stop skyview # or kill existing process
|
||||||
|
|
||||||
|
# Backup current configuration
|
||||||
|
cp /etc/skyview/config.json /etc/skyview/config.json.backup
|
||||||
|
|
||||||
|
# Start new version (database will be created automatically)
|
||||||
|
sudo systemctl start skyview
|
||||||
|
```
|
||||||
|
|
||||||
|
### Manual Database Setup
|
||||||
|
|
||||||
|
If automatic initialization fails, you can manually initialize the database:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Create database directory
|
||||||
|
sudo mkdir -p /var/lib/skyview
|
||||||
|
sudo chown skyview:skyview /var/lib/skyview
|
||||||
|
|
||||||
|
# Run SkyView with explicit database initialization
|
||||||
|
sudo -u skyview /usr/bin/skyview --init-database --config /etc/skyview/config.json
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration Updates
|
||||||
|
|
||||||
|
### New Configuration Sections
|
||||||
|
|
||||||
|
Add these sections to your existing `config.json`:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"database": {
|
||||||
|
"path": "",
|
||||||
|
"max_history_days": 7,
|
||||||
|
"backup_on_upgrade": true
|
||||||
|
},
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"cache_hours": 24,
|
||||||
|
"external_apis": true,
|
||||||
|
"privacy_mode": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Configuration Migration
|
||||||
|
|
||||||
|
#### Default System Configuration
|
||||||
|
The new default configuration will be created at `/etc/skyview/config.json` if it doesn't exist:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"server": {
|
||||||
|
"host": "",
|
||||||
|
"port": 8080
|
||||||
|
},
|
||||||
|
"sources": [
|
||||||
|
{
|
||||||
|
"id": "local",
|
||||||
|
"name": "Local Receiver",
|
||||||
|
"host": "localhost",
|
||||||
|
"port": 30005,
|
||||||
|
"format": "beast",
|
||||||
|
"latitude": 0,
|
||||||
|
"longitude": 0,
|
||||||
|
"altitude": 0,
|
||||||
|
"enabled": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"settings": {
|
||||||
|
"history_limit": 500,
|
||||||
|
"stale_timeout": 60,
|
||||||
|
"update_rate": 1
|
||||||
|
},
|
||||||
|
"origin": {
|
||||||
|
"latitude": 51.4700,
|
||||||
|
"longitude": -0.4600,
|
||||||
|
"name": "Default Origin"
|
||||||
|
},
|
||||||
|
"database": {
|
||||||
|
"path": "",
|
||||||
|
"max_history_days": 7,
|
||||||
|
"backup_on_upgrade": true
|
||||||
|
},
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"cache_hours": 24,
|
||||||
|
"external_apis": true,
|
||||||
|
"privacy_mode": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Preserving Custom Settings
|
||||||
|
|
||||||
|
If you have customized your configuration, merge your existing settings with the new sections:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Backup original
|
||||||
|
cp /etc/skyview/config.json /etc/skyview/config.json.original
|
||||||
|
|
||||||
|
# Edit configuration to add new sections
|
||||||
|
sudo nano /etc/skyview/config.json
|
||||||
|
```
|
||||||
|
|
||||||
|
## Privacy Configuration
|
||||||
|
|
||||||
|
### Enabling Privacy Mode
|
||||||
|
|
||||||
|
For sensitive environments that require no external data transmission:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"callsign": {
|
||||||
|
"enabled": true,
|
||||||
|
"external_apis": false,
|
||||||
|
"privacy_mode": true,
|
||||||
|
"cache_hours": 168
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Privacy Mode Features
|
||||||
|
- **No External API Calls**: Completely disables OpenSky Network and other external APIs
|
||||||
|
- **Local-Only Enhancement**: Uses embedded OpenFlights data for callsign lookup
|
||||||
|
- **Offline Operation**: Full functionality without internet connectivity
|
||||||
|
- **Compliance Ready**: Suitable for air-gapped or restricted networks
|
||||||
|
|
||||||
|
## Post-Migration Verification
|
||||||
|
|
||||||
|
### Database Verification
|
||||||
|
```bash
|
||||||
|
# Check database file exists and has correct permissions
|
||||||
|
ls -la /var/lib/skyview/skyview.db
|
||||||
|
# Should show: -rw-r--r-- 1 skyview skyview [size] [date] skyview.db
|
||||||
|
|
||||||
|
# Verify database schema
|
||||||
|
sqlite3 /var/lib/skyview/skyview.db "SELECT version FROM schema_info ORDER BY version DESC LIMIT 1;"
|
||||||
|
# Should return current schema version number
|
||||||
|
```
|
||||||
|
|
||||||
|
### Service Health Check
|
||||||
|
```bash
|
||||||
|
# Check service status
|
||||||
|
sudo systemctl status skyview
|
||||||
|
|
||||||
|
# Check logs for any errors
|
||||||
|
sudo journalctl -u skyview -f
|
||||||
|
|
||||||
|
# Verify web interface accessibility
|
||||||
|
curl -I http://localhost:8080/
|
||||||
|
# Should return: HTTP/1.1 200 OK
|
||||||
|
```
|
||||||
|
|
||||||
|
### Feature Testing
|
||||||
|
1. **Historical Data**: Verify aircraft positions persist after restart
|
||||||
|
2. **Callsign Enhancement**: Check that airline names appear for aircraft with callsigns
|
||||||
|
3. **Performance**: Monitor memory and CPU usage compared to previous version
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Common Migration Issues
|
||||||
|
|
||||||
|
#### Database Permission Errors
|
||||||
|
```
|
||||||
|
Error: unable to open database file
|
||||||
|
```
|
||||||
|
|
||||||
|
**Solution**:
|
||||||
|
```bash
|
||||||
|
sudo chown -R skyview:skyview /var/lib/skyview/
|
||||||
|
sudo chmod 755 /var/lib/skyview/
|
||||||
|
sudo chmod 644 /var/lib/skyview/skyview.db
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Migration Failures
|
||||||
|
```
|
||||||
|
Error: migration failed at version X
|
||||||
|
```
|
||||||
|
|
||||||
|
**Solution**:
|
||||||
|
```bash
|
||||||
|
# Stop service
|
||||||
|
sudo systemctl stop skyview
|
||||||
|
|
||||||
|
# Remove corrupted database
|
||||||
|
sudo rm /var/lib/skyview/skyview.db
|
||||||
|
|
||||||
|
# Restart service (will recreate database)
|
||||||
|
sudo systemctl start skyview
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Configuration Conflicts
|
||||||
|
```
|
||||||
|
Error: unknown configuration field 'database'
|
||||||
|
```
|
||||||
|
|
||||||
|
**Solution**: Update configuration file with new sections, or reset to default configuration.
|
||||||
|
|
||||||
|
### Rolling Back
|
||||||
|
|
||||||
|
If you need to revert to the previous version:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Stop current service
|
||||||
|
sudo systemctl stop skyview
|
||||||
|
|
||||||
|
# Install previous package version
|
||||||
|
sudo apt install skyview-adsb=[previous-version]
|
||||||
|
|
||||||
|
# Remove database directory (optional)
|
||||||
|
sudo rm -rf /var/lib/skyview/
|
||||||
|
|
||||||
|
# Restore original configuration
|
||||||
|
sudo cp /etc/skyview/config.json.backup /etc/skyview/config.json
|
||||||
|
|
||||||
|
# Start service
|
||||||
|
sudo systemctl start skyview
|
||||||
|
```
|
||||||
|
|
||||||
|
⚠️ **Note**: Rolling back will lose all historical aircraft data stored in the database.
|
||||||
|
|
||||||
|
## Performance Impact
|
||||||
|
|
||||||
|
### Expected Changes
|
||||||
|
|
||||||
|
#### Memory Usage
|
||||||
|
- **Before**: ~50-100MB RAM for aircraft tracking
|
||||||
|
- **After**: ~30-60MB RAM (reduced due to database storage)
|
||||||
|
|
||||||
|
#### Disk Usage
|
||||||
|
- **Database**: ~10-50MB depending on retention settings and traffic
|
||||||
|
- **Backups**: Additional ~10-50MB for migration backups
|
||||||
|
|
||||||
|
#### Startup Time
|
||||||
|
- **Before**: 1-2 seconds
|
||||||
|
- **After**: 2-5 seconds (database initialization)
|
||||||
|
|
||||||
|
### Optimization Recommendations
|
||||||
|
|
||||||
|
#### For High-Traffic Environments
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"database": {
|
||||||
|
"max_history_days": 3,
|
||||||
|
"backup_on_upgrade": false
|
||||||
|
},
|
||||||
|
"settings": {
|
||||||
|
"history_limit": 100
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### For Resource-Constrained Systems
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"callsign": {
|
||||||
|
"enabled": false
|
||||||
|
},
|
||||||
|
"database": {
|
||||||
|
"max_history_days": 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Benefits After Migration
|
||||||
|
|
||||||
|
### Enhanced Features
|
||||||
|
- **Persistent History**: Aircraft tracks survive system restarts
|
||||||
|
- **Rich Callsign Data**: Airline names, routes, and aircraft types
|
||||||
|
- **Better Analytics**: Historical data enables trend analysis
|
||||||
|
- **Improved Performance**: Reduced memory usage for long-running instances
|
||||||
|
|
||||||
|
### Operational Improvements
|
||||||
|
- **Service Reliability**: Database recovery after crashes
|
||||||
|
- **Maintenance Windows**: Graceful restart without data loss
|
||||||
|
- **Monitoring**: Historical data for performance analysis
|
||||||
|
- **Compliance**: Privacy controls for regulatory requirements
|
||||||
|
|
||||||
|
## Support
|
||||||
|
|
||||||
|
### Getting Help
|
||||||
|
- **Documentation**: Check `/usr/share/doc/skyview-adsb/` for additional guides
|
||||||
|
- **Logs**: Service logs available via `journalctl -u skyview`
|
||||||
|
- **Configuration**: Example configs in `/usr/share/skyview/examples/`
|
||||||
|
- **Community**: Report issues at project repository
|
||||||
|
|
||||||
|
### Reporting Issues
|
||||||
|
When reporting migration issues, include:
|
||||||
|
- SkyView version (before and after)
|
||||||
|
- Operating system and version
|
||||||
|
- Configuration file content
|
||||||
|
- Error messages from logs
|
||||||
|
- Database file permissions (`ls -la /var/lib/skyview/`)
|
||||||
|
|
||||||
|
This migration enables SkyView's evolution toward more sophisticated aircraft tracking while maintaining the simplicity and reliability of the existing system.
|
||||||
2
go.mod
2
go.mod
|
|
@ -6,3 +6,5 @@ require (
|
||||||
github.com/gorilla/mux v1.8.1
|
github.com/gorilla/mux v1.8.1
|
||||||
github.com/gorilla/websocket v1.5.3
|
github.com/gorilla/websocket v1.5.3
|
||||||
)
|
)
|
||||||
|
|
||||||
|
require github.com/mattn/go-sqlite3 v1.14.32
|
||||||
|
|
|
||||||
2
go.sum
2
go.sum
|
|
@ -2,3 +2,5 @@ github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||||
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||||
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
||||||
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||||
|
github.com/mattn/go-sqlite3 v1.14.32 h1:JD12Ag3oLy1zQA+BNn74xRgaBbdhbNIDYvQUEuuErjs=
|
||||||
|
github.com/mattn/go-sqlite3 v1.14.32/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||||
|
|
|
||||||
389
internal/database/api_client.go
Normal file
389
internal/database/api_client.go
Normal file
|
|
@ -0,0 +1,389 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ExternalAPIClient is a rate-limited HTTP client for third-party aviation
// data services (currently the OpenSky Network). Concurrent use is
// coordinated via mutex, which guards lastRequest in enforceRateLimit.
type ExternalAPIClient struct {
	httpClient *http.Client // underlying client; its Timeout mirrors the timeout field
	mutex      sync.RWMutex // guards lastRequest (write-locked in enforceRateLimit)

	// Configuration
	timeout    time.Duration // per-request timeout (also applied to httpClient)
	maxRetries int           // additional attempts after the first request
	userAgent  string        // value sent in the User-Agent header on every request

	// Rate limiting
	lastRequest time.Time     // time the most recent outbound request was started
	minInterval time.Duration // minimum spacing enforced between consecutive requests
}
|
||||||
|
|
||||||
|
// APIClientConfig carries optional tuning knobs for NewExternalAPIClient.
// Zero-valued fields are replaced with defaults by the constructor
// (10s timeout, 3 retries, SkyView User-Agent, 1s minimum interval).
type APIClientConfig struct {
	Timeout     time.Duration // HTTP request timeout; 0 means use the default
	MaxRetries  int           // retry attempts on transient failures; 0 means use the default
	UserAgent   string        // User-Agent header value; empty means use the default
	MinInterval time.Duration // Minimum interval between requests; 0 means use the default
}
|
||||||
|
|
||||||
|
// OpenSkyFlightInfo is a normalized view of a single flight record obtained
// from the OpenSky Network flights API. Only the fields actually populated
// by GetFlightInfoFromOpenSky (Callsign, Origin, Destination, FirstSeen,
// LastSeen, ICAO) are guaranteed to be filled; the rest may be zero-valued.
type OpenSkyFlightInfo struct {
	ICAO         string    `json:"icao"`     // ICAO 24-bit transponder address used for the lookup
	Callsign     string    `json:"callsign"` // broadcast callsign as reported by OpenSky
	Origin       string    `json:"origin"`
	Destination  string    `json:"destination"`
	FirstSeen    time.Time `json:"first_seen"`
	LastSeen     time.Time `json:"last_seen"`
	AircraftType string    `json:"aircraft_type"`
	Registration string    `json:"registration"`
	FlightNumber string    `json:"flight_number"`
	Airline      string    `json:"airline"`
}
|
||||||
|
|
||||||
|
// APIError describes a failed external API call, including whether the
// caller may reasonably retry it and any server-suggested delay.
type APIError struct {
	Operation  string        // logical operation name, e.g. "opensky_flight_info"
	StatusCode int           // HTTP status code; zero for transport-level failures
	Message    string        // error detail or response body excerpt
	Retryable  bool          // true for transient failures (network errors, 5xx, 429)
	RetryAfter time.Duration // server-suggested wait before retrying, if provided
}
|
||||||
|
|
||||||
|
func (e *APIError) Error() string {
|
||||||
|
return fmt.Sprintf("API error in %s: %s (status: %d, retryable: %v)",
|
||||||
|
e.Operation, e.Message, e.StatusCode, e.Retryable)
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewExternalAPIClient(config APIClientConfig) *ExternalAPIClient {
|
||||||
|
if config.Timeout == 0 {
|
||||||
|
config.Timeout = 10 * time.Second
|
||||||
|
}
|
||||||
|
if config.MaxRetries == 0 {
|
||||||
|
config.MaxRetries = 3
|
||||||
|
}
|
||||||
|
if config.UserAgent == "" {
|
||||||
|
config.UserAgent = "SkyView-ADSB/1.0 (https://github.com/user/skyview)"
|
||||||
|
}
|
||||||
|
if config.MinInterval == 0 {
|
||||||
|
config.MinInterval = 1 * time.Second // Default rate limit
|
||||||
|
}
|
||||||
|
|
||||||
|
return &ExternalAPIClient{
|
||||||
|
httpClient: &http.Client{
|
||||||
|
Timeout: config.Timeout,
|
||||||
|
},
|
||||||
|
timeout: config.Timeout,
|
||||||
|
maxRetries: config.MaxRetries,
|
||||||
|
userAgent: config.UserAgent,
|
||||||
|
minInterval: config.MinInterval,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// enforceRateLimit blocks until at least minInterval has elapsed since the
// previous request, then records the current time as the new lastRequest.
//
// The mutex is held for the full duration, including the sleep, so
// concurrent callers are serialized — each waiting caller observes the
// previous caller's updated lastRequest, which is what spaces overlapping
// requests apart.
func (c *ExternalAPIClient) enforceRateLimit() {
	c.mutex.Lock()
	defer c.mutex.Unlock()

	elapsed := time.Since(c.lastRequest)
	if elapsed < c.minInterval {
		// Sleep only for the remaining portion of the interval.
		time.Sleep(c.minInterval - elapsed)
	}
	c.lastRequest = time.Now()
}
|
||||||
|
|
||||||
|
func (c *ExternalAPIClient) makeRequest(ctx context.Context, url string) (*http.Response, error) {
|
||||||
|
c.enforceRateLimit()
|
||||||
|
|
||||||
|
req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
req.Header.Set("User-Agent", c.userAgent)
|
||||||
|
req.Header.Set("Accept", "application/json")
|
||||||
|
|
||||||
|
var resp *http.Response
|
||||||
|
var lastErr error
|
||||||
|
|
||||||
|
for attempt := 0; attempt <= c.maxRetries; attempt++ {
|
||||||
|
if attempt > 0 {
|
||||||
|
// Exponential backoff
|
||||||
|
backoff := time.Duration(1<<uint(attempt-1)) * time.Second
|
||||||
|
select {
|
||||||
|
case <-ctx.Done():
|
||||||
|
return nil, ctx.Err()
|
||||||
|
case <-time.After(backoff):
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
resp, lastErr = c.httpClient.Do(req)
|
||||||
|
if lastErr != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for retryable status codes
|
||||||
|
if resp.StatusCode >= 500 || resp.StatusCode == 429 {
|
||||||
|
resp.Body.Close()
|
||||||
|
|
||||||
|
// Handle rate limiting
|
||||||
|
if resp.StatusCode == 429 {
|
||||||
|
retryAfter := parseRetryAfter(resp.Header.Get("Retry-After"))
|
||||||
|
if retryAfter > 0 {
|
||||||
|
select {
|
||||||
|
case <-ctx.Done():
|
||||||
|
return nil, ctx.Err()
|
||||||
|
case <-time.After(retryAfter):
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Success or non-retryable error
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
if lastErr != nil {
|
||||||
|
return nil, lastErr
|
||||||
|
}
|
||||||
|
|
||||||
|
return resp, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetFlightInfoFromOpenSky queries the OpenSky Network flights API for the
// given ICAO 24-bit address over the last 24 hours and returns the first
// flight record, or (nil, nil) when no flights are reported.
//
// NOTE(review): the response is decoded as [][]interface{} (array of
// positional arrays), but the public OpenSky /flights/aircraft endpoint is
// documented to return an array of JSON *objects* with named fields
// (icao24, callsign, firstSeen, estDepartureAirport, ...). If that is the
// endpoint actually in use, decoding would fail and this function would
// always return a parse APIError — confirm the response format against the
// live API. The positional indices below (1=callsign, 2=firstSeen,
// 3=lastSeen, 4=origin, 5=destination) should be verified at the same time.
func (c *ExternalAPIClient) GetFlightInfoFromOpenSky(ctx context.Context, icao string) (*OpenSkyFlightInfo, error) {
	if icao == "" {
		return nil, fmt.Errorf("empty ICAO code")
	}

	// OpenSky Network API endpoint for flight information:
	// query window is the trailing 24 hours up to now.
	apiURL := fmt.Sprintf("https://opensky-network.org/api/flights/aircraft?icao24=%s&begin=%d&end=%d",
		icao,
		time.Now().Add(-24*time.Hour).Unix(),
		time.Now().Unix(),
	)

	resp, err := c.makeRequest(ctx, apiURL)
	if err != nil {
		// Transport-level failure: no StatusCode available.
		return nil, &APIError{
			Operation: "opensky_flight_info",
			Message:   err.Error(),
			Retryable: true,
		}
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		// Include the response body as the error message for diagnostics.
		body, _ := io.ReadAll(resp.Body)
		return nil, &APIError{
			Operation:  "opensky_flight_info",
			StatusCode: resp.StatusCode,
			Message:    string(body),
			Retryable:  resp.StatusCode >= 500 || resp.StatusCode == 429,
		}
	}

	var flights [][]interface{}
	decoder := json.NewDecoder(resp.Body)
	if err := decoder.Decode(&flights); err != nil {
		return nil, &APIError{
			Operation: "opensky_parse_response",
			Message:   err.Error(),
			Retryable: false,
		}
	}

	if len(flights) == 0 {
		return nil, nil // No flight information available
	}

	// Parse the most recent flight.
	// NOTE(review): this assumes flights[0] is the most recent entry —
	// confirm the API's ordering guarantee.
	flight := flights[0]
	if len(flight) < 10 {
		return nil, &APIError{
			Operation: "opensky_invalid_response",
			Message:   "invalid flight data format",
			Retryable: false,
		}
	}

	info := &OpenSkyFlightInfo{
		ICAO: icao,
	}

	// Parse fields based on OpenSky API documentation.
	// Each field is extracted defensively: a type-assertion miss simply
	// leaves the corresponding struct field at its zero value.
	if callsign, ok := flight[1].(string); ok {
		info.Callsign = callsign
	}
	if firstSeen, ok := flight[2].(float64); ok {
		info.FirstSeen = time.Unix(int64(firstSeen), 0)
	}
	if lastSeen, ok := flight[3].(float64); ok {
		info.LastSeen = time.Unix(int64(lastSeen), 0)
	}
	if origin, ok := flight[4].(string); ok {
		info.Origin = origin
	}
	if destination, ok := flight[5].(string); ok {
		info.Destination = destination
	}

	return info, nil
}
|
||||||
|
|
||||||
|
func (c *ExternalAPIClient) GetAircraftInfoFromOpenSky(ctx context.Context, icao string) (map[string]interface{}, error) {
|
||||||
|
if icao == "" {
|
||||||
|
return nil, fmt.Errorf("empty ICAO code")
|
||||||
|
}
|
||||||
|
|
||||||
|
// OpenSky Network metadata API
|
||||||
|
apiURL := fmt.Sprintf("https://opensky-network.org/api/metadata/aircraft/icao/%s", icao)
|
||||||
|
|
||||||
|
resp, err := c.makeRequest(ctx, apiURL)
|
||||||
|
if err != nil {
|
||||||
|
return nil, &APIError{
|
||||||
|
Operation: "opensky_aircraft_info",
|
||||||
|
Message: err.Error(),
|
||||||
|
Retryable: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode == http.StatusNotFound {
|
||||||
|
return nil, nil // Aircraft not found
|
||||||
|
}
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
body, _ := io.ReadAll(resp.Body)
|
||||||
|
return nil, &APIError{
|
||||||
|
Operation: "opensky_aircraft_info",
|
||||||
|
StatusCode: resp.StatusCode,
|
||||||
|
Message: string(body),
|
||||||
|
Retryable: resp.StatusCode >= 500 || resp.StatusCode == 429,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var aircraft map[string]interface{}
|
||||||
|
decoder := json.NewDecoder(resp.Body)
|
||||||
|
if err := decoder.Decode(&aircraft); err != nil {
|
||||||
|
return nil, &APIError{
|
||||||
|
Operation: "opensky_parse_aircraft",
|
||||||
|
Message: err.Error(),
|
||||||
|
Retryable: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return aircraft, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *ExternalAPIClient) EnhanceCallsignWithExternalData(ctx context.Context, callsign, icao string) (map[string]interface{}, error) {
|
||||||
|
enhancement := make(map[string]interface{})
|
||||||
|
enhancement["callsign"] = callsign
|
||||||
|
enhancement["icao"] = icao
|
||||||
|
enhancement["enhanced"] = false
|
||||||
|
|
||||||
|
// Try to get flight information from OpenSky
|
||||||
|
if flightInfo, err := c.GetFlightInfoFromOpenSky(ctx, icao); err == nil && flightInfo != nil {
|
||||||
|
enhancement["flight_info"] = map[string]interface{}{
|
||||||
|
"origin": flightInfo.Origin,
|
||||||
|
"destination": flightInfo.Destination,
|
||||||
|
"first_seen": flightInfo.FirstSeen,
|
||||||
|
"last_seen": flightInfo.LastSeen,
|
||||||
|
"flight_number": flightInfo.FlightNumber,
|
||||||
|
"airline": flightInfo.Airline,
|
||||||
|
}
|
||||||
|
enhancement["enhanced"] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to get aircraft metadata
|
||||||
|
if aircraftInfo, err := c.GetAircraftInfoFromOpenSky(ctx, icao); err == nil && aircraftInfo != nil {
|
||||||
|
enhancement["aircraft_info"] = aircraftInfo
|
||||||
|
enhancement["enhanced"] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
return enhancement, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *ExternalAPIClient) BatchEnhanceCallsigns(ctx context.Context, callsigns map[string]string) (map[string]map[string]interface{}, error) {
|
||||||
|
results := make(map[string]map[string]interface{})
|
||||||
|
|
||||||
|
for callsign, icao := range callsigns {
|
||||||
|
select {
|
||||||
|
case <-ctx.Done():
|
||||||
|
return results, ctx.Err()
|
||||||
|
default:
|
||||||
|
}
|
||||||
|
|
||||||
|
enhanced, err := c.EnhanceCallsignWithExternalData(ctx, callsign, icao)
|
||||||
|
if err != nil {
|
||||||
|
// Log error but continue with other callsigns
|
||||||
|
fmt.Printf("Warning: failed to enhance callsign %s (ICAO: %s): %v\n", callsign, icao, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
results[callsign] = enhanced
|
||||||
|
}
|
||||||
|
|
||||||
|
return results, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *ExternalAPIClient) TestConnection(ctx context.Context) error {
|
||||||
|
// Test with a simple API call
|
||||||
|
testURL := "https://opensky-network.org/api/states?time=0&lamin=0&lomin=0&lamax=1&lomax=1"
|
||||||
|
|
||||||
|
resp, err := c.makeRequest(ctx, testURL)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("connection test failed: %w", err)
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
return fmt.Errorf("connection test returned status %d", resp.StatusCode)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseRetryAfter(header string) time.Duration {
|
||||||
|
if header == "" {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try parsing as seconds
|
||||||
|
if seconds, err := time.ParseDuration(header + "s"); err == nil {
|
||||||
|
return seconds
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try parsing as HTTP date
|
||||||
|
if t, err := http.ParseTime(header); err == nil {
|
||||||
|
return time.Until(t)
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// HealthCheck provides information about the client's health
|
||||||
|
func (c *ExternalAPIClient) HealthCheck(ctx context.Context) map[string]interface{} {
|
||||||
|
health := make(map[string]interface{})
|
||||||
|
|
||||||
|
// Test connection
|
||||||
|
if err := c.TestConnection(ctx); err != nil {
|
||||||
|
health["status"] = "unhealthy"
|
||||||
|
health["error"] = err.Error()
|
||||||
|
} else {
|
||||||
|
health["status"] = "healthy"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add configuration info
|
||||||
|
health["timeout"] = c.timeout.String()
|
||||||
|
health["max_retries"] = c.maxRetries
|
||||||
|
health["min_interval"] = c.minInterval.String()
|
||||||
|
health["user_agent"] = c.userAgent
|
||||||
|
|
||||||
|
c.mutex.RLock()
|
||||||
|
health["last_request"] = c.lastRequest
|
||||||
|
c.mutex.RUnlock()
|
||||||
|
|
||||||
|
return health
|
||||||
|
}
|
||||||
261
internal/database/database.go
Normal file
261
internal/database/database.go
Normal file
|
|
@ -0,0 +1,261 @@
|
||||||
|
// Package database provides persistent storage for aircraft data and callsign enhancement
|
||||||
|
// using SQLite with versioned schema migrations and comprehensive error handling.
|
||||||
|
//
|
||||||
|
// The database system supports:
|
||||||
|
// - Aircraft position history with configurable retention
|
||||||
|
// - Embedded OpenFlights airline and airport data
|
||||||
|
// - External API result caching with TTL
|
||||||
|
// - Schema migrations with rollback support
|
||||||
|
// - Privacy mode for complete offline operation
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
_ "github.com/mattn/go-sqlite3" // SQLite driver
|
||||||
|
)
|
||||||
|
|
||||||
|
// Database represents the main database connection and operations.
// It bundles the SQLite connection pool with the schema migrator and the
// callsign/history managers that operate on it. Construct with NewDatabase,
// then call Initialize to run migrations and load embedded data.
type Database struct {
	conn     *sql.DB          // underlying SQLite connection pool
	config   *Config          // effective configuration (Path resolved at open time)
	migrator *Migrator        // applies versioned schema migrations (see Initialize)
	callsign *CallsignManager // callsign enrichment and embedded OpenFlights data
	history  *HistoryManager  // aircraft position history, bounded by MaxHistoryDays
}
|
||||||
|
|
||||||
|
// Config holds database configuration options.
// See DefaultConfig for the standard values; Path is auto-resolved by
// NewDatabase when left empty.
type Config struct {
	// Database file path (auto-resolved if empty)
	Path string `json:"path"`

	// Data retention settings
	MaxHistoryDays  int  `json:"max_history_days"` // 0 = unlimited
	BackupOnUpgrade bool `json:"backup_on_upgrade"`

	// Connection settings (applied to the sql.DB pool in NewDatabase)
	MaxOpenConns    int           `json:"max_open_conns"`    // Default: 10
	MaxIdleConns    int           `json:"max_idle_conns"`    // Default: 5
	ConnMaxLifetime time.Duration `json:"conn_max_lifetime"` // Default: 1 hour

	// Maintenance settings
	VacuumInterval  time.Duration `json:"vacuum_interval"`  // Default: 24 hours
	CleanupInterval time.Duration `json:"cleanup_interval"` // Default: 1 hour

	// Compression settings
	EnableCompression bool `json:"enable_compression"` // Enable automatic compression
	CompressionLevel  int  `json:"compression_level"`  // Compression level (1-9, default: 6)
	PageSize          int  `json:"page_size"`          // SQLite page size (default: 4096)
}
|
||||||
|
|
||||||
|
// AircraftHistoryRecord represents a stored aircraft position update.
// Pointer fields are optional: nil means the value was not available in the
// originating message and is omitted from JSON output.
type AircraftHistoryRecord struct {
	ID             int64     `json:"id"`
	ICAO           string    `json:"icao"`      // aircraft ICAO hex identifier
	Timestamp      time.Time `json:"timestamp"` // time the update was observed
	Latitude       *float64  `json:"latitude,omitempty"`
	Longitude      *float64  `json:"longitude,omitempty"`
	Altitude       *int      `json:"altitude,omitempty"`
	Speed          *int      `json:"speed,omitempty"`
	Track          *int      `json:"track,omitempty"`
	VerticalRate   *int      `json:"vertical_rate,omitempty"`
	Squawk         *string   `json:"squawk,omitempty"`
	Callsign       *string   `json:"callsign,omitempty"`
	SourceID       string    `json:"source_id"` // identifier of the receiver/feed that produced the update
	SignalStrength *float64  `json:"signal_strength,omitempty"`
}
|
||||||
|
|
||||||
|
// CallsignInfo represents enriched callsign information: the raw callsign
// split into airline code and flight number, with airline details resolved
// from the embedded airline data. IsValid reports whether the enrichment
// succeeded; LastUpdated records when this entry was produced.
type CallsignInfo struct {
	OriginalCallsign string    `json:"original_callsign"` // callsign exactly as received
	AirlineCode      string    `json:"airline_code"`
	FlightNumber     string    `json:"flight_number"`
	AirlineName      string    `json:"airline_name"`
	AirlineCountry   string    `json:"airline_country"`
	DisplayName      string    `json:"display_name"` // human-friendly rendering for UI use
	IsValid          bool      `json:"is_valid"`
	LastUpdated      time.Time `json:"last_updated"`
}
|
||||||
|
|
||||||
|
// AirlineRecord represents embedded airline data from OpenFlights
// (fields mirror the OpenFlights airlines.dat columns).
type AirlineRecord struct {
	ID       int    `json:"id"`        // OpenFlights airline identifier
	Name     string `json:"name"`      // airline name
	Alias    string `json:"alias"`     // alternate name, empty if none
	IATACode string `json:"iata_code"` // 2-letter IATA code, empty if none
	ICAOCode string `json:"icao_code"` // 3-letter ICAO code, empty if none
	Callsign string `json:"callsign"`  // radio callsign
	Country  string `json:"country"`
	Active   bool   `json:"active"` // whether OpenFlights marks the airline active ("Y")
}
|
||||||
|
|
||||||
|
// AirportRecord represents embedded airport data from OpenFlights
// (fields mirror the OpenFlights airports.dat columns).
type AirportRecord struct {
	ID             int     `json:"id"` // OpenFlights airport identifier
	Name           string  `json:"name"`
	City           string  `json:"city"`
	Country        string  `json:"country"`
	IATA           string  `json:"iata"` // 3-letter IATA code, empty if none
	ICAO           string  `json:"icao"` // 4-letter ICAO code, empty if none
	Latitude       float64 `json:"latitude"`
	Longitude      float64 `json:"longitude"`
	Altitude       int     `json:"altitude"`        // field elevation (OpenFlights reports feet)
	TimezoneOffset float64 `json:"timezone_offset"` // hours offset from UTC
	DST            string  `json:"dst"`             // OpenFlights daylight-saving zone code
	Timezone       string  `json:"timezone"`        // tz database name, e.g. "America/New_York"
}
|
||||||
|
|
||||||
|
// DatabaseError represents database operation errors
|
||||||
|
type DatabaseError struct {
|
||||||
|
Operation string
|
||||||
|
Err error
|
||||||
|
Query string
|
||||||
|
Retryable bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *DatabaseError) Error() string {
|
||||||
|
if e.Query != "" {
|
||||||
|
return fmt.Sprintf("database %s error: %v (query: %s)", e.Operation, e.Err, e.Query)
|
||||||
|
}
|
||||||
|
return fmt.Sprintf("database %s error: %v", e.Operation, e.Err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *DatabaseError) Unwrap() error {
|
||||||
|
return e.Err
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewDatabase creates a new database connection with the given configuration
|
||||||
|
func NewDatabase(config *Config) (*Database, error) {
|
||||||
|
if config == nil {
|
||||||
|
config = DefaultConfig()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Resolve database path
|
||||||
|
dbPath, err := ResolveDatabasePath(config.Path)
|
||||||
|
if err != nil {
|
||||||
|
return nil, &DatabaseError{
|
||||||
|
Operation: "path_resolution",
|
||||||
|
Err: err,
|
||||||
|
Retryable: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
config.Path = dbPath
|
||||||
|
|
||||||
|
// Open database connection
|
||||||
|
conn, err := sql.Open("sqlite3", buildConnectionString(dbPath))
|
||||||
|
if err != nil {
|
||||||
|
return nil, &DatabaseError{
|
||||||
|
Operation: "connect",
|
||||||
|
Err: err,
|
||||||
|
Retryable: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Configure connection pool
|
||||||
|
conn.SetMaxOpenConns(config.MaxOpenConns)
|
||||||
|
conn.SetMaxIdleConns(config.MaxIdleConns)
|
||||||
|
conn.SetConnMaxLifetime(config.ConnMaxLifetime)
|
||||||
|
|
||||||
|
// Test connection
|
||||||
|
if err := conn.Ping(); err != nil {
|
||||||
|
conn.Close()
|
||||||
|
return nil, &DatabaseError{
|
||||||
|
Operation: "ping",
|
||||||
|
Err: err,
|
||||||
|
Retryable: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
db := &Database{
|
||||||
|
conn: conn,
|
||||||
|
config: config,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize components
|
||||||
|
db.migrator = NewMigrator(conn)
|
||||||
|
db.callsign = NewCallsignManager(conn)
|
||||||
|
db.history = NewHistoryManager(conn, config.MaxHistoryDays)
|
||||||
|
|
||||||
|
return db, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize runs database migrations and sets up embedded data
|
||||||
|
func (db *Database) Initialize() error {
|
||||||
|
// Run schema migrations
|
||||||
|
if err := db.migrator.MigrateToLatest(); err != nil {
|
||||||
|
return &DatabaseError{
|
||||||
|
Operation: "migration",
|
||||||
|
Err: err,
|
||||||
|
Retryable: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load embedded OpenFlights data if not already loaded
|
||||||
|
if err := db.callsign.LoadEmbeddedData(); err != nil {
|
||||||
|
return &DatabaseError{
|
||||||
|
Operation: "load_embedded_data",
|
||||||
|
Err: err,
|
||||||
|
Retryable: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetConfig returns the database configuration — the same *Config instance
// held by the Database (with Path resolved by NewDatabase), so callers must
// treat it as read-only.
func (db *Database) GetConfig() *Config {
	return db.config
}
|
||||||
|
|
||||||
|
// GetConnection returns the underlying database connection for callers that
// need to run queries directly (e.g. the data loader and tests).
func (db *Database) GetConnection() *sql.DB {
	return db.conn
}
|
||||||
|
|
||||||
|
// GetHistoryManager returns the history manager responsible for aircraft
// position history storage.
func (db *Database) GetHistoryManager() *HistoryManager {
	return db.history
}
|
||||||
|
|
||||||
|
// GetCallsignManager returns the callsign manager responsible for callsign
// enrichment and embedded OpenFlights data.
func (db *Database) GetCallsignManager() *CallsignManager {
	return db.callsign
}
|
||||||
|
|
||||||
|
// Close closes the database connection and stops background tasks
|
||||||
|
func (db *Database) Close() error {
|
||||||
|
if db.conn != nil {
|
||||||
|
return db.conn.Close()
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Health returns the database health status: nil when the connection exists
// and responds to a ping, otherwise an error describing the failure.
func (db *Database) Health() error {
	if db.conn == nil {
		return fmt.Errorf("database connection not initialized")
	}
	return db.conn.Ping()
}
|
||||||
|
|
||||||
|
|
||||||
|
// DefaultConfig returns the default database configuration
|
||||||
|
func DefaultConfig() *Config {
|
||||||
|
return &Config{
|
||||||
|
Path: "", // Auto-resolved
|
||||||
|
MaxHistoryDays: 7,
|
||||||
|
BackupOnUpgrade: true,
|
||||||
|
MaxOpenConns: 10,
|
||||||
|
MaxIdleConns: 5,
|
||||||
|
ConnMaxLifetime: time.Hour,
|
||||||
|
VacuumInterval: 24 * time.Hour,
|
||||||
|
CleanupInterval: time.Hour,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildConnectionString creates an SQLite DSN with performance pragmas:
// WAL journaling, NORMAL synchronization, a 64 MB page cache
// (negative _cache_size means KiB), in-memory temp storage, and
// foreign-key enforcement.
func buildConnectionString(path string) string {
	const pragmas = "_journal_mode=WAL&_synchronous=NORMAL&_cache_size=-64000&_temp_store=MEMORY&_foreign_keys=ON"
	return fmt.Sprintf("%s?%s", path, pragmas)
}
|
||||||
167
internal/database/database_test.go
Normal file
167
internal/database/database_test.go
Normal file
|
|
@ -0,0 +1,167 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestNewDatabase(t *testing.T) {
|
||||||
|
// Create temporary database file
|
||||||
|
tempFile, err := os.CreateTemp("", "test_skyview_*.db")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("Failed to create temp database file:", err)
|
||||||
|
}
|
||||||
|
defer os.Remove(tempFile.Name())
|
||||||
|
tempFile.Close()
|
||||||
|
|
||||||
|
config := &Config{
|
||||||
|
Path: tempFile.Name(),
|
||||||
|
VacuumInterval: time.Hour,
|
||||||
|
}
|
||||||
|
|
||||||
|
db, err := NewDatabase(config)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("Failed to create database:", err)
|
||||||
|
}
|
||||||
|
defer db.Close()
|
||||||
|
|
||||||
|
if db == nil {
|
||||||
|
t.Fatal("NewDatabase() returned nil")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test connection
|
||||||
|
conn := db.GetConnection()
|
||||||
|
if conn == nil {
|
||||||
|
t.Fatal("GetConnection() returned nil")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test basic query
|
||||||
|
var result int
|
||||||
|
err = conn.QueryRow("SELECT 1").Scan(&result)
|
||||||
|
if err != nil {
|
||||||
|
t.Error("Basic query failed:", err)
|
||||||
|
}
|
||||||
|
if result != 1 {
|
||||||
|
t.Error("Basic query returned wrong result:", result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestDatabaseClose verifies that Close is idempotent (two calls, no error)
// and that GetConnection reports a closed database by returning nil.
func TestDatabaseClose(t *testing.T) {
	tempFile, err := os.CreateTemp("", "test_skyview_*.db")
	if err != nil {
		t.Fatal("Failed to create temp database file:", err)
	}
	defer os.Remove(tempFile.Name())
	tempFile.Close()

	config := &Config{Path: tempFile.Name()}
	db, err := NewDatabase(config)
	if err != nil {
		t.Fatal("Failed to create database:", err)
	}

	// Close should not error
	if err := db.Close(); err != nil {
		t.Error("Database Close() returned error:", err)
	}

	// Second close should be safe
	if err := db.Close(); err != nil {
		t.Error("Second Close() returned error:", err)
	}

	// Connection should be nil after close.
	// NOTE(review): this requires Close to nil out the stored handle —
	// confirm the Close implementation actually does so.
	conn := db.GetConnection()
	if conn != nil {
		t.Error("GetConnection() should return nil after Close()")
	}
}
|
||||||
|
|
||||||
|
// TestDatabaseConfig verifies that NewDatabase stores the caller-supplied
// *Config by reference and preserves its field values.
func TestDatabaseConfig(t *testing.T) {
	tempFile, err := os.CreateTemp("", "test_skyview_*.db")
	if err != nil {
		t.Fatal("Failed to create temp database file:", err)
	}
	defer os.Remove(tempFile.Name())
	tempFile.Close()

	config := &Config{
		Path:           tempFile.Name(),
		VacuumInterval: 2 * time.Hour, // non-default value so preservation is observable
	}

	db, err := NewDatabase(config)
	if err != nil {
		t.Fatal("Failed to create database:", err)
	}
	defer db.Close()

	// Test that config is stored correctly (pointer identity, not a copy).
	if db.config != config {
		t.Error("Database config not stored correctly")
	}
	if db.config.VacuumInterval != 2*time.Hour {
		t.Error("VacuumInterval not preserved:", db.config.VacuumInterval)
	}
}
|
||||||
|
|
||||||
|
// TestDatabaseMigrations checks that every core table exists in
// sqlite_master after the migration suite has run on a fresh database
// (setupTestDatabase is expected to provide an initialized instance).
func TestDatabaseMigrations(t *testing.T) {
	db, cleanup := setupTestDatabase(t)
	defer cleanup()

	conn := db.GetConnection()

	// Check that essential tables exist after migrations
	tables := []string{"airlines", "airports", "callsign_cache", "data_sources", "aircraft_history"}
	for _, table := range tables {
		var count int
		query := "SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name=?"
		err := conn.QueryRow(query, table).Scan(&count)
		if err != nil {
			t.Errorf("Failed to check for table %s: %v", table, err)
		}
		if count != 1 {
			t.Errorf("Table %s does not exist", table)
		}
	}
}
|
||||||
|
|
||||||
|
// TestDatabasePragmas verifies that the DSN pragmas from
// buildConnectionString actually took effect on a fresh connection:
// foreign-key enforcement on, and WAL journaling active.
func TestDatabasePragmas(t *testing.T) {
	tempFile, err := os.CreateTemp("", "test_skyview_*.db")
	if err != nil {
		t.Fatal("Failed to create temp database file:", err)
	}
	defer os.Remove(tempFile.Name())
	tempFile.Close()

	config := &Config{Path: tempFile.Name()}
	db, err := NewDatabase(config)
	if err != nil {
		t.Fatal("Failed to create database:", err)
	}
	defer db.Close()

	conn := db.GetConnection()

	// Check that foreign keys are enabled (PRAGMA returns 1 when on).
	var foreignKeys int
	err = conn.QueryRow("PRAGMA foreign_keys").Scan(&foreignKeys)
	if err != nil {
		t.Error("Failed to check foreign_keys pragma:", err)
	}
	if foreignKeys != 1 {
		t.Error("Foreign keys should be enabled")
	}

	// Check journal mode
	var journalMode string
	err = conn.QueryRow("PRAGMA journal_mode").Scan(&journalMode)
	if err != nil {
		t.Error("Failed to check journal_mode:", err)
	}
	// Should be WAL mode for better concurrency
	if journalMode != "wal" {
		t.Errorf("Expected WAL journal mode, got: %s", journalMode)
	}
}
|
||||||
526
internal/database/loader.go
Normal file
526
internal/database/loader.go
Normal file
|
|
@ -0,0 +1,526 @@
|
||||||
|
// Package database - Data loader for external sources
|
||||||
|
//
|
||||||
|
// This module handles loading aviation data from external sources at runtime,
|
||||||
|
// maintaining license compliance by not embedding any AGPL or restricted data
|
||||||
|
// in the SkyView binary.
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/tls"
|
||||||
|
"database/sql"
|
||||||
|
"encoding/csv"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// DataLoader handles loading external data sources into the database.
// Construct with NewDataLoader, which configures the HTTP client.
type DataLoader struct {
	conn   *sql.DB      // target database for imported records
	client *http.Client // HTTP client used to download source files
}
|
||||||
|
|
||||||
|
// DataSource represents an external aviation data source, including the
// license terms and whether the user must explicitly accept them before
// the source may be downloaded (see LoadDataSource).
type DataSource struct {
	Name                string `json:"name"`
	License             string `json:"license"` // e.g. "AGPL-3.0", "Public Domain"
	URL                 string `json:"url"`
	RequiresConsent     bool   `json:"requires_consent"`      // download refused unless UserAcceptedLicense is set
	UserAcceptedLicense bool   `json:"user_accepted_license"` // set by the caller after the user accepts
	Format              string `json:"format"`                // "openflights", "ourairports", "csv"
	Version             string `json:"version"`
}
|
||||||
|
|
||||||
|
// LoadResult contains the results of a data loading operation.
// RecordsTotal counts rows that passed basic field validation;
// RecordsNew counts rows actually inserted; RecordsError counts rows
// skipped for parse or insert failures, with details in Errors.
type LoadResult struct {
	Source       string        `json:"source"`
	RecordsTotal int           `json:"records_total"`
	RecordsNew   int           `json:"records_new"`
	RecordsError int           `json:"records_error"`
	Duration     time.Duration `json:"duration"` // wall-clock time of the whole load
	Errors       []string      `json:"errors,omitempty"`
}
|
||||||
|
|
||||||
|
// NewDataLoader creates a new data loader with HTTP client
|
||||||
|
func NewDataLoader(conn *sql.DB) *DataLoader {
|
||||||
|
// Check for insecure TLS environment variable
|
||||||
|
insecureTLS := os.Getenv("SKYVIEW_INSECURE_TLS") == "1"
|
||||||
|
|
||||||
|
transport := &http.Transport{
|
||||||
|
MaxIdleConns: 10,
|
||||||
|
IdleConnTimeout: 90 * time.Second,
|
||||||
|
DisableCompression: false,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Allow insecure certificates if requested
|
||||||
|
if insecureTLS {
|
||||||
|
transport.TLSClientConfig = &tls.Config{
|
||||||
|
InsecureSkipVerify: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return &DataLoader{
|
||||||
|
conn: conn,
|
||||||
|
client: &http.Client{
|
||||||
|
Timeout: 30 * time.Second,
|
||||||
|
Transport: transport,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetAvailableDataSources returns all supported data sources with license
// info. Data is fetched at runtime rather than embedded in the binary,
// which is why the AGPL-licensed OpenFlights sources are listed with
// RequiresConsent false (runtime consumption, not redistribution).
func GetAvailableDataSources() []DataSource {
	return []DataSource{
		{
			Name:            "OpenFlights Airlines",
			License:         "AGPL-3.0",
			URL:             "https://raw.githubusercontent.com/jpatokal/openflights/master/data/airlines.dat",
			RequiresConsent: false, // Runtime data consumption doesn't require explicit consent
			Format:          "openflights",
			Version:         "latest",
		},
		{
			Name:            "OpenFlights Airports",
			License:         "AGPL-3.0",
			URL:             "https://raw.githubusercontent.com/jpatokal/openflights/master/data/airports.dat",
			RequiresConsent: false, // Runtime data consumption doesn't require explicit consent
			Format:          "openflights",
			Version:         "latest",
		},
		{
			Name:            "OurAirports",
			License:         "Public Domain",
			URL:             "https://raw.githubusercontent.com/davidmegginson/ourairports-data/main/airports.csv",
			RequiresConsent: false,
			Format:          "ourairports",
			Version:         "latest",
		},
	}
}
|
||||||
|
|
||||||
|
// LoadDataSource downloads and imports data from an external source
|
||||||
|
func (dl *DataLoader) LoadDataSource(source DataSource) (*LoadResult, error) {
|
||||||
|
result := &LoadResult{
|
||||||
|
Source: source.Name,
|
||||||
|
}
|
||||||
|
startTime := time.Now()
|
||||||
|
defer func() {
|
||||||
|
result.Duration = time.Since(startTime)
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Check license acceptance if required
|
||||||
|
if source.RequiresConsent && !source.UserAcceptedLicense {
|
||||||
|
return nil, fmt.Errorf("user has not accepted license for source: %s (%s)", source.Name, source.License)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Download data
|
||||||
|
resp, err := dl.client.Get(source.URL)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to download data from %s: %v", source.URL, err)
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
return nil, fmt.Errorf("HTTP error downloading data: %s", resp.Status)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse and load data based on format
|
||||||
|
switch source.Format {
|
||||||
|
case "openflights":
|
||||||
|
if strings.Contains(source.Name, "Airlines") {
|
||||||
|
return dl.loadOpenFlightsAirlines(resp.Body, source, result)
|
||||||
|
} else if strings.Contains(source.Name, "Airports") {
|
||||||
|
return dl.loadOpenFlightsAirports(resp.Body, source, result)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("unknown OpenFlights data type: %s", source.Name)
|
||||||
|
|
||||||
|
case "ourairports":
|
||||||
|
return dl.loadOurAirports(resp.Body, source, result)
|
||||||
|
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("unsupported data format: %s", source.Format)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// loadOpenFlightsAirlines loads airline data in OpenFlights format
// (CSV columns: ID, Name, Alias, IATA, ICAO, Callsign, Country, Active).
// The whole import runs in one transaction as a full refresh: rows
// previously loaded from this source are deleted first. Malformed rows are
// counted in result.RecordsError and skipped rather than aborting the load.
func (dl *DataLoader) loadOpenFlightsAirlines(reader io.Reader, source DataSource, result *LoadResult) (*LoadResult, error) {
	tx, err := dl.conn.Begin()
	if err != nil {
		return nil, fmt.Errorf("failed to begin transaction: %v", err)
	}
	// Safe to defer unconditionally: Rollback is a no-op after Commit.
	defer tx.Rollback()

	// Record data source
	if err := dl.recordDataSource(tx, source); err != nil {
		return nil, err
	}

	// Clear existing data from this source
	_, err = tx.Exec(`DELETE FROM airlines WHERE data_source = ?`, source.Name)
	if err != nil {
		return nil, fmt.Errorf("failed to clear existing airline data: %v", err)
	}

	csvReader := csv.NewReader(reader)
	csvReader.FieldsPerRecord = -1 // Variable number of fields

	insertStmt, err := tx.Prepare(`
		INSERT OR REPLACE INTO airlines (id, name, alias, iata_code, icao_code, callsign, country, active, data_source)
		VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
	`)
	if err != nil {
		return nil, fmt.Errorf("failed to prepare insert statement: %v", err)
	}
	defer insertStmt.Close()

	for {
		record, err := csvReader.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			result.RecordsError++
			result.Errors = append(result.Errors, fmt.Sprintf("CSV parse error: %v", err))
			continue
		}

		// Need at least the 7 mandatory columns; Active (col 8) is optional.
		if len(record) < 7 {
			result.RecordsError++
			result.Errors = append(result.Errors, "insufficient fields in record")
			continue
		}

		result.RecordsTotal++

		// Parse OpenFlights airline format:
		// ID, Name, Alias, IATA, ICAO, Callsign, Country, Active
		id, _ := strconv.Atoi(record[0])
		name := strings.Trim(record[1], `"`)
		alias := strings.Trim(record[2], `"`)
		iata := strings.Trim(record[3], `"`)
		icao := strings.Trim(record[4], `"`)
		callsign := strings.Trim(record[5], `"`)
		country := strings.Trim(record[6], `"`)
		active := len(record) > 7 && strings.Trim(record[7], `"`) == "Y"

		// Convert \N (OpenFlights' NULL marker) to empty strings
		if alias == "\\N" { alias = "" }
		if iata == "\\N" { iata = "" }
		if icao == "\\N" { icao = "" }
		if callsign == "\\N" { callsign = "" }

		_, err = insertStmt.Exec(id, name, alias, iata, icao, callsign, country, active, source.Name)
		if err != nil {
			result.RecordsError++
			result.Errors = append(result.Errors, fmt.Sprintf("insert error for airline %s: %v", name, err))
			continue
		}

		result.RecordsNew++
	}

	// Update record count
	_, err = tx.Exec(`UPDATE data_sources SET record_count = ? WHERE name = ?`, result.RecordsNew, source.Name)
	if err != nil {
		return nil, fmt.Errorf("failed to update record count: %v", err)
	}

	return result, tx.Commit()
}
|
||||||
|
|
||||||
|
// loadOpenFlightsAirports loads airport data in OpenFlights format
// (CSV columns: ID, Name, City, Country, IATA, ICAO, Lat, Lon, Alt,
// TZ offset, DST, Timezone, ...). The import runs in one transaction as a
// full refresh of this source's rows; malformed rows are counted in
// result.RecordsError and skipped rather than aborting the load.
func (dl *DataLoader) loadOpenFlightsAirports(reader io.Reader, source DataSource, result *LoadResult) (*LoadResult, error) {
	tx, err := dl.conn.Begin()
	if err != nil {
		return nil, fmt.Errorf("failed to begin transaction: %v", err)
	}
	// Safe to defer unconditionally: Rollback is a no-op after Commit.
	defer tx.Rollback()

	// Record data source
	if err := dl.recordDataSource(tx, source); err != nil {
		return nil, err
	}

	// Clear existing data from this source
	_, err = tx.Exec(`DELETE FROM airports WHERE data_source = ?`, source.Name)
	if err != nil {
		return nil, fmt.Errorf("failed to clear existing airport data: %v", err)
	}

	csvReader := csv.NewReader(reader)
	csvReader.FieldsPerRecord = -1

	insertStmt, err := tx.Prepare(`
		INSERT OR REPLACE INTO airports (id, name, city, country, iata_code, icao_code, latitude, longitude,
			elevation_ft, timezone_offset, dst_type, timezone, data_source)
		VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
	`)
	if err != nil {
		return nil, fmt.Errorf("failed to prepare insert statement: %v", err)
	}
	defer insertStmt.Close()

	for {
		record, err := csvReader.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			result.RecordsError++
			result.Errors = append(result.Errors, fmt.Sprintf("CSV parse error: %v", err))
			continue
		}

		// Need the first 12 columns used below.
		if len(record) < 12 {
			result.RecordsError++
			result.Errors = append(result.Errors, "insufficient fields in airport record")
			continue
		}

		result.RecordsTotal++

		// Parse OpenFlights airport format
		// NOTE: numeric parse errors are silently zeroed (the `_` on err);
		// such rows still import with zero coordinates/elevation.
		id, _ := strconv.Atoi(record[0])
		name := strings.Trim(record[1], `"`)
		city := strings.Trim(record[2], `"`)
		country := strings.Trim(record[3], `"`)
		iata := strings.Trim(record[4], `"`)
		icao := strings.Trim(record[5], `"`)
		lat, _ := strconv.ParseFloat(record[6], 64)
		lon, _ := strconv.ParseFloat(record[7], 64)
		alt, _ := strconv.Atoi(record[8])
		tzOffset, _ := strconv.ParseFloat(record[9], 64)
		dst := strings.Trim(record[10], `"`)
		timezone := strings.Trim(record[11], `"`)

		// Convert \N (OpenFlights' NULL marker) to empty strings
		if iata == "\\N" { iata = "" }
		if icao == "\\N" { icao = "" }
		if dst == "\\N" { dst = "" }
		if timezone == "\\N" { timezone = "" }

		_, err = insertStmt.Exec(id, name, city, country, iata, icao, lat, lon, alt, tzOffset, dst, timezone, source.Name)
		if err != nil {
			result.RecordsError++
			result.Errors = append(result.Errors, fmt.Sprintf("insert error for airport %s: %v", name, err))
			continue
		}

		result.RecordsNew++
	}

	// Update record count
	_, err = tx.Exec(`UPDATE data_sources SET record_count = ? WHERE name = ?`, result.RecordsNew, source.Name)
	if err != nil {
		return nil, fmt.Errorf("failed to update record count: %v", err)
	}

	return result, tx.Commit()
}
|
||||||
|
|
||||||
|
// loadOurAirports loads airport data in OurAirports CSV format
|
||||||
|
func (dl *DataLoader) loadOurAirports(reader io.Reader, source DataSource, result *LoadResult) (*LoadResult, error) {
|
||||||
|
// Start database transaction
|
||||||
|
tx, err := dl.conn.Begin()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to begin transaction: %v", err)
|
||||||
|
}
|
||||||
|
defer tx.Rollback()
|
||||||
|
|
||||||
|
csvReader := csv.NewReader(reader)
|
||||||
|
|
||||||
|
// Read header row
|
||||||
|
headers, err := csvReader.Read()
|
||||||
|
if err != nil {
|
||||||
|
result.RecordsError = 1
|
||||||
|
result.Errors = []string{fmt.Sprintf("Failed to read CSV header: %v", err)}
|
||||||
|
return result, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create header index map for easier field access
|
||||||
|
headerIndex := make(map[string]int)
|
||||||
|
for i, header := range headers {
|
||||||
|
headerIndex[strings.TrimSpace(header)] = i
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prepare statement for airports
|
||||||
|
stmt, err := tx.Prepare(`
|
||||||
|
INSERT OR REPLACE INTO airports (
|
||||||
|
source_id, name, ident, type, icao_code, iata_code,
|
||||||
|
latitude, longitude, elevation_ft, country_code,
|
||||||
|
municipality, continent, scheduled_service,
|
||||||
|
home_link, wikipedia_link, keywords, data_source
|
||||||
|
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
|
`)
|
||||||
|
if err != nil {
|
||||||
|
result.RecordsError = 1
|
||||||
|
result.Errors = []string{fmt.Sprintf("Failed to prepare statement: %v", err)}
|
||||||
|
return result, err
|
||||||
|
}
|
||||||
|
defer stmt.Close()
|
||||||
|
|
||||||
|
// Process each row
|
||||||
|
for {
|
||||||
|
record, err := csvReader.Read()
|
||||||
|
if err == io.EOF {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
result.RecordsError++
|
||||||
|
result.Errors = append(result.Errors, fmt.Sprintf("CSV read error: %v", err))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip rows with insufficient fields
|
||||||
|
if len(record) < len(headerIndex) {
|
||||||
|
result.RecordsError++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract fields using header index
|
||||||
|
sourceID := getFieldByHeader(record, headerIndex, "id")
|
||||||
|
ident := getFieldByHeader(record, headerIndex, "ident")
|
||||||
|
name := getFieldByHeader(record, headerIndex, "name")
|
||||||
|
icaoCode := getFieldByHeader(record, headerIndex, "icao_code")
|
||||||
|
iataCode := getFieldByHeader(record, headerIndex, "iata_code")
|
||||||
|
airportType := getFieldByHeader(record, headerIndex, "type")
|
||||||
|
countryCode := getFieldByHeader(record, headerIndex, "iso_country")
|
||||||
|
municipality := getFieldByHeader(record, headerIndex, "municipality")
|
||||||
|
continent := getFieldByHeader(record, headerIndex, "continent")
|
||||||
|
homeLink := getFieldByHeader(record, headerIndex, "home_link")
|
||||||
|
wikipediaLink := getFieldByHeader(record, headerIndex, "wikipedia_link")
|
||||||
|
keywords := getFieldByHeader(record, headerIndex, "keywords")
|
||||||
|
|
||||||
|
// Parse coordinates
|
||||||
|
var latitude, longitude float64
|
||||||
|
if latStr := getFieldByHeader(record, headerIndex, "latitude_deg"); latStr != "" {
|
||||||
|
if lat, err := strconv.ParseFloat(latStr, 64); err == nil {
|
||||||
|
latitude = lat
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if lngStr := getFieldByHeader(record, headerIndex, "longitude_deg"); lngStr != "" {
|
||||||
|
if lng, err := strconv.ParseFloat(lngStr, 64); err == nil {
|
||||||
|
longitude = lng
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse elevation
|
||||||
|
var elevation int
|
||||||
|
if elevStr := getFieldByHeader(record, headerIndex, "elevation_ft"); elevStr != "" {
|
||||||
|
if elev, err := strconv.Atoi(elevStr); err == nil {
|
||||||
|
elevation = elev
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse scheduled service
|
||||||
|
scheduledService := getFieldByHeader(record, headerIndex, "scheduled_service") == "yes"
|
||||||
|
|
||||||
|
// Insert airport record
|
||||||
|
_, err = stmt.Exec(
|
||||||
|
sourceID, name, ident, airportType, icaoCode, iataCode,
|
||||||
|
latitude, longitude, elevation, countryCode, municipality, continent,
|
||||||
|
scheduledService, homeLink, wikipediaLink, keywords, source.Name,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
result.RecordsError++
|
||||||
|
result.Errors = append(result.Errors, fmt.Sprintf("Insert error for %s: %v", ident, err))
|
||||||
|
} else {
|
||||||
|
result.RecordsNew++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update data source tracking
|
||||||
|
_, err = tx.Exec(`
|
||||||
|
INSERT OR REPLACE INTO data_sources (name, license, url, imported_at, record_count, user_accepted_license)
|
||||||
|
VALUES (?, ?, ?, CURRENT_TIMESTAMP, ?, ?)
|
||||||
|
`, source.Name, source.License, source.URL, result.RecordsNew, source.UserAcceptedLicense)
|
||||||
|
if err != nil {
|
||||||
|
return result, fmt.Errorf("failed to update data source tracking: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return result, tx.Commit()
|
||||||
|
}
|
||||||
|
|
||||||
|
// getFieldByHeader safely gets a field value by header name.
// Returns the trimmed cell for fieldName, or "" when the header is unknown
// or the row is too short to contain that column.
func getFieldByHeader(record []string, headerIndex map[string]int, fieldName string) string {
	idx, ok := headerIndex[fieldName]
	if !ok || idx >= len(record) {
		return ""
	}
	return strings.TrimSpace(record[idx])
}
|
||||||
|
|
||||||
|
// GetLoadedDataSources returns all data sources that have been imported
|
||||||
|
func (dl *DataLoader) GetLoadedDataSources() ([]DataSource, error) {
|
||||||
|
query := `
|
||||||
|
SELECT name, license, url, COALESCE(version, 'latest'), user_accepted_license
|
||||||
|
FROM data_sources
|
||||||
|
ORDER BY name
|
||||||
|
`
|
||||||
|
|
||||||
|
rows, err := dl.conn.Query(query)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var sources []DataSource
|
||||||
|
for rows.Next() {
|
||||||
|
var source DataSource
|
||||||
|
err := rows.Scan(
|
||||||
|
&source.Name,
|
||||||
|
&source.License,
|
||||||
|
&source.URL,
|
||||||
|
&source.Version,
|
||||||
|
&source.UserAcceptedLicense,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
sources = append(sources, source)
|
||||||
|
}
|
||||||
|
|
||||||
|
return sources, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// recordDataSource records information about the data source being imported
|
||||||
|
func (dl *DataLoader) recordDataSource(tx *sql.Tx, source DataSource) error {
|
||||||
|
_, err := tx.Exec(`
|
||||||
|
INSERT OR REPLACE INTO data_sources
|
||||||
|
(name, license, url, version, user_accepted_license)
|
||||||
|
VALUES (?, ?, ?, ?, ?)
|
||||||
|
`, source.Name, source.License, source.URL, source.Version, source.UserAcceptedLicense)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// ClearDataSource removes all data from a specific source
|
||||||
|
func (dl *DataLoader) ClearDataSource(sourceName string) error {
|
||||||
|
tx, err := dl.conn.Begin()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to begin transaction: %v", err)
|
||||||
|
}
|
||||||
|
defer tx.Rollback()
|
||||||
|
|
||||||
|
// Clear from all tables
|
||||||
|
_, err = tx.Exec(`DELETE FROM airlines WHERE data_source = ?`, sourceName)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to clear airlines: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = tx.Exec(`DELETE FROM airports WHERE data_source = ?`, sourceName)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to clear airports: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = tx.Exec(`DELETE FROM data_sources WHERE name = ?`, sourceName)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to clear data source record: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return tx.Commit()
|
||||||
|
}
|
||||||
177
internal/database/loader_test.go
Normal file
177
internal/database/loader_test.go
Normal file
|
|
@ -0,0 +1,177 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestDataLoader_Creation(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
loader := NewDataLoader(db.GetConnection())
|
||||||
|
if loader == nil {
|
||||||
|
t.Fatal("NewDataLoader returned nil")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDataLoader_LoadOpenFlightsAirlines(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
loader := NewDataLoader(db.GetConnection())
|
||||||
|
|
||||||
|
// Create a test data source for OpenFlights Airlines
|
||||||
|
source := DataSource{
|
||||||
|
Name: "OpenFlights Airlines Test",
|
||||||
|
License: "ODbL 1.0",
|
||||||
|
URL: "https://raw.githubusercontent.com/jpatokal/openflights/master/data/airlines.dat",
|
||||||
|
Format: "openflights",
|
||||||
|
Version: "2024-test",
|
||||||
|
}
|
||||||
|
|
||||||
|
result, err := loader.LoadDataSource(source)
|
||||||
|
if err != nil {
|
||||||
|
// Network issues in tests are acceptable
|
||||||
|
if strings.Contains(err.Error(), "connection") ||
|
||||||
|
strings.Contains(err.Error(), "timeout") ||
|
||||||
|
strings.Contains(err.Error(), "no such host") {
|
||||||
|
t.Skipf("Skipping network test due to connectivity issue: %v", err)
|
||||||
|
}
|
||||||
|
t.Fatal("LoadDataSource failed:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if result == nil {
|
||||||
|
t.Fatal("Expected load result, got nil")
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Logf("Loaded airlines: Total=%d, New=%d, Errors=%d, Duration=%v",
|
||||||
|
result.RecordsTotal, result.RecordsNew, result.RecordsError, result.Duration)
|
||||||
|
|
||||||
|
// Verify some data was processed
|
||||||
|
if result.RecordsTotal == 0 {
|
||||||
|
t.Error("No records were processed")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDataLoader_LoadOurAirports(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
loader := NewDataLoader(db.GetConnection())
|
||||||
|
|
||||||
|
// Create a test data source for OurAirports
|
||||||
|
source := DataSource{
|
||||||
|
Name: "OurAirports Test",
|
||||||
|
License: "CC0 1.0",
|
||||||
|
URL: "https://davidmegginson.github.io/ourairports-data/airports.csv",
|
||||||
|
Format: "ourairports",
|
||||||
|
Version: "2024-test",
|
||||||
|
}
|
||||||
|
|
||||||
|
result, err := loader.LoadDataSource(source)
|
||||||
|
if err != nil {
|
||||||
|
// Network issues in tests are acceptable
|
||||||
|
if strings.Contains(err.Error(), "connection") ||
|
||||||
|
strings.Contains(err.Error(), "timeout") ||
|
||||||
|
strings.Contains(err.Error(), "no such host") {
|
||||||
|
t.Skipf("Skipping network test due to connectivity issue: %v", err)
|
||||||
|
}
|
||||||
|
t.Fatal("LoadDataSource failed:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if result != nil {
|
||||||
|
t.Logf("Loaded airports: Total=%d, New=%d, Errors=%d, Duration=%v",
|
||||||
|
result.RecordsTotal, result.RecordsNew, result.RecordsError, result.Duration)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDataLoader_GetLoadedDataSources(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
loader := NewDataLoader(db.GetConnection())
|
||||||
|
|
||||||
|
sources, err := loader.GetLoadedDataSources()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("GetLoadedDataSources failed:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initially should be empty or minimal
|
||||||
|
t.Logf("Found %d loaded data sources", len(sources))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDataLoader_ClearDataSource(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
loader := NewDataLoader(db.GetConnection())
|
||||||
|
|
||||||
|
// Test clearing a non-existent source (should not error)
|
||||||
|
err := loader.ClearDataSource("nonexistent")
|
||||||
|
if err != nil {
|
||||||
|
t.Error("ClearDataSource should not error on nonexistent source:", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDataSource_Struct(t *testing.T) {
|
||||||
|
source := DataSource{
|
||||||
|
Name: "Test Source",
|
||||||
|
License: "Test License",
|
||||||
|
URL: "https://example.com/data.csv",
|
||||||
|
RequiresConsent: false,
|
||||||
|
UserAcceptedLicense: true,
|
||||||
|
Format: "csv",
|
||||||
|
Version: "1.0",
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test that all fields are accessible
|
||||||
|
if source.Name != "Test Source" {
|
||||||
|
t.Error("Name field not preserved")
|
||||||
|
}
|
||||||
|
if source.License != "Test License" {
|
||||||
|
t.Error("License field not preserved")
|
||||||
|
}
|
||||||
|
if source.URL != "https://example.com/data.csv" {
|
||||||
|
t.Error("URL field not preserved")
|
||||||
|
}
|
||||||
|
if source.RequiresConsent != false {
|
||||||
|
t.Error("RequiresConsent field not preserved")
|
||||||
|
}
|
||||||
|
if source.UserAcceptedLicense != true {
|
||||||
|
t.Error("UserAcceptedLicense field not preserved")
|
||||||
|
}
|
||||||
|
if source.Format != "csv" {
|
||||||
|
t.Error("Format field not preserved")
|
||||||
|
}
|
||||||
|
if source.Version != "1.0" {
|
||||||
|
t.Error("Version field not preserved")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoadResult_Struct(t *testing.T) {
|
||||||
|
result := LoadResult{
|
||||||
|
Source: "Test Source",
|
||||||
|
RecordsTotal: 100,
|
||||||
|
RecordsNew: 80,
|
||||||
|
RecordsError: 5,
|
||||||
|
Errors: []string{"error1", "error2"},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test that all fields are accessible
|
||||||
|
if result.Source != "Test Source" {
|
||||||
|
t.Error("Source field not preserved")
|
||||||
|
}
|
||||||
|
if result.RecordsTotal != 100 {
|
||||||
|
t.Error("RecordsTotal field not preserved")
|
||||||
|
}
|
||||||
|
if result.RecordsNew != 80 {
|
||||||
|
t.Error("RecordsNew field not preserved")
|
||||||
|
}
|
||||||
|
if result.RecordsError != 5 {
|
||||||
|
t.Error("RecordsError field not preserved")
|
||||||
|
}
|
||||||
|
if len(result.Errors) != 2 {
|
||||||
|
t.Error("Errors field not preserved")
|
||||||
|
}
|
||||||
|
}
|
||||||
362
internal/database/manager_callsign.go
Normal file
362
internal/database/manager_callsign.go
Normal file
|
|
@ -0,0 +1,362 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"fmt"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// CallsignManager resolves flight callsigns (e.g. "UAL123") to airline
// metadata using the airlines table, with a TTL'd lookup cache in the
// callsign_cache table.
type CallsignManager struct {
	db    *sql.DB      // backing database holding airlines and callsign_cache
	mutex sync.RWMutex // serializes the manager's query/maintenance methods

	// Compiled regex patterns for callsign parsing
	airlinePattern *regexp.Regexp // strict: 2-3 letter airline code + 1-4 digit flight number
	flightPattern  *regexp.Regexp // looser fallback for general flight identification
}
|
||||||
|
|
||||||
|
// CallsignParseResult holds the outcome of splitting a raw callsign into
// an airline prefix and a flight number.
type CallsignParseResult struct {
	OriginalCallsign string    // input exactly as received
	AirlineCode      string    // extracted airline prefix (empty when IsValid is false)
	FlightNumber     string    // numeric portion, possibly with a trailing letter
	IsValid          bool      // true when the callsign matched a parsing pattern
	ParsedTime       time.Time // when the parse was performed
}
|
||||||
|
|
||||||
|
func NewCallsignManager(db *sql.DB) *CallsignManager {
|
||||||
|
return &CallsignManager{
|
||||||
|
db: db,
|
||||||
|
// Match airline code (2-3 letters) followed by flight number (1-4 digits, optional letter)
|
||||||
|
airlinePattern: regexp.MustCompile(`^([A-Z]{2,3})([0-9]{1,4}[A-Z]?)$`),
|
||||||
|
// More flexible pattern for general flight identification
|
||||||
|
flightPattern: regexp.MustCompile(`^([A-Z0-9]+)([0-9]+[A-Z]?)$`),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cm *CallsignManager) ParseCallsign(callsign string) *CallsignParseResult {
|
||||||
|
result := &CallsignParseResult{
|
||||||
|
OriginalCallsign: callsign,
|
||||||
|
ParsedTime: time.Now(),
|
||||||
|
IsValid: false,
|
||||||
|
}
|
||||||
|
|
||||||
|
if callsign == "" {
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clean and normalize the callsign
|
||||||
|
normalized := strings.TrimSpace(strings.ToUpper(callsign))
|
||||||
|
|
||||||
|
// Try airline pattern first (most common for commercial flights)
|
||||||
|
if matches := cm.airlinePattern.FindStringSubmatch(normalized); len(matches) == 3 {
|
||||||
|
result.AirlineCode = matches[1]
|
||||||
|
result.FlightNumber = matches[2]
|
||||||
|
result.IsValid = true
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fall back to general flight pattern
|
||||||
|
if matches := cm.flightPattern.FindStringSubmatch(normalized); len(matches) == 3 {
|
||||||
|
result.AirlineCode = matches[1]
|
||||||
|
result.FlightNumber = matches[2]
|
||||||
|
result.IsValid = true
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetCallsignInfo resolves a callsign to airline/flight metadata.
//
// Lookup order: (1) the callsign_cache table; (2) parse the callsign and
// join against the airlines table. A successful fresh lookup is cached
// asynchronously. Returns a non-nil *CallsignInfo with IsValid=false for
// unparseable callsigns, and an error only for an empty callsign or a
// database failure other than "no rows".
func (cm *CallsignManager) GetCallsignInfo(callsign string) (*CallsignInfo, error) {
	cm.mutex.RLock()
	defer cm.mutex.RUnlock()

	if callsign == "" {
		return nil, fmt.Errorf("empty callsign")
	}

	// First check the cache; any cache error simply falls through to a
	// fresh lookup.
	cached, err := cm.getCallsignFromCache(callsign)
	if err == nil && cached != nil {
		return cached, nil
	}

	// Parse the callsign
	parsed := cm.ParseCallsign(callsign)
	if !parsed.IsValid {
		// Not an error: callers get a result marked invalid.
		return &CallsignInfo{
			OriginalCallsign: callsign,
			IsValid:          false,
		}, nil
	}

	// Look up airline information; ErrNoRows just means unknown airline,
	// which is handled below.
	airline, err := cm.getAirlineByCode(parsed.AirlineCode)
	if err != nil && err != sql.ErrNoRows {
		return nil, fmt.Errorf("failed to lookup airline %s: %w", parsed.AirlineCode, err)
	}

	// Build the result
	info := &CallsignInfo{
		OriginalCallsign: callsign,
		AirlineCode:      parsed.AirlineCode,
		FlightNumber:     parsed.FlightNumber,
		IsValid:          true,
		LastUpdated:      time.Now(),
	}

	if airline != nil {
		info.AirlineName = airline.Name
		info.AirlineCountry = airline.Country
		info.DisplayName = fmt.Sprintf("%s Flight %s", airline.Name, parsed.FlightNumber)
	} else {
		// Unknown airline: fall back to the raw code.
		info.DisplayName = fmt.Sprintf("%s %s", parsed.AirlineCode, parsed.FlightNumber)
	}

	// Cache the result (fire and forget). NOTE(review): the goroutine may
	// run after this method returns and releases the read lock;
	// cacheCallsignInfo takes no lock of its own — confirm this is the
	// intended concurrency model.
	go func() {
		if err := cm.cacheCallsignInfo(info); err != nil {
			// Log error but don't fail the lookup
			fmt.Printf("Warning: failed to cache callsign info for %s: %v\n", callsign, err)
		}
	}()

	return info, nil
}
|
||||||
|
|
||||||
|
func (cm *CallsignManager) getCallsignFromCache(callsign string) (*CallsignInfo, error) {
|
||||||
|
query := `
|
||||||
|
SELECT callsign, airline_icao, flight_number, airline_name,
|
||||||
|
airline_country, '', 1, cached_at, expires_at
|
||||||
|
FROM callsign_cache
|
||||||
|
WHERE callsign = ? AND expires_at > datetime('now')
|
||||||
|
`
|
||||||
|
|
||||||
|
var info CallsignInfo
|
||||||
|
var cacheExpires time.Time
|
||||||
|
|
||||||
|
err := cm.db.QueryRow(query, callsign).Scan(
|
||||||
|
&info.OriginalCallsign,
|
||||||
|
&info.AirlineCode,
|
||||||
|
&info.FlightNumber,
|
||||||
|
&info.AirlineName,
|
||||||
|
&info.AirlineCountry,
|
||||||
|
&info.DisplayName,
|
||||||
|
&info.IsValid,
|
||||||
|
&info.LastUpdated,
|
||||||
|
&cacheExpires,
|
||||||
|
)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &info, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cm *CallsignManager) cacheCallsignInfo(info *CallsignInfo) error {
|
||||||
|
// Cache for 24 hours by default
|
||||||
|
cacheExpires := time.Now().Add(24 * time.Hour)
|
||||||
|
|
||||||
|
query := `
|
||||||
|
INSERT OR REPLACE INTO callsign_cache
|
||||||
|
(callsign, airline_icao, flight_number, airline_name,
|
||||||
|
airline_country, cached_at, expires_at)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||||
|
`
|
||||||
|
|
||||||
|
_, err := cm.db.Exec(query,
|
||||||
|
info.OriginalCallsign,
|
||||||
|
info.AirlineCode,
|
||||||
|
info.FlightNumber,
|
||||||
|
info.AirlineName,
|
||||||
|
info.AirlineCountry,
|
||||||
|
info.LastUpdated,
|
||||||
|
cacheExpires,
|
||||||
|
)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cm *CallsignManager) getAirlineByCode(code string) (*AirlineRecord, error) {
|
||||||
|
query := `
|
||||||
|
SELECT icao_code, iata_code, name, country, active
|
||||||
|
FROM airlines
|
||||||
|
WHERE (icao_code = ? OR iata_code = ?) AND active = 1
|
||||||
|
ORDER BY
|
||||||
|
CASE WHEN icao_code = ? THEN 1 ELSE 2 END,
|
||||||
|
name
|
||||||
|
LIMIT 1
|
||||||
|
`
|
||||||
|
|
||||||
|
var airline AirlineRecord
|
||||||
|
err := cm.db.QueryRow(query, code, code, code).Scan(
|
||||||
|
&airline.ICAOCode,
|
||||||
|
&airline.IATACode,
|
||||||
|
&airline.Name,
|
||||||
|
&airline.Country,
|
||||||
|
&airline.Active,
|
||||||
|
)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &airline, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cm *CallsignManager) GetAirlinesByCountry(country string) ([]AirlineRecord, error) {
|
||||||
|
cm.mutex.RLock()
|
||||||
|
defer cm.mutex.RUnlock()
|
||||||
|
|
||||||
|
query := `
|
||||||
|
SELECT icao_code, iata_code, name, country, active
|
||||||
|
FROM airlines
|
||||||
|
WHERE country = ? AND active = 1
|
||||||
|
ORDER BY name
|
||||||
|
`
|
||||||
|
|
||||||
|
rows, err := cm.db.Query(query, country)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var airlines []AirlineRecord
|
||||||
|
for rows.Next() {
|
||||||
|
var airline AirlineRecord
|
||||||
|
err := rows.Scan(
|
||||||
|
&airline.ICAOCode,
|
||||||
|
&airline.IATACode,
|
||||||
|
&airline.Name,
|
||||||
|
&airline.Country,
|
||||||
|
&airline.Active,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
airlines = append(airlines, airline)
|
||||||
|
}
|
||||||
|
|
||||||
|
return airlines, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cm *CallsignManager) SearchAirlines(query string) ([]AirlineRecord, error) {
|
||||||
|
cm.mutex.RLock()
|
||||||
|
defer cm.mutex.RUnlock()
|
||||||
|
|
||||||
|
searchQuery := `
|
||||||
|
SELECT icao_code, iata_code, name, country, active
|
||||||
|
FROM airlines
|
||||||
|
WHERE (
|
||||||
|
name LIKE ? OR
|
||||||
|
icao_code LIKE ? OR
|
||||||
|
iata_code LIKE ? OR
|
||||||
|
country LIKE ?
|
||||||
|
) AND active = 1
|
||||||
|
ORDER BY
|
||||||
|
CASE
|
||||||
|
WHEN name LIKE ? THEN 1
|
||||||
|
WHEN icao_code = ? OR iata_code = ? THEN 2
|
||||||
|
ELSE 3
|
||||||
|
END,
|
||||||
|
name
|
||||||
|
LIMIT 50
|
||||||
|
`
|
||||||
|
|
||||||
|
searchTerm := "%" + strings.ToUpper(query) + "%"
|
||||||
|
exactTerm := strings.ToUpper(query)
|
||||||
|
|
||||||
|
rows, err := cm.db.Query(searchQuery,
|
||||||
|
searchTerm, searchTerm, searchTerm, searchTerm,
|
||||||
|
exactTerm, exactTerm, exactTerm,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var airlines []AirlineRecord
|
||||||
|
for rows.Next() {
|
||||||
|
var airline AirlineRecord
|
||||||
|
err := rows.Scan(
|
||||||
|
&airline.ICAOCode,
|
||||||
|
&airline.IATACode,
|
||||||
|
&airline.Name,
|
||||||
|
&airline.Country,
|
||||||
|
&airline.Active,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
airlines = append(airlines, airline)
|
||||||
|
}
|
||||||
|
|
||||||
|
return airlines, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cm *CallsignManager) ClearExpiredCache() error {
|
||||||
|
cm.mutex.Lock()
|
||||||
|
defer cm.mutex.Unlock()
|
||||||
|
|
||||||
|
query := `DELETE FROM callsign_cache WHERE expires_at <= datetime('now')`
|
||||||
|
_, err := cm.db.Exec(query)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cm *CallsignManager) GetCacheStats() (map[string]interface{}, error) {
|
||||||
|
cm.mutex.RLock()
|
||||||
|
defer cm.mutex.RUnlock()
|
||||||
|
|
||||||
|
stats := make(map[string]interface{})
|
||||||
|
|
||||||
|
// Total cached entries
|
||||||
|
var totalCached int
|
||||||
|
err := cm.db.QueryRow(`SELECT COUNT(*) FROM callsign_cache`).Scan(&totalCached)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
stats["total_cached"] = totalCached
|
||||||
|
|
||||||
|
// Valid (non-expired) entries
|
||||||
|
var validCached int
|
||||||
|
err = cm.db.QueryRow(`SELECT COUNT(*) FROM callsign_cache WHERE expires_at > datetime('now')`).Scan(&validCached)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
stats["valid_cached"] = validCached
|
||||||
|
|
||||||
|
// Expired entries
|
||||||
|
stats["expired_cached"] = totalCached - validCached
|
||||||
|
|
||||||
|
// Total airlines in database
|
||||||
|
var totalAirlines int
|
||||||
|
err = cm.db.QueryRow(`SELECT COUNT(*) FROM airlines WHERE active = 1`).Scan(&totalAirlines)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
stats["total_airlines"] = totalAirlines
|
||||||
|
|
||||||
|
return stats, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cm *CallsignManager) LoadEmbeddedData() error {
|
||||||
|
// Check if airlines table has data
|
||||||
|
var count int
|
||||||
|
err := cm.db.QueryRow(`SELECT COUNT(*) FROM airlines`).Scan(&count)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if count > 0 {
|
||||||
|
// Data already loaded
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// For now, we'll implement this as a placeholder
|
||||||
|
// In a full implementation, this would load embedded airline data
|
||||||
|
// from embedded files or resources
|
||||||
|
return nil
|
||||||
|
}
|
||||||
268
internal/database/manager_callsign_test.go
Normal file
268
internal/database/manager_callsign_test.go
Normal file
|
|
@ -0,0 +1,268 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestCallsignManager_Creation(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
manager := NewCallsignManager(db.GetConnection())
|
||||||
|
if manager == nil {
|
||||||
|
t.Fatal("NewCallsignManager returned nil")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCallsignManager_ParseCallsign(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
manager := NewCallsignManager(db.GetConnection())
|
||||||
|
|
||||||
|
testCases := []struct {
|
||||||
|
callsign string
|
||||||
|
expectedValid bool
|
||||||
|
expectedAirline string
|
||||||
|
expectedFlight string
|
||||||
|
}{
|
||||||
|
{"UAL123", true, "UAL", "123"},
|
||||||
|
{"BA4567", true, "BA", "4567"},
|
||||||
|
{"AFR89", true, "AFR", "89"},
|
||||||
|
{"N123AB", false, "", ""}, // Aircraft registration, not callsign
|
||||||
|
{"INVALID", false, "", ""}, // No numbers
|
||||||
|
{"123", false, "", ""}, // Only numbers
|
||||||
|
{"A", false, "", ""}, // Too short
|
||||||
|
{"", false, "", ""}, // Empty
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tc := range testCases {
|
||||||
|
result := manager.ParseCallsign(tc.callsign)
|
||||||
|
if result.IsValid != tc.expectedValid {
|
||||||
|
t.Errorf("ParseCallsign(%s): expected valid=%v, got %v",
|
||||||
|
tc.callsign, tc.expectedValid, result.IsValid)
|
||||||
|
}
|
||||||
|
if result.IsValid && result.AirlineCode != tc.expectedAirline {
|
||||||
|
t.Errorf("ParseCallsign(%s): expected airline=%s, got %s",
|
||||||
|
tc.callsign, tc.expectedAirline, result.AirlineCode)
|
||||||
|
}
|
||||||
|
if result.IsValid && result.FlightNumber != tc.expectedFlight {
|
||||||
|
t.Errorf("ParseCallsign(%s): expected flight=%s, got %s",
|
||||||
|
tc.callsign, tc.expectedFlight, result.FlightNumber)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCallsignManager_GetCallsignInfo(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
manager := NewCallsignManager(db.GetConnection())
|
||||||
|
|
||||||
|
// Insert test airline data
|
||||||
|
conn := db.GetConnection()
|
||||||
|
_, err := conn.Exec(`
|
||||||
|
INSERT INTO airlines (id, name, alias, iata_code, icao_code, callsign, country, active, data_source)
|
||||||
|
VALUES (1, 'Test Airways', 'Test', 'TA', 'TST', 'TESTAIR', 'United States', 1, 'test')
|
||||||
|
`)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("Failed to insert test data:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test valid callsign
|
||||||
|
info, err := manager.GetCallsignInfo("TST123")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("GetCallsignInfo failed:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if info == nil {
|
||||||
|
t.Fatal("Expected callsign info, got nil")
|
||||||
|
}
|
||||||
|
|
||||||
|
if info.OriginalCallsign != "TST123" {
|
||||||
|
t.Errorf("Expected callsign TST123, got %s", info.OriginalCallsign)
|
||||||
|
}
|
||||||
|
if info.AirlineCode != "TST" {
|
||||||
|
t.Errorf("Expected airline code TST, got %s", info.AirlineCode)
|
||||||
|
}
|
||||||
|
if info.FlightNumber != "123" {
|
||||||
|
t.Errorf("Expected flight number 123, got %s", info.FlightNumber)
|
||||||
|
}
|
||||||
|
if info.AirlineName != "Test Airways" {
|
||||||
|
t.Errorf("Expected airline name 'Test Airways', got %s", info.AirlineName)
|
||||||
|
}
|
||||||
|
if info.AirlineCountry != "United States" {
|
||||||
|
t.Errorf("Expected airline country 'United States', got %s", info.AirlineCountry)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCallsignManager_GetCallsignInfo_InvalidCallsign(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
manager := NewCallsignManager(db.GetConnection())
|
||||||
|
|
||||||
|
// Test with invalid callsign format
|
||||||
|
info, err := manager.GetCallsignInfo("INVALID")
|
||||||
|
if err != nil {
|
||||||
|
t.Error("GetCallsignInfo should not error on invalid format:", err)
|
||||||
|
}
|
||||||
|
if info == nil {
|
||||||
|
t.Fatal("Expected info structure even for invalid callsign")
|
||||||
|
}
|
||||||
|
if info.IsValid {
|
||||||
|
t.Error("Invalid callsign should not be marked as valid")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test with unknown airline
|
||||||
|
info, err = manager.GetCallsignInfo("UNK123")
|
||||||
|
if err != nil {
|
||||||
|
t.Error("GetCallsignInfo should not error on unknown airline:", err)
|
||||||
|
}
|
||||||
|
if info == nil {
|
||||||
|
t.Fatal("Expected info structure for unknown airline")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCallsignManager_GetCallsignInfo_EmptyCallsign(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
manager := NewCallsignManager(db.GetConnection())
|
||||||
|
|
||||||
|
// Test with empty callsign
|
||||||
|
info, err := manager.GetCallsignInfo("")
|
||||||
|
if err == nil {
|
||||||
|
t.Error("GetCallsignInfo should error on empty callsign")
|
||||||
|
}
|
||||||
|
if info != nil {
|
||||||
|
t.Error("Expected nil info for empty callsign")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCallsignManager_ClearExpiredCache(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
manager := NewCallsignManager(db.GetConnection())
|
||||||
|
|
||||||
|
err := manager.ClearExpiredCache()
|
||||||
|
if err != nil {
|
||||||
|
t.Error("ClearExpiredCache should not error:", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCallsignManager_GetCacheStats(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
manager := NewCallsignManager(db.GetConnection())
|
||||||
|
|
||||||
|
stats, err := manager.GetCacheStats()
|
||||||
|
if err != nil {
|
||||||
|
t.Error("GetCacheStats should not error:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if stats == nil {
|
||||||
|
t.Error("Expected cache stats, got nil")
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Logf("Cache stats: %+v", stats)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCallsignManager_SearchAirlines(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
manager := NewCallsignManager(db.GetConnection())
|
||||||
|
|
||||||
|
// Insert test airline data
|
||||||
|
conn := db.GetConnection()
|
||||||
|
_, err := conn.Exec(`
|
||||||
|
INSERT INTO airlines (id, name, alias, iata_code, icao_code, callsign, country, active, data_source)
|
||||||
|
VALUES (1, 'Test Airways', 'Test', 'TA', 'TST', 'TESTAIR', 'United States', 1, 'test'),
|
||||||
|
(2, 'Another Airline', 'Another', 'AA', 'ANO', 'ANOTHER', 'Canada', 1, 'test')
|
||||||
|
`)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("Failed to insert test data:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Search for airlines
|
||||||
|
airlines, err := manager.SearchAirlines("Test")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("SearchAirlines failed:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
found := false
|
||||||
|
for _, airline := range airlines {
|
||||||
|
if airline.Name == "Test Airways" {
|
||||||
|
found = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !found {
|
||||||
|
t.Error("Expected to find Test Airways in search results")
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Logf("Found %d airlines matching 'Test'", len(airlines))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCallsignManager_GetAirlinesByCountry(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
manager := NewCallsignManager(db.GetConnection())
|
||||||
|
|
||||||
|
// Insert test airline data
|
||||||
|
conn := db.GetConnection()
|
||||||
|
_, err := conn.Exec(`
|
||||||
|
INSERT INTO airlines (id, name, alias, iata_code, icao_code, callsign, country, active, data_source)
|
||||||
|
VALUES (1, 'US Airways', 'US', 'US', 'USA', 'USAIR', 'United States', 1, 'test'),
|
||||||
|
(2, 'Canada Air', 'CA', 'CA', 'CAN', 'CANAIR', 'Canada', 1, 'test')
|
||||||
|
`)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("Failed to insert test data:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get airlines by country
|
||||||
|
airlines, err := manager.GetAirlinesByCountry("United States")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("GetAirlinesByCountry failed:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
found := false
|
||||||
|
for _, airline := range airlines {
|
||||||
|
if airline.Name == "US Airways" {
|
||||||
|
found = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !found {
|
||||||
|
t.Error("Expected to find US Airways for United States")
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Logf("Found %d airlines in United States", len(airlines))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCallsignParseResult_Struct(t *testing.T) {
|
||||||
|
result := &CallsignParseResult{
|
||||||
|
OriginalCallsign: "UAL123",
|
||||||
|
AirlineCode: "UAL",
|
||||||
|
FlightNumber: "123",
|
||||||
|
IsValid: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test that all fields are accessible
|
||||||
|
if result.OriginalCallsign != "UAL123" {
|
||||||
|
t.Error("OriginalCallsign field not preserved")
|
||||||
|
}
|
||||||
|
if result.AirlineCode != "UAL" {
|
||||||
|
t.Error("AirlineCode field not preserved")
|
||||||
|
}
|
||||||
|
if result.FlightNumber != "123" {
|
||||||
|
t.Error("FlightNumber field not preserved")
|
||||||
|
}
|
||||||
|
if !result.IsValid {
|
||||||
|
t.Error("IsValid field not preserved")
|
||||||
|
}
|
||||||
|
}
|
||||||
411
internal/database/manager_history.go
Normal file
411
internal/database/manager_history.go
Normal file
|
|
@ -0,0 +1,411 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"fmt"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// HistoryManager persists aircraft observations to the aircraft_history
// table and runs a background goroutine that periodically purges rows older
// than the configured retention window.
type HistoryManager struct {
	// db is the shared database handle all queries run against.
	db *sql.DB
	// mutex serializes writes and allows concurrent reads through this manager.
	mutex sync.RWMutex

	// Configuration
	// maxHistoryDays is the retention window in days; values <= 0 disable
	// cleanup entirely (see CleanupOldHistory).
	maxHistoryDays int
	// cleanupTicker fires hourly to trigger CleanupOldHistory.
	cleanupTicker *time.Ticker
	// stopCleanup is closed by Close() to stop the cleanup goroutine.
	stopCleanup chan bool
}
|
||||||
|
|
||||||
|
func NewHistoryManager(db *sql.DB, maxHistoryDays int) *HistoryManager {
|
||||||
|
hm := &HistoryManager{
|
||||||
|
db: db,
|
||||||
|
maxHistoryDays: maxHistoryDays,
|
||||||
|
stopCleanup: make(chan bool),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start periodic cleanup (every hour)
|
||||||
|
hm.cleanupTicker = time.NewTicker(1 * time.Hour)
|
||||||
|
go hm.periodicCleanup()
|
||||||
|
|
||||||
|
return hm
|
||||||
|
}
|
||||||
|
|
||||||
|
func (hm *HistoryManager) Close() {
|
||||||
|
if hm.cleanupTicker != nil {
|
||||||
|
hm.cleanupTicker.Stop()
|
||||||
|
}
|
||||||
|
if hm.stopCleanup != nil {
|
||||||
|
close(hm.stopCleanup)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// periodicCleanup runs in its own goroutine, invoking CleanupOldHistory on
// every tick of cleanupTicker until stopCleanup is closed by Close.
func (hm *HistoryManager) periodicCleanup() {
	for {
		select {
		case <-hm.cleanupTicker.C:
			// Cleanup failures are logged but do not stop the loop;
			// the next tick retries.
			if err := hm.CleanupOldHistory(); err != nil {
				fmt.Printf("Warning: failed to cleanup old history: %v\n", err)
			}
		case <-hm.stopCleanup:
			return
		}
	}
}
|
||||||
|
|
||||||
|
// RecordAircraft inserts a single aircraft observation into
// aircraft_history. The caller supplies the timestamp; no deduplication is
// performed — every call adds a new row.
func (hm *HistoryManager) RecordAircraft(record *AircraftHistoryRecord) error {
	hm.mutex.Lock()
	defer hm.mutex.Unlock()

	query := `
		INSERT INTO aircraft_history
		(icao, callsign, squawk, latitude, longitude, altitude,
		vertical_rate, speed, track, source_id, signal_strength, timestamp)
		VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
	`

	// Argument order must match the column list in the INSERT above.
	_, err := hm.db.Exec(query,
		record.ICAO,
		record.Callsign,
		record.Squawk,
		record.Latitude,
		record.Longitude,
		record.Altitude,
		record.VerticalRate,
		record.Speed,
		record.Track,
		record.SourceID,
		record.SignalStrength,
		record.Timestamp,
	)

	return err
}
|
||||||
|
|
||||||
|
// RecordAircraftBatch inserts many observations in one transaction using a
// single prepared statement, which is far cheaper than per-row Exec. The
// batch is all-or-nothing: any failed insert rolls back the whole set.
func (hm *HistoryManager) RecordAircraftBatch(records []AircraftHistoryRecord) error {
	// Empty batch is a no-op.
	if len(records) == 0 {
		return nil
	}

	hm.mutex.Lock()
	defer hm.mutex.Unlock()

	tx, err := hm.db.Begin()
	if err != nil {
		return err
	}
	// Rollback becomes a no-op once Commit succeeds.
	defer tx.Rollback()

	stmt, err := tx.Prepare(`
		INSERT INTO aircraft_history
		(icao, callsign, squawk, latitude, longitude, altitude,
		vertical_rate, speed, track, source_id, signal_strength, timestamp)
		VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
	`)
	if err != nil {
		return err
	}
	defer stmt.Close()

	for _, record := range records {
		// Argument order must match the column list in the INSERT above.
		_, err := stmt.Exec(
			record.ICAO,
			record.Callsign,
			record.Squawk,
			record.Latitude,
			record.Longitude,
			record.Altitude,
			record.VerticalRate,
			record.Speed,
			record.Track,
			record.SourceID,
			record.SignalStrength,
			record.Timestamp,
		)
		if err != nil {
			// Identify the offending record so the caller can diagnose it.
			return fmt.Errorf("failed to insert record for ICAO %s: %w", record.ICAO, err)
		}
	}

	return tx.Commit()
}
|
||||||
|
|
||||||
|
// GetAircraftHistory returns up to 1000 history rows for icao recorded
// within the last `hours` hours, newest first.
func (hm *HistoryManager) GetAircraftHistory(icao string, hours int) ([]AircraftHistoryRecord, error) {
	hm.mutex.RLock()
	defer hm.mutex.RUnlock()

	since := time.Now().Add(-time.Duration(hours) * time.Hour)

	query := `
		SELECT icao, callsign, squawk, latitude, longitude, altitude,
		vertical_rate, speed, track, source_id, signal_strength, timestamp
		FROM aircraft_history
		WHERE icao = ? AND timestamp >= ?
		ORDER BY timestamp DESC
		LIMIT 1000
	`

	rows, err := hm.db.Query(query, icao, since)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var records []AircraftHistoryRecord
	for rows.Next() {
		var record AircraftHistoryRecord
		// Scan targets must match the SELECT column order above.
		err := rows.Scan(
			&record.ICAO,
			&record.Callsign,
			&record.Squawk,
			&record.Latitude,
			&record.Longitude,
			&record.Altitude,
			&record.VerticalRate,
			&record.Speed,
			&record.Track,
			&record.SourceID,
			&record.SignalStrength,
			&record.Timestamp,
		)
		if err != nil {
			return nil, err
		}
		records = append(records, record)
	}

	// rows.Err surfaces any iteration error that Next swallowed.
	return records, rows.Err()
}
|
||||||
|
|
||||||
|
func (hm *HistoryManager) GetAircraftTrack(icao string, hours int) ([]TrackPoint, error) {
|
||||||
|
hm.mutex.RLock()
|
||||||
|
defer hm.mutex.RUnlock()
|
||||||
|
|
||||||
|
since := time.Now().Add(-time.Duration(hours) * time.Hour)
|
||||||
|
|
||||||
|
query := `
|
||||||
|
SELECT latitude, longitude, altitude, timestamp
|
||||||
|
FROM aircraft_history
|
||||||
|
WHERE icao = ? AND timestamp >= ?
|
||||||
|
AND latitude IS NOT NULL AND longitude IS NOT NULL
|
||||||
|
ORDER BY timestamp ASC
|
||||||
|
LIMIT 500
|
||||||
|
`
|
||||||
|
|
||||||
|
rows, err := hm.db.Query(query, icao, since)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var track []TrackPoint
|
||||||
|
for rows.Next() {
|
||||||
|
var point TrackPoint
|
||||||
|
err := rows.Scan(
|
||||||
|
&point.Latitude,
|
||||||
|
&point.Longitude,
|
||||||
|
&point.Altitude,
|
||||||
|
&point.Timestamp,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
track = append(track, point)
|
||||||
|
}
|
||||||
|
|
||||||
|
return track, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (hm *HistoryManager) GetRecentAircraft(hours int, limit int) ([]string, error) {
|
||||||
|
hm.mutex.RLock()
|
||||||
|
defer hm.mutex.RUnlock()
|
||||||
|
|
||||||
|
since := time.Now().Add(-time.Duration(hours) * time.Hour)
|
||||||
|
|
||||||
|
query := `
|
||||||
|
SELECT DISTINCT icao
|
||||||
|
FROM aircraft_history
|
||||||
|
WHERE timestamp >= ?
|
||||||
|
ORDER BY MAX(timestamp) DESC
|
||||||
|
LIMIT ?
|
||||||
|
`
|
||||||
|
|
||||||
|
rows, err := hm.db.Query(query, since, limit)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var icaos []string
|
||||||
|
for rows.Next() {
|
||||||
|
var icao string
|
||||||
|
err := rows.Scan(&icao)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
icaos = append(icaos, icao)
|
||||||
|
}
|
||||||
|
|
||||||
|
return icaos, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (hm *HistoryManager) GetAircraftLastSeen(icao string) (time.Time, error) {
|
||||||
|
hm.mutex.RLock()
|
||||||
|
defer hm.mutex.RUnlock()
|
||||||
|
|
||||||
|
query := `
|
||||||
|
SELECT MAX(timestamp)
|
||||||
|
FROM aircraft_history
|
||||||
|
WHERE icao = ?
|
||||||
|
`
|
||||||
|
|
||||||
|
var lastSeen time.Time
|
||||||
|
err := hm.db.QueryRow(query, icao).Scan(&lastSeen)
|
||||||
|
return lastSeen, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (hm *HistoryManager) CleanupOldHistory() error {
|
||||||
|
hm.mutex.Lock()
|
||||||
|
defer hm.mutex.Unlock()
|
||||||
|
|
||||||
|
if hm.maxHistoryDays <= 0 {
|
||||||
|
return nil // No cleanup if maxHistoryDays is 0 or negative
|
||||||
|
}
|
||||||
|
|
||||||
|
cutoff := time.Now().AddDate(0, 0, -hm.maxHistoryDays)
|
||||||
|
|
||||||
|
query := `DELETE FROM aircraft_history WHERE timestamp < ?`
|
||||||
|
result, err := hm.db.Exec(query, cutoff)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
rowsAffected, err := result.RowsAffected()
|
||||||
|
if err == nil && rowsAffected > 0 {
|
||||||
|
fmt.Printf("Cleaned up %d old aircraft history records\n", rowsAffected)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetStatistics is a compatibility alias for GetHistoryStats; see that
// method for the keys returned.
func (hm *HistoryManager) GetStatistics() (map[string]interface{}, error) {
	return hm.GetHistoryStats()
}
|
||||||
|
|
||||||
|
// GetHistoryStats reports aggregate statistics over the history table:
// "total_records", "unique_aircraft", "recent_records_24h", and — when any
// rows exist — "oldest_record", "newest_record" and "history_days" (the
// span since the oldest row, in whole days).
func (hm *HistoryManager) GetHistoryStats() (map[string]interface{}, error) {
	hm.mutex.RLock()
	defer hm.mutex.RUnlock()

	stats := make(map[string]interface{})

	// Total records
	var totalRecords int
	err := hm.db.QueryRow(`SELECT COUNT(*) FROM aircraft_history`).Scan(&totalRecords)
	if err != nil {
		return nil, err
	}
	stats["total_records"] = totalRecords

	// Unique aircraft
	var uniqueAircraft int
	err = hm.db.QueryRow(`SELECT COUNT(DISTINCT icao) FROM aircraft_history`).Scan(&uniqueAircraft)
	if err != nil {
		return nil, err
	}
	stats["unique_aircraft"] = uniqueAircraft

	// Recent records (last 24 hours)
	var recentRecords int
	since := time.Now().Add(-24 * time.Hour)
	err = hm.db.QueryRow(`SELECT COUNT(*) FROM aircraft_history WHERE timestamp >= ?`, since).Scan(&recentRecords)
	if err != nil {
		return nil, err
	}
	stats["recent_records_24h"] = recentRecords

	// Oldest and newest record timestamps (only if records exist)
	// Guarded by totalRecords: MIN/MAX over an empty table would scan NULL
	// into time.Time. Any scan error here is deliberately swallowed — the
	// timestamp keys are simply omitted.
	if totalRecords > 0 {
		var oldestTimestamp, newestTimestamp time.Time
		err = hm.db.QueryRow(`SELECT MIN(timestamp), MAX(timestamp) FROM aircraft_history`).Scan(&oldestTimestamp, &newestTimestamp)
		if err == nil {
			stats["oldest_record"] = oldestTimestamp
			stats["newest_record"] = newestTimestamp
			stats["history_days"] = int(time.Since(oldestTimestamp).Hours() / 24)
		}
	}

	return stats, nil
}
|
||||||
|
|
||||||
|
// GetActivitySummary summarizes activity over the last `hours` hours:
// "aircraft_count" (distinct ICAOs), "message_count" (total rows), and
// "top_sources" (the five busiest source_ids by row count).
func (hm *HistoryManager) GetActivitySummary(hours int) (map[string]interface{}, error) {
	hm.mutex.RLock()
	defer hm.mutex.RUnlock()

	since := time.Now().Add(-time.Duration(hours) * time.Hour)

	summary := make(map[string]interface{})

	// Aircraft count in time period
	var aircraftCount int
	err := hm.db.QueryRow(`
		SELECT COUNT(DISTINCT icao)
		FROM aircraft_history
		WHERE timestamp >= ?
	`, since).Scan(&aircraftCount)
	if err != nil {
		return nil, err
	}
	summary["aircraft_count"] = aircraftCount

	// Message count in time period
	var messageCount int
	err = hm.db.QueryRow(`
		SELECT COUNT(*)
		FROM aircraft_history
		WHERE timestamp >= ?
	`, since).Scan(&messageCount)
	if err != nil {
		return nil, err
	}
	summary["message_count"] = messageCount

	// Most active sources
	query := `
		SELECT source_id, COUNT(*) as count
		FROM aircraft_history
		WHERE timestamp >= ?
		GROUP BY source_id
		ORDER BY count DESC
		LIMIT 5
	`

	rows, err := hm.db.Query(query, since)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	// Initialized non-nil so "top_sources" serializes as [] rather than null.
	sources := make([]map[string]interface{}, 0)
	for rows.Next() {
		var sourceID string
		var count int
		err := rows.Scan(&sourceID, &count)
		if err != nil {
			return nil, err
		}
		sources = append(sources, map[string]interface{}{
			"source_id": sourceID,
			"count":     count,
		})
	}
	summary["top_sources"] = sources

	return summary, nil
}
|
||||||
|
|
||||||
|
// TrackPoint is a single position sample on an aircraft's flight track, as
// returned by GetAircraftTrack.
type TrackPoint struct {
	Latitude float64 `json:"latitude"`
	Longitude float64 `json:"longitude"`
	// Altitude is nil when the sample carried no altitude information.
	Altitude *int `json:"altitude,omitempty"`
	Timestamp time.Time `json:"timestamp"`
}
|
||||||
419
internal/database/migrations.go
Normal file
419
internal/database/migrations.go
Normal file
|
|
@ -0,0 +1,419 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
	"database/sql"
	"fmt"
	"hash/fnv"
	"sort"
	"strings"
	"time"
)
|
||||||
|
|
||||||
|
// Migrator handles database schema migrations with rollback support.
// Each migration runs inside its own transaction (see applyMigration /
// rollbackMigration), and applied versions are tracked in schema_info.
type Migrator struct {
	// conn is the live database connection migrations run against.
	conn *sql.DB
}
|
||||||
|
|
||||||
|
// Migration represents a database schema change.
type Migration struct {
	// Version orders migrations; they are applied strictly ascending.
	Version int
	// Description is a human-readable summary stored in schema_info.
	Description string
	// Up is the forward SQL; individual statements are separated by ';'.
	Up string
	// Down is the rollback SQL; empty means the migration is irreversible.
	Down string
	// DataLoss marks migrations whose Up or Down destroys data.
	DataLoss bool
	// Checksum is derived from Up (see calculateChecksum) in GetMigrations.
	Checksum string
}
|
||||||
|
|
||||||
|
// MigrationRecord represents a completed migration as recorded in the
// schema_info table.
type MigrationRecord struct {
	Version int `json:"version"` // schema version this record corresponds to
	Description string `json:"description"` // summary copied from the Migration
	AppliedAt time.Time `json:"applied_at"` // when the migration was applied
	Checksum string `json:"checksum"` // checksum of the Up SQL at apply time
}
|
||||||
|
|
||||||
|
// NewMigrator creates a new database migrator operating over the given
// open connection; the caller retains ownership of conn.
func NewMigrator(conn *sql.DB) *Migrator {
	return &Migrator{conn: conn}
}
|
||||||
|
|
||||||
|
// GetMigrations returns all available migrations in version order.
//
// NOTE: each migration's Up SQL is fed to calculateChecksum below, so even
// a whitespace edit to an already-shipped migration changes its recorded
// checksum — add a new version instead of editing an existing one.
func GetMigrations() []Migration {
	migrations := []Migration{
		{
			Version: 1,
			Description: "Initial schema with aircraft history",
			Up: `
			-- Schema metadata table
			CREATE TABLE IF NOT EXISTS schema_info (
				version INTEGER PRIMARY KEY,
				applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
				description TEXT NOT NULL,
				checksum TEXT NOT NULL
			);

			-- Aircraft position history
			CREATE TABLE IF NOT EXISTS aircraft_history (
				id INTEGER PRIMARY KEY AUTOINCREMENT,
				icao TEXT NOT NULL,
				timestamp TIMESTAMP NOT NULL,
				latitude REAL,
				longitude REAL,
				altitude INTEGER,
				speed INTEGER,
				track INTEGER,
				vertical_rate INTEGER,
				squawk TEXT,
				callsign TEXT,
				source_id TEXT NOT NULL,
				signal_strength REAL
			);

			-- Indexes for aircraft history
			CREATE INDEX IF NOT EXISTS idx_aircraft_history_icao_time ON aircraft_history(icao, timestamp);
			CREATE INDEX IF NOT EXISTS idx_aircraft_history_timestamp ON aircraft_history(timestamp);
			CREATE INDEX IF NOT EXISTS idx_aircraft_history_callsign ON aircraft_history(callsign);
			`,
			Down: `
			DROP TABLE IF EXISTS aircraft_history;
			DROP TABLE IF EXISTS schema_info;
			`,
			// Rolling back drops all recorded position history.
			DataLoss: true,
		},
		{
			Version: 2,
			Description: "Add callsign enhancement tables",
			Up: `
			-- Airlines data table (unified schema for all sources)
			CREATE TABLE IF NOT EXISTS airlines (
				id INTEGER PRIMARY KEY,
				name TEXT NOT NULL,
				alias TEXT,
				iata_code TEXT,
				icao_code TEXT,
				callsign TEXT,
				country TEXT,
				country_code TEXT,
				active BOOLEAN DEFAULT 1,
				data_source TEXT NOT NULL DEFAULT 'unknown',
				source_id TEXT, -- Original ID from source
				imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
				updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
			);

			-- Airports data table (unified schema for all sources)
			CREATE TABLE IF NOT EXISTS airports (
				id INTEGER PRIMARY KEY,
				name TEXT NOT NULL,
				ident TEXT, -- Airport identifier (ICAO, FAA, etc.)
				type TEXT, -- Airport type (large_airport, medium_airport, etc.)
				city TEXT,
				municipality TEXT, -- More specific than city
				region TEXT, -- State/province
				country TEXT,
				country_code TEXT, -- ISO country code
				continent TEXT,
				iata_code TEXT,
				icao_code TEXT,
				local_code TEXT,
				gps_code TEXT,
				latitude REAL,
				longitude REAL,
				elevation_ft INTEGER,
				scheduled_service BOOLEAN DEFAULT 0,
				home_link TEXT,
				wikipedia_link TEXT,
				keywords TEXT,
				timezone_offset REAL,
				timezone TEXT,
				dst_type TEXT,
				data_source TEXT NOT NULL DEFAULT 'unknown',
				source_id TEXT, -- Original ID from source
				imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
				updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
			);

			-- External API cache for callsign lookups
			CREATE TABLE IF NOT EXISTS callsign_cache (
				callsign TEXT PRIMARY KEY,
				airline_icao TEXT,
				airline_iata TEXT,
				airline_name TEXT,
				airline_country TEXT,
				flight_number TEXT,
				origin_iata TEXT,
				destination_iata TEXT,
				aircraft_type TEXT,
				route TEXT,
				status TEXT,
				source TEXT NOT NULL DEFAULT 'local',
				cached_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
				expires_at TIMESTAMP NOT NULL
			);

			-- Data source tracking
			CREATE TABLE IF NOT EXISTS data_sources (
				name TEXT PRIMARY KEY,
				license TEXT NOT NULL,
				url TEXT,
				version TEXT,
				imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
				record_count INTEGER DEFAULT 0,
				user_accepted_license BOOLEAN DEFAULT 0
			);

			-- Indexes for airlines
			CREATE INDEX IF NOT EXISTS idx_airlines_icao_code ON airlines(icao_code);
			CREATE INDEX IF NOT EXISTS idx_airlines_iata_code ON airlines(iata_code);
			CREATE INDEX IF NOT EXISTS idx_airlines_callsign ON airlines(callsign);
			CREATE INDEX IF NOT EXISTS idx_airlines_country_code ON airlines(country_code);
			CREATE INDEX IF NOT EXISTS idx_airlines_active ON airlines(active);
			CREATE INDEX IF NOT EXISTS idx_airlines_source ON airlines(data_source);

			-- Indexes for airports
			CREATE INDEX IF NOT EXISTS idx_airports_icao_code ON airports(icao_code);
			CREATE INDEX IF NOT EXISTS idx_airports_iata_code ON airports(iata_code);
			CREATE INDEX IF NOT EXISTS idx_airports_ident ON airports(ident);
			CREATE INDEX IF NOT EXISTS idx_airports_country_code ON airports(country_code);
			CREATE INDEX IF NOT EXISTS idx_airports_type ON airports(type);
			CREATE INDEX IF NOT EXISTS idx_airports_coords ON airports(latitude, longitude);
			CREATE INDEX IF NOT EXISTS idx_airports_source ON airports(data_source);

			-- Indexes for callsign cache
			CREATE INDEX IF NOT EXISTS idx_callsign_cache_expires ON callsign_cache(expires_at);
			CREATE INDEX IF NOT EXISTS idx_callsign_cache_airline ON callsign_cache(airline_icao);
			`,
			Down: `
			DROP TABLE IF EXISTS callsign_cache;
			DROP TABLE IF EXISTS airports;
			DROP TABLE IF EXISTS airlines;
			DROP TABLE IF EXISTS data_sources;
			`,
			// Rolling back drops all imported reference data.
			DataLoss: true,
		},
		// Future migrations will be added here
	}

	// Calculate checksums
	for i := range migrations {
		migrations[i].Checksum = calculateChecksum(migrations[i].Up)
	}

	// Sort by version
	sort.Slice(migrations, func(i, j int) bool {
		return migrations[i].Version < migrations[j].Version
	})

	return migrations
}
|
||||||
|
|
||||||
|
// MigrateToLatest runs all pending migrations to bring database to latest schema
|
||||||
|
func (m *Migrator) MigrateToLatest() error {
|
||||||
|
currentVersion, err := m.getCurrentVersion()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to get current version: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
migrations := GetMigrations()
|
||||||
|
|
||||||
|
for _, migration := range migrations {
|
||||||
|
if migration.Version <= currentVersion {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := m.applyMigration(migration); err != nil {
|
||||||
|
return fmt.Errorf("failed to apply migration %d: %v", migration.Version, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MigrateTo runs migrations to bring the schema to exactly targetVersion:
// pending Up migrations when moving forward, Down scripts (newest first)
// when rolling back. Rolling back past a DataLoss migration destroys data.
func (m *Migrator) MigrateTo(targetVersion int) error {
	currentVersion, err := m.getCurrentVersion()
	if err != nil {
		return fmt.Errorf("failed to get current version: %v", err)
	}

	if targetVersion == currentVersion {
		return nil // Already at target version
	}

	migrations := GetMigrations()

	if targetVersion > currentVersion {
		// Forward migration
		for _, migration := range migrations {
			// Only versions in (currentVersion, targetVersion] apply.
			if migration.Version <= currentVersion || migration.Version > targetVersion {
				continue
			}

			if err := m.applyMigration(migration); err != nil {
				return fmt.Errorf("failed to apply migration %d: %v", migration.Version, err)
			}
		}
	} else {
		// Rollback migration
		// Sort in reverse order for rollback
		sort.Slice(migrations, func(i, j int) bool {
			return migrations[i].Version > migrations[j].Version
		})

		for _, migration := range migrations {
			// Only versions in (targetVersion, currentVersion] roll back.
			if migration.Version > currentVersion || migration.Version <= targetVersion {
				continue
			}

			if err := m.rollbackMigration(migration); err != nil {
				return fmt.Errorf("failed to rollback migration %d: %v", migration.Version, err)
			}
		}
	}

	return nil
}
|
||||||
|
|
||||||
|
// GetAppliedMigrations returns all migrations that have been applied
|
||||||
|
func (m *Migrator) GetAppliedMigrations() ([]MigrationRecord, error) {
|
||||||
|
// Ensure schema_info table exists
|
||||||
|
if err := m.ensureSchemaInfoTable(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
query := `
|
||||||
|
SELECT version, description, applied_at, checksum
|
||||||
|
FROM schema_info
|
||||||
|
ORDER BY version
|
||||||
|
`
|
||||||
|
|
||||||
|
rows, err := m.conn.Query(query)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to query applied migrations: %v", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var migrations []MigrationRecord
|
||||||
|
for rows.Next() {
|
||||||
|
var migration MigrationRecord
|
||||||
|
err := rows.Scan(
|
||||||
|
&migration.Version,
|
||||||
|
&migration.Description,
|
||||||
|
&migration.AppliedAt,
|
||||||
|
&migration.Checksum,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to scan migration record: %v", err)
|
||||||
|
}
|
||||||
|
migrations = append(migrations, migration)
|
||||||
|
}
|
||||||
|
|
||||||
|
return migrations, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// getCurrentVersion returns the highest applied migration version
|
||||||
|
func (m *Migrator) getCurrentVersion() (int, error) {
|
||||||
|
if err := m.ensureSchemaInfoTable(); err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var version int
|
||||||
|
err := m.conn.QueryRow(`SELECT COALESCE(MAX(version), 0) FROM schema_info`).Scan(&version)
|
||||||
|
if err != nil {
|
||||||
|
return 0, fmt.Errorf("failed to get current version: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return version, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// applyMigration executes a migration's Up SQL and records it in
// schema_info, all inside one transaction so a failure leaves the schema
// untouched.
//
// NOTE(review): statements are split on ';', which would break any future
// migration that embeds a semicolon inside a string literal or trigger
// body. The current migrations avoid this — confirm before adding such SQL.
func (m *Migrator) applyMigration(migration Migration) error {
	tx, err := m.conn.Begin()
	if err != nil {
		return fmt.Errorf("failed to begin transaction: %v", err)
	}
	// Rollback becomes a no-op once Commit succeeds.
	defer tx.Rollback()

	// Warn about data loss
	if migration.DataLoss {
		// In a real application, this would show a warning to the user
		// For now, we'll just log it
	}

	// Execute migration SQL
	statements := strings.Split(migration.Up, ";")
	for _, stmt := range statements {
		stmt = strings.TrimSpace(stmt)
		// Skip the empty fragment after the trailing semicolon.
		if stmt == "" {
			continue
		}

		if _, err := tx.Exec(stmt); err != nil {
			return fmt.Errorf("failed to execute migration statement: %v", err)
		}
	}

	// Record migration
	_, err = tx.Exec(`
		INSERT INTO schema_info (version, description, checksum)
		VALUES (?, ?, ?)
	`, migration.Version, migration.Description, migration.Checksum)

	if err != nil {
		return fmt.Errorf("failed to record migration: %v", err)
	}

	return tx.Commit()
}
|
||||||
|
|
||||||
|
// rollbackMigration executes a migration's Down SQL and removes its
// schema_info record, all inside one transaction.
//
// NOTE(review): like applyMigration, this splits on ';' and would break on
// SQL embedding semicolons in string literals or trigger bodies.
func (m *Migrator) rollbackMigration(migration Migration) error {
	// A migration without a Down script is irreversible.
	if migration.Down == "" {
		return fmt.Errorf("migration %d has no rollback script", migration.Version)
	}

	tx, err := m.conn.Begin()
	if err != nil {
		return fmt.Errorf("failed to begin transaction: %v", err)
	}
	// Rollback becomes a no-op once Commit succeeds.
	defer tx.Rollback()

	// Execute rollback SQL
	statements := strings.Split(migration.Down, ";")
	for _, stmt := range statements {
		stmt = strings.TrimSpace(stmt)
		// Skip the empty fragment after the trailing semicolon.
		if stmt == "" {
			continue
		}

		if _, err := tx.Exec(stmt); err != nil {
			return fmt.Errorf("failed to execute rollback statement: %v", err)
		}
	}

	// Remove migration record
	_, err = tx.Exec(`DELETE FROM schema_info WHERE version = ?`, migration.Version)
	if err != nil {
		return fmt.Errorf("failed to remove migration record: %v", err)
	}

	return tx.Commit()
}
|
||||||
|
|
||||||
|
// ensureSchemaInfoTable creates the schema_info table if it doesn't exist
|
||||||
|
func (m *Migrator) ensureSchemaInfoTable() error {
|
||||||
|
_, err := m.conn.Exec(`
|
||||||
|
CREATE TABLE IF NOT EXISTS schema_info (
|
||||||
|
version INTEGER PRIMARY KEY,
|
||||||
|
applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
description TEXT NOT NULL,
|
||||||
|
checksum TEXT NOT NULL
|
||||||
|
)
|
||||||
|
`)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// calculateChecksum generates a checksum for migration content.
//
// Uses the FNV-1a 64-bit hash over the raw bytes so that any content
// change — not just a length change — produces a different checksum.
// (The previous implementation hashed only len(content), so any two
// equal-length migrations collided.)
//
// NOTE: checksums recorded by the old implementation will not match
// values produced here; existing schema_info rows may need refreshing.
func calculateChecksum(content string) string {
	const (
		offsetBasis uint64 = 14695981039346656037 // FNV-1a 64-bit offset basis
		prime       uint64 = 1099511628211        // FNV-1a 64-bit prime
	)
	h := offsetBasis
	for i := 0; i < len(content); i++ {
		h ^= uint64(content[i])
		h *= prime
	}
	return fmt.Sprintf("%016x", h)
}
|
||||||
208
internal/database/optimization.go
Normal file
208
internal/database/optimization.go
Normal file
|
|
@ -0,0 +1,208 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// OptimizationManager handles database storage optimization using SQLite built-in features
type OptimizationManager struct {
	db         *Database // handle used to reach the underlying SQL connection
	config     *Config   // supplies Path (file-size checks) and VacuumInterval
	lastVacuum time.Time // zero until the first VACUUM; gates PerformMaintenance
}
|
||||||
|
|
||||||
|
// NewOptimizationManager creates a new optimization manager
|
||||||
|
func NewOptimizationManager(db *Database, config *Config) *OptimizationManager {
|
||||||
|
return &OptimizationManager{
|
||||||
|
db: db,
|
||||||
|
config: config,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// PerformMaintenance runs database maintenance tasks including VACUUM
|
||||||
|
func (om *OptimizationManager) PerformMaintenance() error {
|
||||||
|
now := time.Now()
|
||||||
|
|
||||||
|
// Check if VACUUM is needed
|
||||||
|
if om.config.VacuumInterval > 0 && now.Sub(om.lastVacuum) >= om.config.VacuumInterval {
|
||||||
|
if err := om.VacuumDatabase(); err != nil {
|
||||||
|
return fmt.Errorf("vacuum failed: %w", err)
|
||||||
|
}
|
||||||
|
om.lastVacuum = now
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// VacuumDatabase performs VACUUM to reclaim space and optimize database
|
||||||
|
func (om *OptimizationManager) VacuumDatabase() error {
|
||||||
|
conn := om.db.GetConnection()
|
||||||
|
if conn == nil {
|
||||||
|
return fmt.Errorf("database connection not available")
|
||||||
|
}
|
||||||
|
|
||||||
|
start := time.Now()
|
||||||
|
|
||||||
|
// Get size before VACUUM
|
||||||
|
sizeBefore, err := om.getDatabaseSize()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to get database size: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Perform VACUUM
|
||||||
|
if _, err := conn.Exec("VACUUM"); err != nil {
|
||||||
|
return fmt.Errorf("VACUUM operation failed: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get size after VACUUM
|
||||||
|
sizeAfter, err := om.getDatabaseSize()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to get database size after VACUUM: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
duration := time.Since(start)
|
||||||
|
savedBytes := sizeBefore - sizeAfter
|
||||||
|
savedPercent := float64(savedBytes) / float64(sizeBefore) * 100
|
||||||
|
|
||||||
|
fmt.Printf("VACUUM completed in %v: %.1f MB → %.1f MB (saved %.1f MB, %.1f%%)\n",
|
||||||
|
duration,
|
||||||
|
float64(sizeBefore)/(1024*1024),
|
||||||
|
float64(sizeAfter)/(1024*1024),
|
||||||
|
float64(savedBytes)/(1024*1024),
|
||||||
|
savedPercent)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// OptimizeDatabase applies various SQLite optimizations for better storage efficiency
|
||||||
|
func (om *OptimizationManager) OptimizeDatabase() error {
|
||||||
|
conn := om.db.GetConnection()
|
||||||
|
if conn == nil {
|
||||||
|
return fmt.Errorf("database connection not available")
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("Optimizing database for storage efficiency...")
|
||||||
|
|
||||||
|
// Apply storage-friendly pragmas
|
||||||
|
optimizations := []struct{
|
||||||
|
name string
|
||||||
|
query string
|
||||||
|
description string
|
||||||
|
}{
|
||||||
|
{"Auto VACUUM", "PRAGMA auto_vacuum = INCREMENTAL", "Enable incremental auto-vacuum"},
|
||||||
|
{"Incremental VACUUM", "PRAGMA incremental_vacuum", "Reclaim free pages incrementally"},
|
||||||
|
{"Optimize", "PRAGMA optimize", "Update SQLite query planner statistics"},
|
||||||
|
{"Analyze", "ANALYZE", "Update table statistics for better query plans"},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, opt := range optimizations {
|
||||||
|
if _, err := conn.Exec(opt.query); err != nil {
|
||||||
|
fmt.Printf("Warning: %s failed: %v\n", opt.name, err)
|
||||||
|
} else {
|
||||||
|
fmt.Printf("✓ %s: %s\n", opt.name, opt.description)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// OptimizePageSize sets an optimal page size for the database (requires rebuild)
|
||||||
|
func (om *OptimizationManager) OptimizePageSize(pageSize int) error {
|
||||||
|
conn := om.db.GetConnection()
|
||||||
|
if conn == nil {
|
||||||
|
return fmt.Errorf("database connection not available")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check current page size
|
||||||
|
var currentPageSize int
|
||||||
|
if err := conn.QueryRow("PRAGMA page_size").Scan(¤tPageSize); err != nil {
|
||||||
|
return fmt.Errorf("failed to get current page size: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if currentPageSize == pageSize {
|
||||||
|
fmt.Printf("Page size already optimal: %d bytes\n", pageSize)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Optimizing page size: %d → %d bytes (requires VACUUM)\n", currentPageSize, pageSize)
|
||||||
|
|
||||||
|
// Set new page size
|
||||||
|
query := fmt.Sprintf("PRAGMA page_size = %d", pageSize)
|
||||||
|
if _, err := conn.Exec(query); err != nil {
|
||||||
|
return fmt.Errorf("failed to set page size: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// VACUUM to apply the new page size
|
||||||
|
if err := om.VacuumDatabase(); err != nil {
|
||||||
|
return fmt.Errorf("failed to apply page size change: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetOptimizationStats returns current database optimization statistics
//
// File size is always populated; page-level statistics are filled in only
// when a connection is available, on a best-effort basis.
func (om *OptimizationManager) GetOptimizationStats() (*OptimizationStats, error) {
	stats := &OptimizationStats{}

	// Get database size
	size, err := om.getDatabaseSize()
	if err != nil {
		return nil, err
	}
	stats.DatabaseSize = size

	// Get page statistics
	conn := om.db.GetConnection()
	if conn != nil {
		var pageSize, pageCount, freelistCount int
		// NOTE(review): Scan errors are ignored here (best-effort); a
		// failed pragma simply leaves zero values in the stats. Consider
		// surfacing these errors — TODO confirm intended.
		conn.QueryRow("PRAGMA page_size").Scan(&pageSize)
		conn.QueryRow("PRAGMA page_count").Scan(&pageCount)
		conn.QueryRow("PRAGMA freelist_count").Scan(&freelistCount)

		stats.PageSize = pageSize
		stats.PageCount = pageCount
		stats.FreePages = freelistCount
		stats.UsedPages = pageCount - freelistCount

		// Efficiency = share of pages actually holding data (0-100).
		if pageCount > 0 {
			stats.Efficiency = float64(stats.UsedPages) / float64(pageCount) * 100
		}

		// Check auto vacuum setting
		var autoVacuum int
		conn.QueryRow("PRAGMA auto_vacuum").Scan(&autoVacuum)
		stats.AutoVacuumEnabled = autoVacuum > 0
	}

	stats.LastVacuum = om.lastVacuum

	return stats, nil
}
|
||||||
|
|
||||||
|
// OptimizationStats holds database storage optimization statistics
type OptimizationStats struct {
	DatabaseSize      int64     `json:"database_size"`       // on-disk file size in bytes
	PageSize          int       `json:"page_size"`           // bytes per SQLite page
	PageCount         int       `json:"page_count"`          // total pages in the file
	UsedPages         int       `json:"used_pages"`          // page_count minus freelist pages
	FreePages         int       `json:"free_pages"`          // pages on the SQLite freelist
	Efficiency        float64   `json:"efficiency_percent"`  // used/total pages as a 0-100 percentage
	AutoVacuumEnabled bool      `json:"auto_vacuum_enabled"` // true when PRAGMA auto_vacuum > 0
	LastVacuum        time.Time `json:"last_vacuum"`         // zero value if never vacuumed
}
|
||||||
|
|
||||||
|
// getDatabaseSize returns the current database file size in bytes
|
||||||
|
func (om *OptimizationManager) getDatabaseSize() (int64, error) {
|
||||||
|
if om.config.Path == "" {
|
||||||
|
return 0, fmt.Errorf("database path not configured")
|
||||||
|
}
|
||||||
|
|
||||||
|
stat, err := os.Stat(om.config.Path)
|
||||||
|
if err != nil {
|
||||||
|
return 0, fmt.Errorf("failed to stat database file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return stat.Size(), nil
|
||||||
|
}
|
||||||
307
internal/database/optimization_test.go
Normal file
307
internal/database/optimization_test.go
Normal file
|
|
@ -0,0 +1,307 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
	"encoding/json"
	"os"
	"testing"
	"time"
)
|
||||||
|
|
||||||
|
func TestOptimizationManager_VacuumDatabase(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
config := &Config{Path: db.config.Path}
|
||||||
|
optimizer := NewOptimizationManager(db, config)
|
||||||
|
|
||||||
|
err := optimizer.VacuumDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("VacuumDatabase failed:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify vacuum was successful by checking database integrity
|
||||||
|
conn := db.GetConnection()
|
||||||
|
var result string
|
||||||
|
err = conn.QueryRow("PRAGMA integrity_check").Scan(&result)
|
||||||
|
if err != nil {
|
||||||
|
t.Error("Failed to run integrity check:", err)
|
||||||
|
}
|
||||||
|
if result != "ok" {
|
||||||
|
t.Errorf("Database integrity check failed: %s", result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestOptimizationManager_OptimizeDatabase(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
config := &Config{Path: db.config.Path}
|
||||||
|
optimizer := NewOptimizationManager(db, config)
|
||||||
|
|
||||||
|
err := optimizer.OptimizeDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("OptimizeDatabase failed:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check that auto_vacuum was set
|
||||||
|
conn := db.GetConnection()
|
||||||
|
var autoVacuum int
|
||||||
|
err = conn.QueryRow("PRAGMA auto_vacuum").Scan(&autoVacuum)
|
||||||
|
if err != nil {
|
||||||
|
t.Error("Failed to check auto_vacuum setting:", err)
|
||||||
|
}
|
||||||
|
// Should be 2 (INCREMENTAL) after optimization
|
||||||
|
if autoVacuum != 2 {
|
||||||
|
t.Errorf("Expected auto_vacuum = 2 (INCREMENTAL), got %d", autoVacuum)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestOptimizationManager_OptimizePageSize changes the SQLite page size to
// a value different from the current one, verifies the change took effect,
// and then re-applies the same size to exercise the no-op branch.
func TestOptimizationManager_OptimizePageSize(t *testing.T) {
	db, cleanup := setupTestDatabase(t)
	defer cleanup()

	config := &Config{Path: db.config.Path}
	optimizer := NewOptimizationManager(db, config)

	// Get current page size
	conn := db.GetConnection()
	var currentPageSize int
	err := conn.QueryRow("PRAGMA page_size").Scan(&currentPageSize)
	if err != nil {
		t.Fatal("Failed to get current page size:", err)
	}

	// Set a different page size
	targetPageSize := 8192
	if currentPageSize == targetPageSize {
		targetPageSize = 4096 // Use different size if already at target
	}

	err = optimizer.OptimizePageSize(targetPageSize)
	if err != nil {
		t.Fatal("OptimizePageSize failed:", err)
	}

	// Verify page size was changed
	var newPageSize int
	err = conn.QueryRow("PRAGMA page_size").Scan(&newPageSize)
	if err != nil {
		t.Error("Failed to get new page size:", err)
	}
	if newPageSize != targetPageSize {
		t.Errorf("Expected page size %d, got %d", targetPageSize, newPageSize)
	}

	// Test setting same page size (should be no-op)
	err = optimizer.OptimizePageSize(targetPageSize)
	if err != nil {
		t.Error("OptimizePageSize failed for same page size:", err)
	}
}
|
||||||
|
|
||||||
|
// TestOptimizationManager_GetOptimizationStats inserts a row, then checks
// that the reported statistics are populated and internally consistent
// (positive sizes/counts, non-negative page splits, efficiency in 0-100).
func TestOptimizationManager_GetOptimizationStats(t *testing.T) {
	db, cleanup := setupTestDatabase(t)
	defer cleanup()

	config := &Config{Path: db.config.Path}
	optimizer := NewOptimizationManager(db, config)

	// Insert some test data to make stats more meaningful
	conn := db.GetConnection()
	_, err := conn.Exec(`
	INSERT INTO airlines (id, name, alias, iata_code, icao_code, callsign, country, active, data_source)
	VALUES (1, 'Test Airways', 'Test', 'TA', 'TST', 'TESTAIR', 'United States', 1, 'test')
	`)
	if err != nil {
		t.Error("Failed to insert test data:", err)
	}

	stats, err := optimizer.GetOptimizationStats()
	if err != nil {
		t.Fatal("GetOptimizationStats failed:", err)
	}

	if stats == nil {
		t.Fatal("Expected stats, got nil")
	}

	// Check basic stats
	if stats.DatabaseSize <= 0 {
		t.Error("Database size should be greater than 0")
	}
	if stats.PageSize <= 0 {
		t.Error("Page size should be greater than 0")
	}
	if stats.PageCount <= 0 {
		t.Error("Page count should be greater than 0")
	}
	if stats.UsedPages < 0 {
		t.Error("Used pages should be non-negative")
	}
	if stats.FreePages < 0 {
		t.Error("Free pages should be non-negative")
	}
	if stats.Efficiency < 0 || stats.Efficiency > 100 {
		t.Errorf("Efficiency should be between 0-100%%, got %.2f%%", stats.Efficiency)
	}

	t.Logf("Database stats: Size=%d bytes, Pages=%d (used=%d, free=%d), Efficiency=%.1f%%",
		stats.DatabaseSize, stats.PageCount, stats.UsedPages, stats.FreePages, stats.Efficiency)
}
|
||||||
|
|
||||||
|
func TestOptimizationManager_PerformMaintenance(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
config := &Config{
|
||||||
|
Path: db.config.Path,
|
||||||
|
VacuumInterval: time.Millisecond, // Very short interval for testing
|
||||||
|
}
|
||||||
|
optimizer := NewOptimizationManager(db, config)
|
||||||
|
|
||||||
|
// Should perform vacuum due to short interval
|
||||||
|
err := optimizer.PerformMaintenance()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("PerformMaintenance failed:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check that lastVacuum was updated
|
||||||
|
if optimizer.lastVacuum.IsZero() {
|
||||||
|
t.Error("lastVacuum should be set after maintenance")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wait a bit and run again with longer interval
|
||||||
|
config.VacuumInterval = time.Hour // Long interval
|
||||||
|
err = optimizer.PerformMaintenance()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("Second PerformMaintenance failed:", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestOptimizationManager_getDatabaseSize(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
config := &Config{Path: db.config.Path}
|
||||||
|
optimizer := NewOptimizationManager(db, config)
|
||||||
|
|
||||||
|
size, err := optimizer.getDatabaseSize()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("getDatabaseSize failed:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if size <= 0 {
|
||||||
|
t.Error("Database size should be greater than 0")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify size matches actual file size
|
||||||
|
stat, err := os.Stat(db.config.Path)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("Failed to stat database file:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if size != stat.Size() {
|
||||||
|
t.Errorf("getDatabaseSize returned %d, but file size is %d", size, stat.Size())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestOptimizationManager_InvalidPath(t *testing.T) {
|
||||||
|
db, cleanup := setupTestDatabase(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
// Test with invalid path
|
||||||
|
config := &Config{Path: "/nonexistent/path/database.db"}
|
||||||
|
optimizer := NewOptimizationManager(db, config)
|
||||||
|
|
||||||
|
_, err := optimizer.getDatabaseSize()
|
||||||
|
if err == nil {
|
||||||
|
t.Error("getDatabaseSize should fail with invalid path")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestOptimizationStats_JSON(t *testing.T) {
|
||||||
|
stats := &OptimizationStats{
|
||||||
|
DatabaseSize: 1024000,
|
||||||
|
PageSize: 4096,
|
||||||
|
PageCount: 250,
|
||||||
|
UsedPages: 200,
|
||||||
|
FreePages: 50,
|
||||||
|
Efficiency: 80.0,
|
||||||
|
AutoVacuumEnabled: true,
|
||||||
|
LastVacuum: time.Now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test that all fields are accessible
|
||||||
|
if stats.DatabaseSize != 1024000 {
|
||||||
|
t.Error("DatabaseSize not preserved")
|
||||||
|
}
|
||||||
|
if stats.PageSize != 4096 {
|
||||||
|
t.Error("PageSize not preserved")
|
||||||
|
}
|
||||||
|
if stats.Efficiency != 80.0 {
|
||||||
|
t.Error("Efficiency not preserved")
|
||||||
|
}
|
||||||
|
if !stats.AutoVacuumEnabled {
|
||||||
|
t.Error("AutoVacuumEnabled not preserved")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestOptimizationManager_WithRealData runs the full optimize + vacuum
// sequence against a database containing one row and verifies the data
// survives and auto-vacuum ends up enabled.
func TestOptimizationManager_WithRealData(t *testing.T) {
	db, cleanup := setupTestDatabase(t)
	defer cleanup()

	// Load some real data to make optimization more realistic
	// Skip actual data loading in tests as it requires network access
	// Just insert minimal test data
	conn := db.GetConnection()
	_, err := conn.Exec(`INSERT INTO airlines (id, name, alias, iata_code, icao_code, callsign, country, active, data_source)
	VALUES (1, 'Test Airways', 'Test', 'TA', 'TST', 'TESTAIR', 'United States', 1, 'test')`)
	if err != nil {
		t.Fatal("Failed to insert test data:", err)
	}

	config := &Config{Path: db.config.Path}
	optimizer := NewOptimizationManager(db, config)

	// Get stats before optimization
	statsBefore, err := optimizer.GetOptimizationStats()
	if err != nil {
		t.Fatal("Failed to get stats before optimization:", err)
	}

	// Run optimization
	err = optimizer.OptimizeDatabase()
	if err != nil {
		t.Fatal("OptimizeDatabase failed:", err)
	}

	err = optimizer.VacuumDatabase()
	if err != nil {
		t.Fatal("VacuumDatabase failed:", err)
	}

	// Get stats after optimization
	statsAfter, err := optimizer.GetOptimizationStats()
	if err != nil {
		t.Fatal("Failed to get stats after optimization:", err)
	}

	// Compare efficiency
	t.Logf("Optimization results: %.2f%% → %.2f%% efficiency",
		statsBefore.Efficiency, statsAfter.Efficiency)

	// After optimization, we should have auto-vacuum enabled
	if !statsAfter.AutoVacuumEnabled {
		t.Error("Auto-vacuum should be enabled after optimization")
	}

	// Database should still be functional
	conn = db.GetConnection()
	var count int
	err = conn.QueryRow("SELECT COUNT(*) FROM airlines").Scan(&count)
	if err != nil {
		t.Error("Database not functional after optimization:", err)
	}
	if count == 0 {
		t.Error("Data lost during optimization")
	}
}
|
||||||
174
internal/database/path.go
Normal file
174
internal/database/path.go
Normal file
|
|
@ -0,0 +1,174 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ResolveDatabasePath determines the appropriate database file location
|
||||||
|
// based on configuration, system type, and available permissions
|
||||||
|
func ResolveDatabasePath(configPath string) (string, error) {
|
||||||
|
// Use explicit configuration path if provided
|
||||||
|
if configPath != "" {
|
||||||
|
if err := ensureDirExists(filepath.Dir(configPath)); err != nil {
|
||||||
|
return "", fmt.Errorf("cannot create directory for configured path %s: %v", configPath, err)
|
||||||
|
}
|
||||||
|
return configPath, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try system location first (for services)
|
||||||
|
if systemPath, err := trySystemPath(); err == nil {
|
||||||
|
return systemPath, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try user data directory
|
||||||
|
if userPath, err := tryUserPath(); err == nil {
|
||||||
|
return userPath, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback to current directory
|
||||||
|
return tryCurrentDirPath()
|
||||||
|
}
|
||||||
|
|
||||||
|
// trySystemPath attempts to use system-wide database location
|
||||||
|
func trySystemPath() (string, error) {
|
||||||
|
var systemDir string
|
||||||
|
|
||||||
|
switch runtime.GOOS {
|
||||||
|
case "linux":
|
||||||
|
systemDir = "/var/lib/skyview"
|
||||||
|
case "darwin":
|
||||||
|
systemDir = "/usr/local/var/skyview"
|
||||||
|
case "windows":
|
||||||
|
systemDir = filepath.Join(os.Getenv("PROGRAMDATA"), "skyview")
|
||||||
|
default:
|
||||||
|
return "", fmt.Errorf("system path not supported on %s", runtime.GOOS)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if directory exists and is writable
|
||||||
|
if err := ensureDirExists(systemDir); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
dbPath := filepath.Join(systemDir, "skyview.db")
|
||||||
|
|
||||||
|
// Test write permissions
|
||||||
|
if err := testWritePermissions(dbPath); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
return dbPath, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// tryUserPath attempts to use user data directory
|
||||||
|
func tryUserPath() (string, error) {
|
||||||
|
var userDataDir string
|
||||||
|
|
||||||
|
switch runtime.GOOS {
|
||||||
|
case "linux":
|
||||||
|
if xdgData := os.Getenv("XDG_DATA_HOME"); xdgData != "" {
|
||||||
|
userDataDir = xdgData
|
||||||
|
} else {
|
||||||
|
home, err := os.UserHomeDir()
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
userDataDir = filepath.Join(home, ".local", "share")
|
||||||
|
}
|
||||||
|
case "darwin":
|
||||||
|
home, err := os.UserHomeDir()
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
userDataDir = filepath.Join(home, "Library", "Application Support")
|
||||||
|
case "windows":
|
||||||
|
userDataDir = os.Getenv("APPDATA")
|
||||||
|
if userDataDir == "" {
|
||||||
|
return "", fmt.Errorf("APPDATA environment variable not set")
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return "", fmt.Errorf("user path not supported on %s", runtime.GOOS)
|
||||||
|
}
|
||||||
|
|
||||||
|
skyviewDir := filepath.Join(userDataDir, "skyview")
|
||||||
|
|
||||||
|
if err := ensureDirExists(skyviewDir); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
dbPath := filepath.Join(skyviewDir, "skyview.db")
|
||||||
|
|
||||||
|
// Test write permissions
|
||||||
|
if err := testWritePermissions(dbPath); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
return dbPath, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// tryCurrentDirPath uses current directory as fallback
|
||||||
|
func tryCurrentDirPath() (string, error) {
|
||||||
|
currentDir, err := os.Getwd()
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("cannot get current directory: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
dbPath := filepath.Join(currentDir, "skyview.db")
|
||||||
|
|
||||||
|
// Test write permissions
|
||||||
|
if err := testWritePermissions(dbPath); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
return dbPath, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ensureDirExists creates directory if it doesn't exist
|
||||||
|
func ensureDirExists(dir string) error {
|
||||||
|
if _, err := os.Stat(dir); os.IsNotExist(err) {
|
||||||
|
if err := os.MkdirAll(dir, 0755); err != nil {
|
||||||
|
return fmt.Errorf("cannot create directory %s: %v", dir, err)
|
||||||
|
}
|
||||||
|
} else if err != nil {
|
||||||
|
return fmt.Errorf("cannot access directory %s: %v", dir, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// testWritePermissions verifies write access to the database path
|
||||||
|
func testWritePermissions(dbPath string) error {
|
||||||
|
dir := filepath.Dir(dbPath)
|
||||||
|
|
||||||
|
// Check directory write permissions
|
||||||
|
testFile := filepath.Join(dir, ".skyview_write_test")
|
||||||
|
if err := os.WriteFile(testFile, []byte("test"), 0644); err != nil {
|
||||||
|
return fmt.Errorf("no write permission to directory %s: %v", dir, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clean up test file
|
||||||
|
os.Remove(testFile)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetDatabaseDirectory returns the directory containing the database file
|
||||||
|
func GetDatabaseDirectory(dbPath string) string {
|
||||||
|
return filepath.Dir(dbPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsSystemPath returns true if the database path is in a system location
|
||||||
|
func IsSystemPath(dbPath string) bool {
|
||||||
|
switch runtime.GOOS {
|
||||||
|
case "linux":
|
||||||
|
return filepath.HasPrefix(dbPath, "/var/lib/skyview")
|
||||||
|
case "darwin":
|
||||||
|
return filepath.HasPrefix(dbPath, "/usr/local/var/skyview")
|
||||||
|
case "windows":
|
||||||
|
programData := os.Getenv("PROGRAMDATA")
|
||||||
|
return programData != "" && filepath.HasPrefix(dbPath, filepath.Join(programData, "skyview"))
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
36
internal/database/test_helpers.go
Normal file
36
internal/database/test_helpers.go
Normal file
|
|
@ -0,0 +1,36 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
// setupTestDatabase creates a temporary database for testing
|
||||||
|
func setupTestDatabase(t *testing.T) (*Database, func()) {
|
||||||
|
tempFile, err := os.CreateTemp("", "test_skyview_*.db")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("Failed to create temp database file:", err)
|
||||||
|
}
|
||||||
|
tempFile.Close()
|
||||||
|
|
||||||
|
config := &Config{Path: tempFile.Name()}
|
||||||
|
db, err := NewDatabase(config)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("Failed to create database:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize the database (run migrations)
|
||||||
|
err = db.Initialize()
|
||||||
|
if err != nil {
|
||||||
|
db.Close()
|
||||||
|
os.Remove(tempFile.Name())
|
||||||
|
t.Fatal("Failed to initialize database:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
cleanup := func() {
|
||||||
|
db.Close()
|
||||||
|
os.Remove(tempFile.Name())
|
||||||
|
}
|
||||||
|
|
||||||
|
return db, cleanup
|
||||||
|
}
|
||||||
|
|
@ -27,6 +27,7 @@ import (
|
||||||
"sync"
|
"sync"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"skyview/internal/database"
|
||||||
"skyview/internal/icao"
|
"skyview/internal/icao"
|
||||||
"skyview/internal/modes"
|
"skyview/internal/modes"
|
||||||
"skyview/internal/squawk"
|
"skyview/internal/squawk"
|
||||||
|
|
@ -272,6 +273,7 @@ type Merger struct {
|
||||||
sources map[string]*Source // Source ID -> source information
|
sources map[string]*Source // Source ID -> source information
|
||||||
icaoDB *icao.Database // ICAO country lookup database
|
icaoDB *icao.Database // ICAO country lookup database
|
||||||
squawkDB *squawk.Database // Transponder code lookup database
|
squawkDB *squawk.Database // Transponder code lookup database
|
||||||
|
db *database.Database // Optional persistent database
|
||||||
mu sync.RWMutex // Protects all maps and slices
|
mu sync.RWMutex // Protects all maps and slices
|
||||||
historyLimit int // Maximum history points to retain
|
historyLimit int // Maximum history points to retain
|
||||||
staleTimeout time.Duration // Time before aircraft considered stale (15 seconds)
|
staleTimeout time.Duration // Time before aircraft considered stale (15 seconds)
|
||||||
|
|
@ -295,6 +297,23 @@ type updateMetric struct {
|
||||||
//
|
//
|
||||||
// The merger is ready for immediate use after creation.
|
// The merger is ready for immediate use after creation.
|
||||||
func NewMerger() (*Merger, error) {
|
func NewMerger() (*Merger, error) {
|
||||||
|
return NewMergerWithDatabase(nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewMergerWithDatabase creates a new aircraft data merger with optional database support.
|
||||||
|
//
|
||||||
|
// If a database is provided, aircraft positions will be persisted to the database
|
||||||
|
// for historical analysis and long-term tracking. The database parameter can be nil
|
||||||
|
// to disable persistence.
|
||||||
|
//
|
||||||
|
// Default settings:
|
||||||
|
// - History limit: 500 points per aircraft
|
||||||
|
// - Stale timeout: 15 seconds
|
||||||
|
// - Empty aircraft and source maps
|
||||||
|
// - Update metrics tracking enabled
|
||||||
|
//
|
||||||
|
// The merger is ready for immediate use after creation.
|
||||||
|
func NewMergerWithDatabase(db *database.Database) (*Merger, error) {
|
||||||
icaoDB, err := icao.NewDatabase()
|
icaoDB, err := icao.NewDatabase()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("failed to initialize ICAO database: %w", err)
|
return nil, fmt.Errorf("failed to initialize ICAO database: %w", err)
|
||||||
|
|
@ -307,6 +326,7 @@ func NewMerger() (*Merger, error) {
|
||||||
sources: make(map[string]*Source),
|
sources: make(map[string]*Source),
|
||||||
icaoDB: icaoDB,
|
icaoDB: icaoDB,
|
||||||
squawkDB: squawkDB,
|
squawkDB: squawkDB,
|
||||||
|
db: db,
|
||||||
historyLimit: 500,
|
historyLimit: 500,
|
||||||
staleTimeout: 15 * time.Second, // Aircraft timeout - reasonable for ADS-B tracking
|
staleTimeout: 15 * time.Second, // Aircraft timeout - reasonable for ADS-B tracking
|
||||||
updateMetrics: make(map[uint32]*updateMetric),
|
updateMetrics: make(map[uint32]*updateMetric),
|
||||||
|
|
@ -428,6 +448,11 @@ func (m *Merger) UpdateAircraft(sourceID string, aircraft *modes.Aircraft, signa
|
||||||
|
|
||||||
state.LastUpdate = timestamp
|
state.LastUpdate = timestamp
|
||||||
state.TotalMessages++
|
state.TotalMessages++
|
||||||
|
|
||||||
|
// Persist to database if available and aircraft has position
|
||||||
|
if m.db != nil && aircraft.Latitude != 0 && aircraft.Longitude != 0 {
|
||||||
|
m.saveAircraftToDatabase(aircraft, sourceID, signal, timestamp)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// mergeAircraftData intelligently merges data from multiple sources with conflict resolution.
|
// mergeAircraftData intelligently merges data from multiple sources with conflict resolution.
|
||||||
|
|
@ -1048,6 +1073,49 @@ func (m *Merger) validatePosition(aircraft *modes.Aircraft, state *AircraftState
|
||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// saveAircraftToDatabase persists aircraft position data to the database
|
||||||
|
func (m *Merger) saveAircraftToDatabase(aircraft *modes.Aircraft, sourceID string, signal float64, timestamp time.Time) {
|
||||||
|
// Convert ICAO24 to hex string
|
||||||
|
icaoHex := fmt.Sprintf("%06X", aircraft.ICAO24)
|
||||||
|
|
||||||
|
// Prepare database record
|
||||||
|
record := database.AircraftHistoryRecord{
|
||||||
|
ICAO: icaoHex,
|
||||||
|
Timestamp: timestamp,
|
||||||
|
Latitude: &aircraft.Latitude,
|
||||||
|
Longitude: &aircraft.Longitude,
|
||||||
|
SourceID: sourceID,
|
||||||
|
SignalStrength: &signal,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add optional fields if available
|
||||||
|
if aircraft.Altitude > 0 {
|
||||||
|
record.Altitude = &aircraft.Altitude
|
||||||
|
}
|
||||||
|
if aircraft.GroundSpeed > 0 {
|
||||||
|
record.Speed = &aircraft.GroundSpeed
|
||||||
|
}
|
||||||
|
if aircraft.Track >= 0 && aircraft.Track < 360 {
|
||||||
|
record.Track = &aircraft.Track
|
||||||
|
}
|
||||||
|
if aircraft.VerticalRate != 0 {
|
||||||
|
record.VerticalRate = &aircraft.VerticalRate
|
||||||
|
}
|
||||||
|
if aircraft.Squawk != "" && aircraft.Squawk != "0000" {
|
||||||
|
record.Squawk = &aircraft.Squawk
|
||||||
|
}
|
||||||
|
if aircraft.Callsign != "" {
|
||||||
|
record.Callsign = &aircraft.Callsign
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save to database (non-blocking to avoid slowing down real-time processing)
|
||||||
|
go func() {
|
||||||
|
if err := m.db.GetHistoryManager().RecordAircraft(&record); err != nil {
|
||||||
|
log.Printf("Warning: Failed to save aircraft %s to database: %v", icaoHex, err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
|
||||||
// Close closes the merger and releases resources
|
// Close closes the merger and releases resources
|
||||||
func (m *Merger) Close() error {
|
func (m *Merger) Close() error {
|
||||||
m.mu.Lock()
|
m.mu.Lock()
|
||||||
|
|
|
||||||
|
|
@ -18,8 +18,10 @@ import (
|
||||||
"embed"
|
"embed"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"io/fs"
|
||||||
"log"
|
"log"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"os"
|
||||||
"path"
|
"path"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
@ -29,6 +31,7 @@ import (
|
||||||
"github.com/gorilla/mux"
|
"github.com/gorilla/mux"
|
||||||
"github.com/gorilla/websocket"
|
"github.com/gorilla/websocket"
|
||||||
|
|
||||||
|
"skyview/internal/database"
|
||||||
"skyview/internal/merger"
|
"skyview/internal/merger"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -52,12 +55,13 @@ type OriginConfig struct {
|
||||||
// - Concurrent broadcast system for WebSocket clients
|
// - Concurrent broadcast system for WebSocket clients
|
||||||
// - CORS support for cross-origin web applications
|
// - CORS support for cross-origin web applications
|
||||||
type Server struct {
|
type Server struct {
|
||||||
host string // Bind address for HTTP server
|
host string // Bind address for HTTP server
|
||||||
port int // TCP port for HTTP server
|
port int // TCP port for HTTP server
|
||||||
merger *merger.Merger // Data source for aircraft information
|
merger *merger.Merger // Data source for aircraft information
|
||||||
staticFiles embed.FS // Embedded static web assets
|
database *database.Database // Optional database for persistence
|
||||||
server *http.Server // HTTP server instance
|
staticFiles embed.FS // Embedded static web assets
|
||||||
origin OriginConfig // Geographic reference point
|
server *http.Server // HTTP server instance
|
||||||
|
origin OriginConfig // Geographic reference point
|
||||||
|
|
||||||
// WebSocket management
|
// WebSocket management
|
||||||
wsClients map[*websocket.Conn]bool // Active WebSocket client connections
|
wsClients map[*websocket.Conn]bool // Active WebSocket client connections
|
||||||
|
|
@ -98,15 +102,17 @@ type AircraftUpdate struct {
|
||||||
// - host: Bind address (empty for all interfaces, "localhost" for local only)
|
// - host: Bind address (empty for all interfaces, "localhost" for local only)
|
||||||
// - port: TCP port number for the HTTP server
|
// - port: TCP port number for the HTTP server
|
||||||
// - merger: Data merger instance providing aircraft information
|
// - merger: Data merger instance providing aircraft information
|
||||||
|
// - database: Optional database for persistence and callsign enhancement
|
||||||
// - staticFiles: Embedded filesystem containing web assets
|
// - staticFiles: Embedded filesystem containing web assets
|
||||||
// - origin: Geographic reference point for the map interface
|
// - origin: Geographic reference point for the map interface
|
||||||
//
|
//
|
||||||
// Returns a configured but not yet started server instance.
|
// Returns a configured but not yet started server instance.
|
||||||
func NewWebServer(host string, port int, merger *merger.Merger, staticFiles embed.FS, origin OriginConfig) *Server {
|
func NewWebServer(host string, port int, merger *merger.Merger, database *database.Database, staticFiles embed.FS, origin OriginConfig) *Server {
|
||||||
return &Server{
|
return &Server{
|
||||||
host: host,
|
host: host,
|
||||||
port: port,
|
port: port,
|
||||||
merger: merger,
|
merger: merger,
|
||||||
|
database: database,
|
||||||
staticFiles: staticFiles,
|
staticFiles: staticFiles,
|
||||||
origin: origin,
|
origin: origin,
|
||||||
wsClients: make(map[*websocket.Conn]bool),
|
wsClients: make(map[*websocket.Conn]bool),
|
||||||
|
|
@ -204,6 +210,10 @@ func (s *Server) setupRoutes() http.Handler {
|
||||||
api.HandleFunc("/origin", s.handleGetOrigin).Methods("GET")
|
api.HandleFunc("/origin", s.handleGetOrigin).Methods("GET")
|
||||||
api.HandleFunc("/coverage/{sourceId}", s.handleGetCoverage).Methods("GET")
|
api.HandleFunc("/coverage/{sourceId}", s.handleGetCoverage).Methods("GET")
|
||||||
api.HandleFunc("/heatmap/{sourceId}", s.handleGetHeatmap).Methods("GET")
|
api.HandleFunc("/heatmap/{sourceId}", s.handleGetHeatmap).Methods("GET")
|
||||||
|
// Database API endpoints
|
||||||
|
api.HandleFunc("/database/status", s.handleGetDatabaseStatus).Methods("GET")
|
||||||
|
api.HandleFunc("/database/sources", s.handleGetDataSources).Methods("GET")
|
||||||
|
api.HandleFunc("/callsign/{callsign}", s.handleGetCallsignInfo).Methods("GET")
|
||||||
|
|
||||||
// WebSocket
|
// WebSocket
|
||||||
router.HandleFunc("/ws", s.handleWebSocket)
|
router.HandleFunc("/ws", s.handleWebSocket)
|
||||||
|
|
@ -214,6 +224,8 @@ func (s *Server) setupRoutes() http.Handler {
|
||||||
|
|
||||||
// Main page
|
// Main page
|
||||||
router.HandleFunc("/", s.handleIndex)
|
router.HandleFunc("/", s.handleIndex)
|
||||||
|
// Database status page
|
||||||
|
router.HandleFunc("/database", s.handleDatabasePage)
|
||||||
|
|
||||||
// Enable CORS
|
// Enable CORS
|
||||||
return s.enableCORS(router)
|
return s.enableCORS(router)
|
||||||
|
|
@ -898,3 +910,193 @@ func (s *Server) handleDebugWebSocket(w http.ResponseWriter, r *http.Request) {
|
||||||
w.Header().Set("Content-Type", "application/json")
|
w.Header().Set("Content-Type", "application/json")
|
||||||
json.NewEncoder(w).Encode(response)
|
json.NewEncoder(w).Encode(response)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// handleGetDatabaseStatus returns database status and statistics
|
||||||
|
func (s *Server) handleGetDatabaseStatus(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if s.database == nil {
|
||||||
|
http.Error(w, "Database not available", http.StatusServiceUnavailable)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response := make(map[string]interface{})
|
||||||
|
|
||||||
|
// Get database path and size information
|
||||||
|
dbConfig := s.database.GetConfig()
|
||||||
|
dbPath := dbConfig.Path
|
||||||
|
response["path"] = dbPath
|
||||||
|
|
||||||
|
// Get file size and modification time
|
||||||
|
if stat, err := os.Stat(dbPath); err == nil {
|
||||||
|
response["size_bytes"] = stat.Size()
|
||||||
|
response["size_mb"] = float64(stat.Size()) / (1024 * 1024)
|
||||||
|
response["modified"] = stat.ModTime().Unix()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get optimization statistics
|
||||||
|
optimizer := database.NewOptimizationManager(s.database, dbConfig)
|
||||||
|
if optimizationStats, err := optimizer.GetOptimizationStats(); err == nil {
|
||||||
|
response["efficiency_percent"] = optimizationStats.Efficiency
|
||||||
|
response["page_size"] = optimizationStats.PageSize
|
||||||
|
response["page_count"] = optimizationStats.PageCount
|
||||||
|
response["used_pages"] = optimizationStats.UsedPages
|
||||||
|
response["free_pages"] = optimizationStats.FreePages
|
||||||
|
response["auto_vacuum_enabled"] = optimizationStats.AutoVacuumEnabled
|
||||||
|
if !optimizationStats.LastVacuum.IsZero() {
|
||||||
|
response["last_vacuum"] = optimizationStats.LastVacuum.Unix()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get history statistics
|
||||||
|
historyStats, err := s.database.GetHistoryManager().GetStatistics()
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("Error getting history statistics: %v", err)
|
||||||
|
historyStats = make(map[string]interface{})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get callsign statistics if available
|
||||||
|
callsignStats := make(map[string]interface{})
|
||||||
|
if callsignManager := s.database.GetCallsignManager(); callsignManager != nil {
|
||||||
|
stats, err := callsignManager.GetCacheStats()
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("Error getting callsign statistics: %v", err)
|
||||||
|
} else {
|
||||||
|
callsignStats = stats
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get record counts for reference data
|
||||||
|
var airportCount, airlineCount int
|
||||||
|
s.database.GetConnection().QueryRow(`SELECT COUNT(*) FROM airports`).Scan(&airportCount)
|
||||||
|
s.database.GetConnection().QueryRow(`SELECT COUNT(*) FROM airlines`).Scan(&airlineCount)
|
||||||
|
|
||||||
|
referenceData := make(map[string]interface{})
|
||||||
|
referenceData["airports"] = airportCount
|
||||||
|
referenceData["airlines"] = airlineCount
|
||||||
|
|
||||||
|
response["database_available"] = true
|
||||||
|
response["path"] = dbPath
|
||||||
|
response["reference_data"] = referenceData
|
||||||
|
response["history"] = historyStats
|
||||||
|
response["callsign"] = callsignStats
|
||||||
|
response["timestamp"] = time.Now().Unix()
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
json.NewEncoder(w).Encode(response)
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleGetDataSources returns information about loaded external data sources
|
||||||
|
func (s *Server) handleGetDataSources(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if s.database == nil {
|
||||||
|
http.Error(w, "Database not available", http.StatusServiceUnavailable)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create data loader instance
|
||||||
|
loader := database.NewDataLoader(s.database.GetConnection())
|
||||||
|
|
||||||
|
availableSources := database.GetAvailableDataSources()
|
||||||
|
loadedSources, err := loader.GetLoadedDataSources()
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("Error getting loaded data sources: %v", err)
|
||||||
|
loadedSources = []database.DataSource{}
|
||||||
|
}
|
||||||
|
|
||||||
|
response := map[string]interface{}{
|
||||||
|
"available": availableSources,
|
||||||
|
"loaded": loadedSources,
|
||||||
|
"timestamp": time.Now().Unix(),
|
||||||
|
}
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
json.NewEncoder(w).Encode(response)
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleGetCallsignInfo returns enriched information about a callsign
|
||||||
|
func (s *Server) handleGetCallsignInfo(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if s.database == nil {
|
||||||
|
http.Error(w, "Database not available", http.StatusServiceUnavailable)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract callsign from URL parameters
|
||||||
|
vars := mux.Vars(r)
|
||||||
|
callsign := vars["callsign"]
|
||||||
|
|
||||||
|
if callsign == "" {
|
||||||
|
http.Error(w, "Callsign parameter required", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get callsign information from database
|
||||||
|
callsignInfo, err := s.database.GetCallsignManager().GetCallsignInfo(callsign)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("Error getting callsign info for %s: %v", callsign, err)
|
||||||
|
http.Error(w, "Failed to lookup callsign information", http.StatusInternalServerError)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response := map[string]interface{}{
|
||||||
|
"callsign": callsignInfo,
|
||||||
|
"timestamp": time.Now().Unix(),
|
||||||
|
}
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
json.NewEncoder(w).Encode(response)
|
||||||
|
}
|
||||||
|
|
||||||
|
// debugEmbeddedFiles lists all embedded files for debugging
|
||||||
|
func (s *Server) debugEmbeddedFiles() {
|
||||||
|
log.Println("=== Debugging Embedded Files ===")
|
||||||
|
err := fs.WalkDir(s.staticFiles, ".", func(path string, d fs.DirEntry, err error) error {
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("Error walking %s: %v", path, err)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if !d.IsDir() {
|
||||||
|
info, _ := d.Info()
|
||||||
|
log.Printf("Embedded file: %s (size: %d bytes)", path, info.Size())
|
||||||
|
} else {
|
||||||
|
log.Printf("Embedded dir: %s/", path)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("Error walking embedded files: %v", err)
|
||||||
|
}
|
||||||
|
log.Println("=== End Embedded Files Debug ===")
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleDatabasePage serves the database status page
|
||||||
|
func (s *Server) handleDatabasePage(w http.ResponseWriter, r *http.Request) {
|
||||||
|
// Debug embedded files first
|
||||||
|
s.debugEmbeddedFiles()
|
||||||
|
|
||||||
|
// Try to read the database HTML file from embedded assets
|
||||||
|
data, err := s.staticFiles.ReadFile("static/database.html")
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("Error reading database.html: %v", err)
|
||||||
|
|
||||||
|
// Fallback: serve a simple HTML page with API calls
|
||||||
|
fallbackHTML := `<!DOCTYPE html>
|
||||||
|
<html><head><title>Database Status - SkyView</title></head>
|
||||||
|
<body>
|
||||||
|
<h1>Database Status</h1>
|
||||||
|
<div id="status">Loading...</div>
|
||||||
|
<script>
|
||||||
|
fetch('/api/database/status')
|
||||||
|
.then(r => r.json())
|
||||||
|
.then(data => {
|
||||||
|
document.getElementById('status').innerHTML =
|
||||||
|
'<pre>' + JSON.stringify(data, null, 2) + '</pre>';
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
</body></html>`
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "text/html")
|
||||||
|
w.Write([]byte(fallbackHTML))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "text/html")
|
||||||
|
w.Write(data)
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -61,13 +61,24 @@ if ! go build -ldflags="$LDFLAGS" \
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Build skyview-data utility
|
||||||
|
echo_info "Building skyview-data..."
|
||||||
|
if ! go build -ldflags="$LDFLAGS" \
|
||||||
|
-o "$DEB_DIR/usr/bin/skyview-data" \
|
||||||
|
./cmd/skyview-data; then
|
||||||
|
echo_error "Failed to build skyview-data"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
echo_info "Built binaries:"
|
echo_info "Built binaries:"
|
||||||
echo_info " skyview: $(file "$DEB_DIR/usr/bin/skyview")"
|
echo_info " skyview: $(file "$DEB_DIR/usr/bin/skyview")"
|
||||||
echo_info " beast-dump: $(file "$DEB_DIR/usr/bin/beast-dump")"
|
echo_info " beast-dump: $(file "$DEB_DIR/usr/bin/beast-dump")"
|
||||||
|
echo_info " skyview-data: $(file "$DEB_DIR/usr/bin/skyview-data")"
|
||||||
|
|
||||||
# Set executable permissions
|
# Set executable permissions
|
||||||
chmod +x "$DEB_DIR/usr/bin/skyview"
|
chmod +x "$DEB_DIR/usr/bin/skyview"
|
||||||
chmod +x "$DEB_DIR/usr/bin/beast-dump"
|
chmod +x "$DEB_DIR/usr/bin/beast-dump"
|
||||||
|
chmod +x "$DEB_DIR/usr/bin/skyview-data"
|
||||||
|
|
||||||
# Generate incremental changelog from git history
|
# Generate incremental changelog from git history
|
||||||
echo_info "Generating incremental changelog from git history..."
|
echo_info "Generating incremental changelog from git history..."
|
||||||
|
|
|
||||||
81
scripts/update-database.sh
Executable file
81
scripts/update-database.sh
Executable file
|
|
@ -0,0 +1,81 @@
|
||||||
|
#!/bin/bash
|
||||||
|
# SkyView Database Auto-Update Script
|
||||||
|
# Safe for cron execution - updates only public domain sources
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# Configuration
|
||||||
|
SKYVIEW_DATA_CMD="${SKYVIEW_DATA_CMD:-skyview-data}"
|
||||||
|
LOCK_FILE="${TMPDIR:-/tmp}/skyview-data-update.lock"
|
||||||
|
LOG_FILE="${LOG_FILE:-/var/log/skyview/database-update.log}"
|
||||||
|
|
||||||
|
# Colors for output (disabled if not a tty)
|
||||||
|
if [ -t 1 ]; then
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
NC='\033[0m'
|
||||||
|
else
|
||||||
|
RED=''
|
||||||
|
GREEN=''
|
||||||
|
YELLOW=''
|
||||||
|
NC=''
|
||||||
|
fi
|
||||||
|
|
||||||
|
log() {
|
||||||
|
echo "$(date '+%Y-%m-%d %H:%M:%S') $1" | tee -a "${LOG_FILE}" 2>/dev/null || echo "$(date '+%Y-%m-%d %H:%M:%S') $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
error() {
|
||||||
|
echo -e "${RED}ERROR: $1${NC}" >&2
|
||||||
|
log "ERROR: $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
success() {
|
||||||
|
echo -e "${GREEN}$1${NC}"
|
||||||
|
log "$1"
|
||||||
|
}
|
||||||
|
|
||||||
|
warn() {
|
||||||
|
echo -e "${YELLOW}WARNING: $1${NC}"
|
||||||
|
log "WARNING: $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Check for lock file (prevent concurrent runs)
|
||||||
|
if [ -f "$LOCK_FILE" ]; then
|
||||||
|
if kill -0 "$(cat "$LOCK_FILE")" 2>/dev/null; then
|
||||||
|
error "Another instance is already running (PID: $(cat "$LOCK_FILE"))"
|
||||||
|
exit 1
|
||||||
|
else
|
||||||
|
warn "Removing stale lock file"
|
||||||
|
rm -f "$LOCK_FILE"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Create lock file
|
||||||
|
echo $$ > "$LOCK_FILE"
|
||||||
|
trap 'rm -f "$LOCK_FILE"' EXIT
|
||||||
|
|
||||||
|
log "Starting SkyView database update"
|
||||||
|
|
||||||
|
# Check if skyview-data command exists
|
||||||
|
if ! command -v "$SKYVIEW_DATA_CMD" >/dev/null 2>&1; then
|
||||||
|
error "skyview-data command not found in PATH"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Update database (this will auto-initialize if needed)
|
||||||
|
log "Running: $SKYVIEW_DATA_CMD update"
|
||||||
|
if "$SKYVIEW_DATA_CMD" update; then
|
||||||
|
success "Database update completed successfully"
|
||||||
|
|
||||||
|
# Show status for logging
|
||||||
|
"$SKYVIEW_DATA_CMD" status 2>/dev/null | while IFS= read -r line; do
|
||||||
|
log "STATUS: $line"
|
||||||
|
done
|
||||||
|
|
||||||
|
exit 0
|
||||||
|
else
|
||||||
|
error "Database update failed"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
Loading…
Add table
Add a link
Reference in a new issue