Compare commits
No commits in common. "d80bb3a10fb4908d29f30eeb06f86693424e9fbf" and "7b16327bd2f9ec43df37a87330febb94fc6356be" have entirely different histories.
d80bb3a10f
...
7b16327bd2
17 changed files with 104 additions and 2487 deletions
|
|
@ -28,6 +28,5 @@ import "embed"
|
||||||
// This approach ensures the web interface is always available without requiring
|
// This approach ensures the web interface is always available without requiring
|
||||||
// external file deployment or complicated asset management.
|
// external file deployment or complicated asset management.
|
||||||
//
|
//
|
||||||
// Updated to include database.html for database status page
|
|
||||||
//go:embed static
|
//go:embed static
|
||||||
var Static embed.FS
|
var Static embed.FS
|
||||||
|
|
|
||||||
|
|
@ -566,95 +566,6 @@ body {
|
||||||
color: #00ff88 !important;
|
color: #00ff88 !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Rich callsign display styles */
|
|
||||||
.callsign-display {
|
|
||||||
display: inline-block;
|
|
||||||
}
|
|
||||||
|
|
||||||
.callsign-display.enriched {
|
|
||||||
display: inline-flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: 0.25rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.callsign-code {
|
|
||||||
display: inline-flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 0.25rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.airline-code {
|
|
||||||
color: #00ff88 !important;
|
|
||||||
font-weight: 600;
|
|
||||||
font-family: monospace;
|
|
||||||
background: rgba(0, 255, 136, 0.1);
|
|
||||||
padding: 0.1rem 0.3rem;
|
|
||||||
border-radius: 3px;
|
|
||||||
border: 1px solid rgba(0, 255, 136, 0.3);
|
|
||||||
}
|
|
||||||
|
|
||||||
.flight-number {
|
|
||||||
color: #00a8ff !important;
|
|
||||||
font-weight: 500;
|
|
||||||
font-family: monospace;
|
|
||||||
}
|
|
||||||
|
|
||||||
.callsign-details {
|
|
||||||
font-size: 0.85rem;
|
|
||||||
opacity: 0.9;
|
|
||||||
}
|
|
||||||
|
|
||||||
.airline-name {
|
|
||||||
color: #ffd700 !important;
|
|
||||||
font-weight: 500;
|
|
||||||
}
|
|
||||||
|
|
||||||
.airline-country {
|
|
||||||
color: #cccccc !important;
|
|
||||||
font-size: 0.8rem;
|
|
||||||
opacity: 0.8;
|
|
||||||
}
|
|
||||||
|
|
||||||
.callsign-display.simple {
|
|
||||||
color: #00ff88 !important;
|
|
||||||
font-family: monospace;
|
|
||||||
}
|
|
||||||
|
|
||||||
.callsign-display.no-data {
|
|
||||||
color: #888888 !important;
|
|
||||||
font-style: italic;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Compact callsign for table view */
|
|
||||||
.callsign-compact {
|
|
||||||
color: #00ff88 !important;
|
|
||||||
font-family: monospace;
|
|
||||||
font-weight: 500;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Loading state for callsign enhancement */
|
|
||||||
.callsign-loading {
|
|
||||||
position: relative;
|
|
||||||
}
|
|
||||||
|
|
||||||
.callsign-loading::after {
|
|
||||||
content: '⟳';
|
|
||||||
margin-left: 0.25rem;
|
|
||||||
opacity: 0.6;
|
|
||||||
animation: spin 1s linear infinite;
|
|
||||||
font-size: 0.8rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
@keyframes spin {
|
|
||||||
from { transform: rotate(0deg); }
|
|
||||||
to { transform: rotate(360deg); }
|
|
||||||
}
|
|
||||||
|
|
||||||
.callsign-enhanced {
|
|
||||||
/* Smooth transition when enhanced */
|
|
||||||
transition: all 0.3s ease;
|
|
||||||
}
|
|
||||||
|
|
||||||
.popup-details {
|
.popup-details {
|
||||||
font-size: 0.9rem;
|
font-size: 0.9rem;
|
||||||
color: #ffffff !important;
|
color: #ffffff !important;
|
||||||
|
|
|
||||||
|
|
@ -28,10 +28,7 @@
|
||||||
<body>
|
<body>
|
||||||
<div id="app">
|
<div id="app">
|
||||||
<header class="header">
|
<header class="header">
|
||||||
<h1>SkyView <span class="version-info">v0.0.8</span>
|
<h1>SkyView <span class="version-info">v0.0.8</span> <a href="https://kode.naiv.no/olemd/skyview" target="_blank" class="repo-link" title="Project Repository">⚙</a></h1>
|
||||||
<a href="https://kode.naiv.no/olemd/skyview" target="_blank" class="repo-link" title="Project Repository">⚙</a>
|
|
||||||
<a href="/database" class="repo-link" title="Database Status">📊</a>
|
|
||||||
</h1>
|
|
||||||
|
|
||||||
<!-- Status indicators -->
|
<!-- Status indicators -->
|
||||||
<div class="status-section">
|
<div class="status-section">
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,6 @@ import { WebSocketManager } from './modules/websocket.js?v=2';
|
||||||
import { AircraftManager } from './modules/aircraft-manager.js?v=2';
|
import { AircraftManager } from './modules/aircraft-manager.js?v=2';
|
||||||
import { MapManager } from './modules/map-manager.js?v=2';
|
import { MapManager } from './modules/map-manager.js?v=2';
|
||||||
import { UIManager } from './modules/ui-manager.js?v=2';
|
import { UIManager } from './modules/ui-manager.js?v=2';
|
||||||
import { CallsignManager } from './modules/callsign-manager.js';
|
|
||||||
|
|
||||||
class SkyView {
|
class SkyView {
|
||||||
constructor() {
|
constructor() {
|
||||||
|
|
@ -16,7 +15,6 @@ class SkyView {
|
||||||
this.aircraftManager = null;
|
this.aircraftManager = null;
|
||||||
this.mapManager = null;
|
this.mapManager = null;
|
||||||
this.uiManager = null;
|
this.uiManager = null;
|
||||||
this.callsignManager = null;
|
|
||||||
|
|
||||||
// 3D Radar
|
// 3D Radar
|
||||||
this.radar3d = null;
|
this.radar3d = null;
|
||||||
|
|
@ -39,15 +37,12 @@ class SkyView {
|
||||||
this.uiManager.initializeViews();
|
this.uiManager.initializeViews();
|
||||||
this.uiManager.initializeEventListeners();
|
this.uiManager.initializeEventListeners();
|
||||||
|
|
||||||
// Initialize callsign manager for enriched callsign display
|
|
||||||
this.callsignManager = new CallsignManager();
|
|
||||||
|
|
||||||
// Initialize map manager and get the main map
|
// Initialize map manager and get the main map
|
||||||
this.mapManager = new MapManager();
|
this.mapManager = new MapManager();
|
||||||
const map = await this.mapManager.initializeMap();
|
const map = await this.mapManager.initializeMap();
|
||||||
|
|
||||||
// Initialize aircraft manager with the map and callsign manager
|
// Initialize aircraft manager with the map
|
||||||
this.aircraftManager = new AircraftManager(map, this.callsignManager);
|
this.aircraftManager = new AircraftManager(map);
|
||||||
|
|
||||||
// Set up selected aircraft trail callback
|
// Set up selected aircraft trail callback
|
||||||
this.aircraftManager.setSelectedAircraftCallback((icao) => {
|
this.aircraftManager.setSelectedAircraftCallback((icao) => {
|
||||||
|
|
|
||||||
|
|
@ -1,8 +1,7 @@
|
||||||
// Aircraft marker and data management module
|
// Aircraft marker and data management module
|
||||||
export class AircraftManager {
|
export class AircraftManager {
|
||||||
constructor(map, callsignManager = null) {
|
constructor(map) {
|
||||||
this.map = map;
|
this.map = map;
|
||||||
this.callsignManager = callsignManager;
|
|
||||||
this.aircraftData = new Map();
|
this.aircraftData = new Map();
|
||||||
this.aircraftMarkers = new Map();
|
this.aircraftMarkers = new Map();
|
||||||
this.aircraftTrails = new Map();
|
this.aircraftTrails = new Map();
|
||||||
|
|
@ -229,11 +228,6 @@ export class AircraftManager {
|
||||||
// Handle popup exactly like Leaflet expects
|
// Handle popup exactly like Leaflet expects
|
||||||
if (marker.isPopupOpen()) {
|
if (marker.isPopupOpen()) {
|
||||||
marker.setPopupContent(this.createPopupContent(aircraft));
|
marker.setPopupContent(this.createPopupContent(aircraft));
|
||||||
// Enhance callsign display for updated popup
|
|
||||||
const popupElement = marker.getPopup().getElement();
|
|
||||||
if (popupElement) {
|
|
||||||
this.enhanceCallsignDisplay(popupElement);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
this.markerUpdateCount++;
|
this.markerUpdateCount++;
|
||||||
|
|
@ -256,14 +250,6 @@ export class AircraftManager {
|
||||||
maxWidth: 450,
|
maxWidth: 450,
|
||||||
className: 'aircraft-popup'
|
className: 'aircraft-popup'
|
||||||
});
|
});
|
||||||
|
|
||||||
// Enhance callsign display when popup opens
|
|
||||||
marker.on('popupopen', (e) => {
|
|
||||||
const popupElement = e.popup.getElement();
|
|
||||||
if (popupElement) {
|
|
||||||
this.enhanceCallsignDisplay(popupElement);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
this.aircraftMarkers.set(icao, marker);
|
this.aircraftMarkers.set(icao, marker);
|
||||||
this.markerCreateCount++;
|
this.markerCreateCount++;
|
||||||
|
|
@ -449,7 +435,7 @@ export class AircraftManager {
|
||||||
<div class="flight-info">
|
<div class="flight-info">
|
||||||
<span class="icao-flag">${flag}</span>
|
<span class="icao-flag">${flag}</span>
|
||||||
<span class="flight-id">${aircraft.ICAO24 || 'N/A'}</span>
|
<span class="flight-id">${aircraft.ICAO24 || 'N/A'}</span>
|
||||||
${aircraft.Callsign ? `→ <span class="callsign-loading" data-callsign="${aircraft.Callsign}"><span class="callsign">${aircraft.Callsign}</span></span>` : ''}
|
${aircraft.Callsign ? `→ <span class="callsign">${aircraft.Callsign}</span>` : ''}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
|
@ -525,29 +511,6 @@ export class AircraftManager {
|
||||||
return minDistance === Infinity ? null : minDistance;
|
return minDistance === Infinity ? null : minDistance;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Enhance callsign display in popup after it's created
|
|
||||||
async enhanceCallsignDisplay(popupElement) {
|
|
||||||
if (!this.callsignManager) return;
|
|
||||||
|
|
||||||
const callsignElements = popupElement.querySelectorAll('.callsign-loading');
|
|
||||||
|
|
||||||
for (const element of callsignElements) {
|
|
||||||
const callsign = element.dataset.callsign;
|
|
||||||
if (!callsign) continue;
|
|
||||||
|
|
||||||
try {
|
|
||||||
const callsignInfo = await this.callsignManager.getCallsignInfo(callsign);
|
|
||||||
const richDisplay = this.callsignManager.generateCallsignDisplay(callsignInfo, callsign);
|
|
||||||
element.innerHTML = richDisplay;
|
|
||||||
element.classList.remove('callsign-loading');
|
|
||||||
element.classList.add('callsign-enhanced');
|
|
||||||
} catch (error) {
|
|
||||||
console.warn(`Failed to enhance callsign display for ${callsign}:`, error);
|
|
||||||
// Keep the simple display on error
|
|
||||||
element.classList.remove('callsign-loading');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
toggleTrails() {
|
toggleTrails() {
|
||||||
this.showTrails = !this.showTrails;
|
this.showTrails = !this.showTrails;
|
||||||
|
|
|
||||||
|
|
@ -1,163 +0,0 @@
|
||||||
// Callsign enrichment and display module
|
|
||||||
export class CallsignManager {
|
|
||||||
constructor() {
|
|
||||||
this.callsignCache = new Map();
|
|
||||||
this.pendingRequests = new Map();
|
|
||||||
|
|
||||||
// Rate limiting to avoid overwhelming the API
|
|
||||||
this.lastRequestTime = 0;
|
|
||||||
this.requestInterval = 100; // Minimum 100ms between requests
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get enriched callsign information, using cache when available
|
|
||||||
* @param {string} callsign - The raw callsign to lookup
|
|
||||||
* @returns {Promise<Object>} - Enriched callsign data
|
|
||||||
*/
|
|
||||||
async getCallsignInfo(callsign) {
|
|
||||||
if (!callsign || callsign.trim() === '') {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const cleanCallsign = callsign.trim().toUpperCase();
|
|
||||||
|
|
||||||
// Check cache first
|
|
||||||
if (this.callsignCache.has(cleanCallsign)) {
|
|
||||||
return this.callsignCache.get(cleanCallsign);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if we already have a pending request for this callsign
|
|
||||||
if (this.pendingRequests.has(cleanCallsign)) {
|
|
||||||
return this.pendingRequests.get(cleanCallsign);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Rate limiting
|
|
||||||
const now = Date.now();
|
|
||||||
if (now - this.lastRequestTime < this.requestInterval) {
|
|
||||||
await new Promise(resolve => setTimeout(resolve, this.requestInterval));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create the API request
|
|
||||||
const requestPromise = this.fetchCallsignInfo(cleanCallsign);
|
|
||||||
this.pendingRequests.set(cleanCallsign, requestPromise);
|
|
||||||
|
|
||||||
try {
|
|
||||||
const result = await requestPromise;
|
|
||||||
|
|
||||||
// Cache the result for future use
|
|
||||||
if (result && result.callsign) {
|
|
||||||
this.callsignCache.set(cleanCallsign, result.callsign);
|
|
||||||
}
|
|
||||||
|
|
||||||
return result ? result.callsign : null;
|
|
||||||
} catch (error) {
|
|
||||||
console.warn(`Failed to lookup callsign ${cleanCallsign}:`, error);
|
|
||||||
return null;
|
|
||||||
} finally {
|
|
||||||
// Clean up pending request
|
|
||||||
this.pendingRequests.delete(cleanCallsign);
|
|
||||||
this.lastRequestTime = Date.now();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Fetch callsign information from the API
|
|
||||||
* @param {string} callsign - The callsign to lookup
|
|
||||||
* @returns {Promise<Object>} - API response
|
|
||||||
*/
|
|
||||||
async fetchCallsignInfo(callsign) {
|
|
||||||
const response = await fetch(`/api/callsign/${encodeURIComponent(callsign)}`);
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return await response.json();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Generate rich HTML display for a callsign
|
|
||||||
* @param {Object} callsignInfo - Enriched callsign data from API
|
|
||||||
* @param {string} originalCallsign - Original callsign if API data is null
|
|
||||||
* @returns {string} - HTML string for display
|
|
||||||
*/
|
|
||||||
generateCallsignDisplay(callsignInfo, originalCallsign = '') {
|
|
||||||
if (!callsignInfo || !callsignInfo.is_valid) {
|
|
||||||
// Fallback for invalid or missing callsign data
|
|
||||||
if (originalCallsign) {
|
|
||||||
return `<span class="callsign-display simple">${originalCallsign}</span>`;
|
|
||||||
}
|
|
||||||
return '<span class="callsign-display no-data">N/A</span>';
|
|
||||||
}
|
|
||||||
|
|
||||||
const parts = [];
|
|
||||||
|
|
||||||
// Airline code
|
|
||||||
if (callsignInfo.airline_code) {
|
|
||||||
parts.push(`<span class="airline-code">${callsignInfo.airline_code}</span>`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Flight number
|
|
||||||
if (callsignInfo.flight_number) {
|
|
||||||
parts.push(`<span class="flight-number">${callsignInfo.flight_number}</span>`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Airline name (if available)
|
|
||||||
let airlineInfo = '';
|
|
||||||
if (callsignInfo.airline_name) {
|
|
||||||
airlineInfo = `<span class="airline-name" title="${callsignInfo.airline_name}">
|
|
||||||
${callsignInfo.airline_name}
|
|
||||||
</span>`;
|
|
||||||
|
|
||||||
// Add country if available
|
|
||||||
if (callsignInfo.airline_country) {
|
|
||||||
airlineInfo += ` <span class="airline-country">(${callsignInfo.airline_country})</span>`;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return `
|
|
||||||
<span class="callsign-display enriched">
|
|
||||||
<span class="callsign-code">${parts.join(' ')}</span>
|
|
||||||
${airlineInfo ? `<span class="callsign-details">${airlineInfo}</span>` : ''}
|
|
||||||
</span>
|
|
||||||
`;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Generate compact callsign display for table view
|
|
||||||
* @param {Object} callsignInfo - Enriched callsign data
|
|
||||||
* @param {string} originalCallsign - Original callsign fallback
|
|
||||||
* @returns {string} - Compact HTML for table display
|
|
||||||
*/
|
|
||||||
generateCompactCallsignDisplay(callsignInfo, originalCallsign = '') {
|
|
||||||
if (!callsignInfo || !callsignInfo.is_valid) {
|
|
||||||
return originalCallsign || 'N/A';
|
|
||||||
}
|
|
||||||
|
|
||||||
// For tables, use the display_name or format airline + flight
|
|
||||||
if (callsignInfo.display_name) {
|
|
||||||
return `<span class="callsign-compact" title="${callsignInfo.airline_name || ''}">${callsignInfo.display_name}</span>`;
|
|
||||||
}
|
|
||||||
|
|
||||||
return `<span class="callsign-compact">${callsignInfo.airline_code} ${callsignInfo.flight_number}</span>`;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Clear the callsign cache (useful for memory management)
|
|
||||||
*/
|
|
||||||
clearCache() {
|
|
||||||
this.callsignCache.clear();
|
|
||||||
console.debug('Callsign cache cleared');
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get cache statistics for debugging
|
|
||||||
* @returns {Object} - Cache size and pending requests
|
|
||||||
*/
|
|
||||||
getCacheStats() {
|
|
||||||
return {
|
|
||||||
cacheSize: this.callsignCache.size,
|
|
||||||
pendingRequests: this.pendingRequests.size
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
762
debian/usr/share/doc/skyview-adsb/DATABASE.md
vendored
762
debian/usr/share/doc/skyview-adsb/DATABASE.md
vendored
|
|
@ -1,729 +1,99 @@
|
||||||
# SkyView Database Architecture
|
# SkyView Database Management
|
||||||
|
|
||||||
This document describes SkyView's SQLite database architecture, migration system, and integration approach for persistent data storage.
|
SkyView includes a comprehensive database management system for enriching aircraft callsigns with airline and airport information.
|
||||||
|
|
||||||
## Overview
|
## Quick Start
|
||||||
|
|
||||||
SkyView uses a single SQLite database to store:
|
### 1. Check Current Status
|
||||||
- **Historic aircraft data**: Position history, message counts, signal strength
|
|
||||||
- **Callsign lookup data**: Cached airline/airport information from external APIs
|
|
||||||
- **Embedded aviation data**: OpenFlights airline and airport databases
|
|
||||||
|
|
||||||
## Database Design Principles
|
|
||||||
|
|
||||||
### Embedded Architecture
|
|
||||||
- Single SQLite file for all persistent data
|
|
||||||
- No external database dependencies
|
|
||||||
- Self-contained deployment with embedded schemas
|
|
||||||
- Backward compatibility through versioned migrations
|
|
||||||
|
|
||||||
### Performance Optimization
|
|
||||||
- Strategic indexing for time-series aircraft data
|
|
||||||
- Efficient lookups for callsign enhancement
|
|
||||||
- Configurable data retention policies
|
|
||||||
- Query optimization for real-time operations
|
|
||||||
|
|
||||||
### Data Safety
|
|
||||||
- Atomic migration transactions
|
|
||||||
- Pre-migration backups for destructive changes
|
|
||||||
- Data loss warnings for schema changes
|
|
||||||
- Rollback capabilities where possible
|
|
||||||
|
|
||||||
## Database Schema
|
|
||||||
|
|
||||||
### Core Tables
|
|
||||||
|
|
||||||
#### `schema_info`
|
|
||||||
Tracks database version and applied migrations:
|
|
||||||
```sql
|
|
||||||
CREATE TABLE schema_info (
|
|
||||||
version INTEGER PRIMARY KEY,
|
|
||||||
applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
description TEXT,
|
|
||||||
checksum TEXT
|
|
||||||
);
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `aircraft_history`
|
|
||||||
Stores time-series aircraft position and message data:
|
|
||||||
```sql
|
|
||||||
CREATE TABLE aircraft_history (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
icao TEXT NOT NULL,
|
|
||||||
timestamp TIMESTAMP NOT NULL,
|
|
||||||
latitude REAL,
|
|
||||||
longitude REAL,
|
|
||||||
altitude INTEGER,
|
|
||||||
speed INTEGER,
|
|
||||||
track INTEGER,
|
|
||||||
vertical_rate INTEGER,
|
|
||||||
squawk TEXT,
|
|
||||||
callsign TEXT,
|
|
||||||
source_id TEXT NOT NULL,
|
|
||||||
signal_strength REAL
|
|
||||||
);
|
|
||||||
```
|
|
||||||
|
|
||||||
**Indexes:**
|
|
||||||
- `idx_aircraft_history_icao_time`: Fast queries by aircraft and time range
|
|
||||||
- `idx_aircraft_history_timestamp`: Time-based cleanup and queries
|
|
||||||
- `idx_aircraft_history_callsign`: Callsign-based searches
|
|
||||||
|
|
||||||
#### `airlines`
|
|
||||||
Multi-source airline database with unified schema:
|
|
||||||
```sql
|
|
||||||
CREATE TABLE airlines (
|
|
||||||
id INTEGER PRIMARY KEY,
|
|
||||||
name TEXT NOT NULL,
|
|
||||||
alias TEXT,
|
|
||||||
iata_code TEXT,
|
|
||||||
icao_code TEXT,
|
|
||||||
callsign TEXT,
|
|
||||||
country TEXT,
|
|
||||||
country_code TEXT,
|
|
||||||
active BOOLEAN DEFAULT 1,
|
|
||||||
data_source TEXT NOT NULL DEFAULT 'unknown',
|
|
||||||
source_id TEXT,
|
|
||||||
imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
|
||||||
);
|
|
||||||
```
|
|
||||||
|
|
||||||
**Indexes:**
|
|
||||||
- `idx_airlines_icao_code`: ICAO code lookup (primary for callsign enhancement)
|
|
||||||
- `idx_airlines_iata_code`: IATA code lookup
|
|
||||||
- `idx_airlines_callsign`: Radio callsign lookup
|
|
||||||
- `idx_airlines_country_code`: Country-based filtering
|
|
||||||
- `idx_airlines_active`: Active airlines filtering
|
|
||||||
- `idx_airlines_source`: Data source tracking
|
|
||||||
|
|
||||||
#### `airports`
|
|
||||||
Multi-source airport database with comprehensive metadata:
|
|
||||||
```sql
|
|
||||||
CREATE TABLE airports (
|
|
||||||
id INTEGER PRIMARY KEY,
|
|
||||||
name TEXT NOT NULL,
|
|
||||||
ident TEXT,
|
|
||||||
type TEXT,
|
|
||||||
city TEXT,
|
|
||||||
municipality TEXT,
|
|
||||||
region TEXT,
|
|
||||||
country TEXT,
|
|
||||||
country_code TEXT,
|
|
||||||
continent TEXT,
|
|
||||||
iata_code TEXT,
|
|
||||||
icao_code TEXT,
|
|
||||||
local_code TEXT,
|
|
||||||
gps_code TEXT,
|
|
||||||
latitude REAL,
|
|
||||||
longitude REAL,
|
|
||||||
elevation_ft INTEGER,
|
|
||||||
scheduled_service BOOLEAN DEFAULT 0,
|
|
||||||
home_link TEXT,
|
|
||||||
wikipedia_link TEXT,
|
|
||||||
keywords TEXT,
|
|
||||||
timezone_offset REAL,
|
|
||||||
timezone TEXT,
|
|
||||||
dst_type TEXT,
|
|
||||||
data_source TEXT NOT NULL DEFAULT 'unknown',
|
|
||||||
source_id TEXT,
|
|
||||||
imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
|
||||||
);
|
|
||||||
```
|
|
||||||
|
|
||||||
**Indexes:**
|
|
||||||
- `idx_airports_icao_code`: ICAO code lookup
|
|
||||||
- `idx_airports_iata_code`: IATA code lookup
|
|
||||||
- `idx_airports_ident`: Airport identifier lookup
|
|
||||||
- `idx_airports_country_code`: Country-based filtering
|
|
||||||
- `idx_airports_type`: Airport type filtering
|
|
||||||
- `idx_airports_coords`: Geographic coordinate queries
|
|
||||||
- `idx_airports_source`: Data source tracking
|
|
||||||
|
|
||||||
#### `callsign_cache`
|
|
||||||
Caches external API lookups and local enrichment for callsign enhancement:
|
|
||||||
```sql
|
|
||||||
CREATE TABLE callsign_cache (
|
|
||||||
callsign TEXT PRIMARY KEY,
|
|
||||||
airline_icao TEXT,
|
|
||||||
airline_iata TEXT,
|
|
||||||
airline_name TEXT,
|
|
||||||
airline_country TEXT,
|
|
||||||
flight_number TEXT,
|
|
||||||
origin_iata TEXT, -- Departure airport IATA code
|
|
||||||
destination_iata TEXT, -- Arrival airport IATA code
|
|
||||||
aircraft_type TEXT,
|
|
||||||
route TEXT, -- Full route description
|
|
||||||
status TEXT, -- Flight status (scheduled, delayed, etc.)
|
|
||||||
source TEXT NOT NULL DEFAULT 'local',
|
|
||||||
cached_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
expires_at TIMESTAMP NOT NULL
|
|
||||||
);
|
|
||||||
```
|
|
||||||
|
|
||||||
**Route Information Fields:**
|
|
||||||
- **`origin_iata`**: IATA code of departure airport (e.g., "JFK" for New York JFK)
|
|
||||||
- **`destination_iata`**: IATA code of arrival airport (e.g., "LAX" for Los Angeles)
|
|
||||||
- **`route`**: Human-readable route description (e.g., "JFK-LAX" or "New York to Los Angeles")
|
|
||||||
- **`status`**: Current flight status when available from external APIs
|
|
||||||
|
|
||||||
These fields enable enhanced flight tracking with origin-destination pairs and route visualization.
|
|
||||||
|
|
||||||
**Indexes:**
|
|
||||||
- `idx_callsign_cache_expires`: Efficient cache cleanup
|
|
||||||
- `idx_callsign_cache_airline`: Airline-based queries
|
|
||||||
|
|
||||||
#### `data_sources`
|
|
||||||
Tracks loaded external data sources and their metadata:
|
|
||||||
```sql
|
|
||||||
CREATE TABLE data_sources (
|
|
||||||
name TEXT PRIMARY KEY,
|
|
||||||
license TEXT NOT NULL,
|
|
||||||
url TEXT,
|
|
||||||
version TEXT,
|
|
||||||
imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
record_count INTEGER DEFAULT 0,
|
|
||||||
user_accepted_license BOOLEAN DEFAULT 0
|
|
||||||
);
|
|
||||||
```
|
|
||||||
|
|
||||||
## Database Location Strategy
|
|
||||||
|
|
||||||
### Path Resolution Order
|
|
||||||
1. **Explicit configuration**: `database.path` in config file
|
|
||||||
2. **System service**: `/var/lib/skyview/skyview.db`
|
|
||||||
3. **User mode**: `~/.local/share/skyview/skyview.db`
|
|
||||||
4. **Fallback**: `./skyview.db` in current directory
|
|
||||||
|
|
||||||
### Directory Permissions
|
|
||||||
- System: `root:root` with `755` permissions for `/var/lib/skyview/`
|
|
||||||
- User: User-owned directories with standard permissions
|
|
||||||
- Service: `skyview:skyview` user/group for system service
|
|
||||||
|
|
||||||
## Migration System
|
|
||||||
|
|
||||||
### Migration Structure
|
|
||||||
```go
|
|
||||||
type Migration struct {
|
|
||||||
Version int // Sequential version number
|
|
||||||
Description string // Human-readable description
|
|
||||||
Up string // SQL for applying migration
|
|
||||||
Down string // SQL for rollback (optional)
|
|
||||||
DataLoss bool // Warning flag for destructive changes
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Migration Process
|
|
||||||
1. **Version Check**: Compare current schema version with available migrations
|
|
||||||
2. **Backup**: Create automatic backup before destructive changes
|
|
||||||
3. **Transaction**: Wrap each migration in atomic transaction
|
|
||||||
4. **Validation**: Verify schema integrity after migration
|
|
||||||
5. **Logging**: Record successful migrations in `schema_info`
|
|
||||||
|
|
||||||
### Data Loss Protection
|
|
||||||
- Migrations marked with `DataLoss: true` require explicit user consent
|
|
||||||
- Automatic backups created before destructive operations
|
|
||||||
- Warning messages displayed during upgrade process
|
|
||||||
- Rollback SQL provided where possible
|
|
||||||
|
|
||||||
### Example Migration Sequence
|
|
||||||
```go
|
|
||||||
var migrations = []Migration{
|
|
||||||
{
|
|
||||||
Version: 1,
|
|
||||||
Description: "Initial schema with aircraft history",
|
|
||||||
Up: createInitialSchema,
|
|
||||||
DataLoss: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Version: 2,
|
|
||||||
Description: "Add OpenFlights airline and airport data",
|
|
||||||
Up: addAviationTables,
|
|
||||||
DataLoss: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Version: 3,
|
|
||||||
Description: "Add callsign lookup cache",
|
|
||||||
Up: addCallsignCache,
|
|
||||||
DataLoss: false,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Data Sources and Loading
|
|
||||||
|
|
||||||
SkyView supports multiple aviation data sources with automatic conflict resolution and license compliance.
|
|
||||||
|
|
||||||
### Supported Data Sources
|
|
||||||
|
|
||||||
#### OpenFlights Airlines Database
|
|
||||||
- **Source**: https://openflights.org/data.html
|
|
||||||
- **License**: Open Database License (ODbL) 1.0
|
|
||||||
- **Content**: Global airline data with ICAO/IATA codes, callsigns, and country information
|
|
||||||
- **Records**: ~6,162 airlines
|
|
||||||
- **Update Method**: Runtime download (no license confirmation required)
|
|
||||||
|
|
||||||
#### OpenFlights Airports Database
|
|
||||||
- **Source**: https://openflights.org/data.html
|
|
||||||
- **License**: Open Database License (ODbL) 1.0
|
|
||||||
- **Content**: Global airport data with coordinates, codes, and metadata
|
|
||||||
- **Records**: ~7,698 airports
|
|
||||||
- **Update Method**: Runtime download
|
|
||||||
|
|
||||||
#### OurAirports Database
|
|
||||||
- **Source**: https://ourairports.com/data/
|
|
||||||
- **License**: Creative Commons Zero (CC0) 1.0
|
|
||||||
- **Content**: Comprehensive airport database with detailed metadata
|
|
||||||
- **Records**: ~83,557 airports
|
|
||||||
- **Update Method**: Runtime download
|
|
||||||
|
|
||||||
### Data Loading System
|
|
||||||
|
|
||||||
#### Intelligent Conflict Resolution
|
|
||||||
The data loading system uses **INSERT OR REPLACE** upserts to handle overlapping data:
|
|
||||||
|
|
||||||
```sql
|
|
||||||
INSERT OR REPLACE INTO airlines (id, name, alias, iata_code, icao_code, callsign, country, active, data_source)
|
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
||||||
```
|
|
||||||
|
|
||||||
This ensures that:
|
|
||||||
- Duplicate records are automatically updated rather than causing errors
|
|
||||||
- Later data sources can override earlier ones
|
|
||||||
- Database integrity is maintained during bulk loads
|
|
||||||
|
|
||||||
#### Loading Process
|
|
||||||
1. **Source Validation**: Verify data source accessibility and format
|
|
||||||
2. **Incremental Processing**: Process data in chunks to manage memory
|
|
||||||
3. **Error Handling**: Log and continue on individual record errors
|
|
||||||
4. **Statistics Reporting**: Track records processed, added, and errors
|
|
||||||
5. **Source Tracking**: Record metadata about each loaded source
|
|
||||||
|
|
||||||
#### Performance Characteristics
|
|
||||||
- **OpenFlights Airlines**: ~6,162 records in ~363ms
|
|
||||||
- **OpenFlights Airports**: ~7,698 records in ~200ms
|
|
||||||
- **OurAirports**: ~83,557 records in ~980ms
|
|
||||||
- **Error Rate**: <0.1% under normal conditions
|
|
||||||
|
|
||||||
## Configuration Integration
|
|
||||||
|
|
||||||
### Database Configuration
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"database": {
|
|
||||||
"path": "/var/lib/skyview-adsb/skyview.db",
|
|
||||||
"max_history_days": 7,
|
|
||||||
"backup_on_upgrade": true,
|
|
||||||
"vacuum_interval": "24h",
|
|
||||||
"page_size": 4096
|
|
||||||
},
|
|
||||||
"callsign": {
|
|
||||||
"enabled": true,
|
|
||||||
"cache_hours": 24,
|
|
||||||
"external_apis": true,
|
|
||||||
"privacy_mode": false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Configuration Fields
|
|
||||||
|
|
||||||
#### `database`
|
|
||||||
- **`path`**: Database file location (empty = auto-resolve)
|
|
||||||
- **`max_history_days`**: Retention policy for aircraft history (0 = unlimited)
|
|
||||||
- **`backup_on_upgrade`**: Create backup before schema migrations
|
|
||||||
|
|
||||||
#### `callsign`
|
|
||||||
- **`enabled`**: Enable callsign enhancement features
|
|
||||||
- **`cache_hours`**: TTL for cached external API results
|
|
||||||
- **`privacy_mode`**: Disable all external data requests
|
|
||||||
- **`sources`**: Independent control for each data source
|
|
||||||
|
|
||||||
### Enhanced Configuration Example
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"callsign": {
|
|
||||||
"enabled": true,
|
|
||||||
"cache_hours": 24,
|
|
||||||
"privacy_mode": false,
|
|
||||||
"sources": {
|
|
||||||
"openflights_embedded": {
|
|
||||||
"enabled": true,
|
|
||||||
"priority": 1,
|
|
||||||
"license": "AGPL-3.0"
|
|
||||||
},
|
|
||||||
"faa_registry": {
|
|
||||||
"enabled": false,
|
|
||||||
"priority": 2,
|
|
||||||
"update_frequency": "weekly",
|
|
||||||
"license": "public_domain"
|
|
||||||
},
|
|
||||||
"opensky_api": {
|
|
||||||
"enabled": false,
|
|
||||||
"priority": 3,
|
|
||||||
"timeout_seconds": 5,
|
|
||||||
"max_retries": 2,
|
|
||||||
"requires_consent": true,
|
|
||||||
"license_warning": "Commercial use requires OpenSky Network consent",
|
|
||||||
"user_accepts_terms": false
|
|
||||||
},
|
|
||||||
"custom_database": {
|
|
||||||
"enabled": false,
|
|
||||||
"priority": 4,
|
|
||||||
"path": "",
|
|
||||||
"license": "user_verified"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"fallback_chain": ["openflights_embedded", "faa_registry", "opensky_api", "custom_database"]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Individual Source Configuration Options
|
|
||||||
- **`enabled`**: Enable/disable this specific source
|
|
||||||
- **`priority`**: Processing order (lower numbers = higher priority)
|
|
||||||
- **`license`**: License type for compliance tracking
|
|
||||||
- **`requires_consent`**: Whether source requires explicit user consent
|
|
||||||
- **`user_accepts_terms`**: User acknowledgment of licensing terms
|
|
||||||
- **`timeout_seconds`**: Per-source timeout configuration
|
|
||||||
- **`max_retries`**: Per-source retry limits
|
|
||||||
- **`update_frequency`**: For downloadable sources (daily/weekly/monthly)
|
|
||||||
|
|
||||||
## Debian Package Integration
|
|
||||||
|
|
||||||
### Package Structure
|
|
||||||
```
|
|
||||||
/var/lib/skyview/ # Database directory
|
|
||||||
/etc/skyview/config.json # Default configuration
|
|
||||||
/usr/bin/skyview # Main application
|
|
||||||
/usr/share/skyview/ # Embedded resources
|
|
||||||
```
|
|
||||||
|
|
||||||
### Installation Process
|
|
||||||
1. **`postinst`**: Create directories, user accounts, permissions
|
|
||||||
2. **First Run**: Database initialization and migration on startup
|
|
||||||
3. **Upgrades**: Automatic schema migration with backup
|
|
||||||
4. **Service**: Systemd integration with proper database access
|
|
||||||
|
|
||||||
### Service User
|
|
||||||
- User: `skyview-adsb`
|
|
||||||
- Home: `/var/lib/skyview-adsb`
|
|
||||||
- Shell: `/bin/false` (service account)
|
|
||||||
- Database: Read/write access to `/var/lib/skyview-adsb/`
|
|
||||||
|
|
||||||
### Automatic Database Updates
|
|
||||||
The systemd service configuration includes automatic database updates on startup:
|
|
||||||
|
|
||||||
```ini
|
|
||||||
[Service]
|
|
||||||
Type=simple
|
|
||||||
User=skyview-adsb
|
|
||||||
Group=skyview-adsb
|
|
||||||
# Update database before starting main service
|
|
||||||
ExecStartPre=/usr/bin/skyview-data -config /etc/skyview-adsb/config.json update
|
|
||||||
TimeoutStartSec=300
|
|
||||||
ExecStart=/usr/bin/skyview -config /etc/skyview-adsb/config.json
|
|
||||||
```
|
|
||||||
|
|
||||||
This ensures aviation data sources are refreshed before each service start, complementing the weekly timer-based updates.
|
|
||||||
|
|
||||||
## Data Retention and Cleanup
|
|
||||||
|
|
||||||
### Automatic Cleanup
|
|
||||||
- **Aircraft History**: Configurable retention period (`max_history_days`)
|
|
||||||
- **Cache Expiration**: TTL-based cleanup of external API cache
|
|
||||||
- **Optimization**: Periodic VACUUM operations for storage efficiency
|
|
||||||
|
|
||||||
### Manual Maintenance
|
|
||||||
```sql
|
|
||||||
-- Clean old aircraft history (example: 7 days)
|
|
||||||
DELETE FROM aircraft_history
|
|
||||||
WHERE timestamp < datetime('now', '-7 days');
|
|
||||||
|
|
||||||
-- Clean expired cache entries
|
|
||||||
DELETE FROM callsign_cache
|
|
||||||
WHERE expires_at < datetime('now');
|
|
||||||
|
|
||||||
-- Optimize database storage
|
|
||||||
VACUUM;
|
|
||||||
```
|
|
||||||
|
|
||||||
## Database Optimization
|
|
||||||
|
|
||||||
SkyView includes a comprehensive database optimization system that automatically manages storage efficiency and performance.
|
|
||||||
|
|
||||||
### Optimization Features
|
|
||||||
|
|
||||||
#### Automatic VACUUM Operations
|
|
||||||
- **Full VACUUM**: Rebuilds database to reclaim deleted space
|
|
||||||
- **Incremental VACUUM**: Gradual space reclamation with minimal performance impact
|
|
||||||
- **Scheduled Maintenance**: Configurable intervals for automatic optimization
|
|
||||||
- **Size Reporting**: Before/after statistics with space savings metrics
|
|
||||||
|
|
||||||
#### Storage Optimization
|
|
||||||
- **Page Size Optimization**: Configurable SQLite page size for optimal performance
|
|
||||||
- **Auto-Vacuum Configuration**: Enables incremental space reclamation
|
|
||||||
- **Statistics Updates**: ANALYZE operations for query plan optimization
|
|
||||||
- **Efficiency Monitoring**: Real-time storage efficiency reporting
|
|
||||||
|
|
||||||
### Using the Optimization System
|
|
||||||
|
|
||||||
#### Command Line Interface
|
|
||||||
```bash
|
```bash
|
||||||
# Run comprehensive database optimization
|
skyview-data status
|
||||||
skyview-data optimize
|
|
||||||
|
|
||||||
# Run with force flag to skip confirmation prompts
|
|
||||||
skyview-data optimize --force
|
|
||||||
|
|
||||||
# Check current optimization statistics
|
|
||||||
skyview-data optimize --stats-only
|
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Optimization Output Example
|
### 2. Import Safe Data (Recommended)
|
||||||
```
|
```bash
|
||||||
Optimizing database for storage efficiency...
|
# Import public domain sources automatically
|
||||||
✓ Auto VACUUM: Enable incremental auto-vacuum
|
skyview-data update
|
||||||
✓ Incremental VACUUM: Reclaim free pages incrementally
|
|
||||||
✓ Optimize: Update SQLite query planner statistics
|
|
||||||
✓ Analyze: Update table statistics for better query plans
|
|
||||||
|
|
||||||
VACUUM completed in 1.2s: 275.3 MB → 263.1 MB (saved 12.2 MB, 4.4%)
|
|
||||||
|
|
||||||
Database optimization completed successfully.
|
|
||||||
Storage efficiency: 96.8% (263.1 MB used of 272.4 MB allocated)
|
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Configuration Options
|
### 3. Enable Automatic Updates (Optional)
|
||||||
```json
|
```bash
|
||||||
{
|
# Weekly updates on Sunday at 3 AM
|
||||||
"database": {
|
sudo systemctl enable --now skyview-database-update.timer
|
||||||
"vacuum_interval": "24h",
|
|
||||||
"page_size": 4096,
|
|
||||||
"enable_compression": true,
|
|
||||||
"compression_level": 6
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Optimization Statistics
|
## Available Data Sources
|
||||||
|
|
||||||
The optimization system provides detailed metrics about database performance:
|
### Safe Sources (Public Domain)
|
||||||
|
These sources are imported automatically with `skyview-data update`:
|
||||||
|
- **OurAirports**: Comprehensive airport database (public domain)
|
||||||
|
- **FAA Registry**: US aircraft registration data (public domain)
|
||||||
|
|
||||||
#### Available Statistics
|
### License-Required Sources
|
||||||
- **Database Size**: Total file size in bytes
|
These require explicit acceptance:
|
||||||
- **Page Statistics**: Page size, count, and utilization
|
- **OpenFlights**: Airline and airport data (AGPL-3.0 license)
|
||||||
- **Storage Efficiency**: Percentage of allocated space actually used
|
|
||||||
- **Free Space**: Amount of reclaimable space available
|
|
||||||
- **Auto-Vacuum Status**: Current auto-vacuum configuration
|
|
||||||
- **Last Optimization**: Timestamp of most recent optimization
|
|
||||||
|
|
||||||
#### Programmatic Access
|
## Commands
|
||||||
```go
|
|
||||||
// Get current optimization statistics
|
|
||||||
optimizer := NewOptimizationManager(db, config)
|
|
||||||
stats, err := optimizer.GetOptimizationStats()
|
|
||||||
if err != nil {
|
|
||||||
log.Fatal("Failed to get stats:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
fmt.Printf("Database efficiency: %.1f%%\n", stats.Efficiency)
|
### Basic Operations
|
||||||
fmt.Printf("Storage used: %.1f MB\n", float64(stats.DatabaseSize)/(1024*1024))
|
```bash
|
||||||
|
skyview-data list # Show available sources
|
||||||
|
skyview-data status # Show database status
|
||||||
|
skyview-data update # Update safe sources
|
||||||
|
skyview-data import openflights # Import licensed source
|
||||||
|
skyview-data clear <source> # Remove source data
|
||||||
```
|
```
|
||||||
|
|
||||||
## Performance Considerations
|
### Systemd Timer Management
|
||||||
|
```bash
|
||||||
|
# Enable weekly automatic updates
|
||||||
|
systemctl enable skyview-database-update.timer
|
||||||
|
systemctl start skyview-database-update.timer
|
||||||
|
|
||||||
### Query Optimization
|
# Check timer status
|
||||||
- Time-range queries use `idx_aircraft_history_icao_time`
|
systemctl status skyview-database-update.timer
|
||||||
- Callsign lookups prioritize local cache over external APIs
|
|
||||||
- Bulk operations use transactions for consistency
|
|
||||||
|
|
||||||
### Storage Efficiency
|
# View update logs
|
||||||
- Configurable history limits prevent unbounded growth
|
journalctl -u skyview-database-update.service
|
||||||
- Automatic VACUUM operations with optimization reporting
|
|
||||||
- Compressed timestamps and efficient data types
|
|
||||||
- Page size optimization for storage efficiency
|
|
||||||
- Auto-vacuum configuration for incremental space reclamation
|
|
||||||
|
|
||||||
### Memory Usage
|
# Disable automatic updates
|
||||||
- WAL mode for concurrent read/write access
|
systemctl disable skyview-database-update.timer
|
||||||
- Connection pooling for multiple goroutines
|
|
||||||
- Prepared statements for repeated queries
|
|
||||||
|
|
||||||
## Privacy and Security
|
|
||||||
|
|
||||||
### Privacy Mode
|
|
||||||
SkyView includes comprehensive privacy controls through the `privacy_mode` configuration option:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"callsign": {
|
|
||||||
"enabled": true,
|
|
||||||
"privacy_mode": true,
|
|
||||||
"external_apis": false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Privacy Mode Features
|
## License Compliance
|
||||||
- **No External Calls**: Completely disables all external API requests
|
|
||||||
- **Local-Only Lookups**: Uses only embedded OpenFlights database for callsign enhancement
|
|
||||||
- **No Data Transmission**: Aircraft data never leaves the local system
|
|
||||||
- **Compliance**: Suitable for sensitive environments requiring air-gapped operation
|
|
||||||
|
|
||||||
#### Privacy Mode Behavior
|
SkyView maintains strict license separation:
|
||||||
| Feature | Privacy Mode ON | Privacy Mode OFF |
|
- **SkyView binary**: Contains no external data (stays MIT licensed)
|
||||||
|---------|----------------|------------------|
|
- **Runtime import**: Users choose which sources to import
|
||||||
| External API calls | ❌ Disabled | ✅ Configurable |
|
- **Safe defaults**: Only public domain sources updated automatically
|
||||||
| OpenFlights lookup | ✅ Enabled | ✅ Enabled |
|
- **User choice**: Each person decides their own license compatibility
|
||||||
| Callsign caching | ✅ Local only | ✅ Full caching |
|
|
||||||
| Data transmission | ❌ None | ⚠️ API calls only |
|
|
||||||
|
|
||||||
#### Use Cases for Privacy Mode
|
|
||||||
- **Military installations**: No external data transmission allowed
|
|
||||||
- **Air-gapped networks**: No internet connectivity available
|
|
||||||
- **Corporate policies**: External API usage prohibited
|
|
||||||
- **Personal privacy**: User preference for local-only operation
|
|
||||||
|
|
||||||
### Security Considerations
|
|
||||||
|
|
||||||
#### File Permissions
|
|
||||||
- Database files readable only by skyview user/group
|
|
||||||
- Configuration files protected from unauthorized access
|
|
||||||
- Backup files inherit secure permissions
|
|
||||||
|
|
||||||
#### Data Protection
|
|
||||||
- Local SQLite database with file-system level security
|
|
||||||
- No cloud storage or external database dependencies
|
|
||||||
- All aviation data processed and stored locally
|
|
||||||
|
|
||||||
#### Network Security
|
|
||||||
- External API calls (when enabled) use HTTPS only
|
|
||||||
- No persistent connections to external services
|
|
||||||
- Optional certificate validation for API endpoints
|
|
||||||
|
|
||||||
### Data Integrity
|
|
||||||
- Foreign key constraints where applicable
|
|
||||||
- Transaction isolation for concurrent operations
|
|
||||||
- Checksums for migration verification
|
|
||||||
|
|
||||||
## Troubleshooting
|
## Troubleshooting
|
||||||
|
|
||||||
### Common Issues
|
### Check Service Status
|
||||||
|
|
||||||
#### Database Locked
|
|
||||||
```
|
|
||||||
Error: database is locked
|
|
||||||
```
|
|
||||||
**Solution**: Stop SkyView service, check for stale lock files, restart
|
|
||||||
|
|
||||||
#### Migration Failures
|
|
||||||
```
|
|
||||||
Error: migration 3 failed: table already exists
|
|
||||||
```
|
|
||||||
**Solution**: Check schema version, restore from backup, retry migration
|
|
||||||
|
|
||||||
#### Permission Denied
|
|
||||||
```
|
|
||||||
Error: unable to open database file
|
|
||||||
```
|
|
||||||
**Solution**: Verify file permissions, check directory ownership, ensure disk space
|
|
||||||
|
|
||||||
### Diagnostic Commands
|
|
||||||
```bash
|
```bash
|
||||||
# Check database integrity
|
systemctl status skyview-database-update.timer
|
||||||
sqlite3 /var/lib/skyview/skyview.db "PRAGMA integrity_check;"
|
journalctl -u skyview-database-update.service -f
|
||||||
|
|
||||||
# View schema version
|
|
||||||
sqlite3 /var/lib/skyview/skyview.db "SELECT * FROM schema_info;"
|
|
||||||
|
|
||||||
# Database statistics
|
|
||||||
sqlite3 /var/lib/skyview/skyview.db ".dbinfo"
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Testing and Quality Assurance
|
### Manual Database Reset
|
||||||
|
|
||||||
SkyView includes comprehensive test coverage for all database functionality to ensure reliability and data integrity.
|
|
||||||
|
|
||||||
### Test Coverage Areas
|
|
||||||
|
|
||||||
#### Core Database Functionality
|
|
||||||
- **Database Creation and Initialization**: Connection management, configuration handling
|
|
||||||
- **Migration System**: Schema versioning, upgrade/downgrade operations
|
|
||||||
- **Connection Pooling**: Concurrent access, connection lifecycle management
|
|
||||||
- **SQLite Pragma Settings**: WAL mode, foreign keys, performance optimizations
|
|
||||||
|
|
||||||
#### Data Loading and Management
|
|
||||||
- **Multi-Source Loading**: OpenFlights, OurAirports data integration
|
|
||||||
- **Conflict Resolution**: Upsert operations, duplicate handling
|
|
||||||
- **Error Handling**: Network failures, malformed data recovery
|
|
||||||
- **Performance Validation**: Loading speed, memory usage optimization
|
|
||||||
|
|
||||||
#### Callsign Enhancement System
|
|
||||||
- **Parsing Logic**: Callsign validation, airline code extraction
|
|
||||||
- **Database Integration**: Local lookups, caching operations
|
|
||||||
- **Search Functionality**: Airline filtering, country-based queries
|
|
||||||
- **Cache Management**: TTL handling, cleanup operations
|
|
||||||
|
|
||||||
#### Optimization System
|
|
||||||
- **VACUUM Operations**: Space reclamation, performance monitoring
|
|
||||||
- **Page Size Optimization**: Configuration validation, storage efficiency
|
|
||||||
- **Statistics Generation**: Metrics accuracy, reporting consistency
|
|
||||||
- **Maintenance Scheduling**: Automated optimization, interval management
|
|
||||||
|
|
||||||
### Test Infrastructure
|
|
||||||
|
|
||||||
#### Automated Test Setup
|
|
||||||
```go
|
|
||||||
// setupTestDatabase creates isolated test environment
|
|
||||||
func setupTestDatabase(t *testing.T) (*Database, func()) {
|
|
||||||
tempFile, _ := os.CreateTemp("", "test_skyview_*.db")
|
|
||||||
config := &Config{Path: tempFile.Name()}
|
|
||||||
db, _ := NewDatabase(config)
|
|
||||||
db.Initialize() // Run all migrations
|
|
||||||
|
|
||||||
cleanup := func() {
|
|
||||||
db.Close()
|
|
||||||
os.Remove(tempFile.Name())
|
|
||||||
}
|
|
||||||
return db, cleanup
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Network-Safe Testing
|
|
||||||
Tests gracefully handle network connectivity issues:
|
|
||||||
- Skip tests requiring external data sources when offline
|
|
||||||
- Provide meaningful error messages for connectivity failures
|
|
||||||
- Use local test data when external sources are unavailable
|
|
||||||
|
|
||||||
### Running Tests
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Run all database tests
|
systemctl stop skyview-database-update.timer
|
||||||
go test -v ./internal/database/...
|
skyview-data reset --force
|
||||||
|
skyview-data update
|
||||||
# Run tests in short mode (skip long-running network tests)
|
systemctl start skyview-database-update.timer
|
||||||
go test -v -short ./internal/database/...
|
|
||||||
|
|
||||||
# Run specific test categories
|
|
||||||
go test -v -run="TestDatabase" ./internal/database/...
|
|
||||||
go test -v -run="TestOptimization" ./internal/database/...
|
|
||||||
go test -v -run="TestCallsign" ./internal/database/...
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Future Enhancements
|
### Permissions Issues
|
||||||
|
```bash
|
||||||
|
sudo chown skyview:skyview /var/lib/skyview/
|
||||||
|
sudo chmod 755 /var/lib/skyview/
|
||||||
|
```
|
||||||
|
|
||||||
### Planned Features
|
## Files and Directories
|
||||||
- **Compression**: Time-series compression for long-term storage
|
|
||||||
- **Partitioning**: Date-based partitioning for large datasets
|
|
||||||
- **Replication**: Read replica support for high-availability setups
|
|
||||||
- **Analytics**: Built-in reporting and statistics tables
|
|
||||||
- **Enhanced Route Data**: Integration with additional flight tracking APIs
|
|
||||||
- **Geographic Indexing**: Spatial queries for airport proximity searches
|
|
||||||
|
|
||||||
### Migration Path
|
- `/usr/bin/skyview-data` - Database management command
|
||||||
- All enhancements will use versioned migrations
|
- `/var/lib/skyview/skyview.db` - Database file
|
||||||
- Backward compatibility maintained for existing installations
|
- `/usr/share/skyview/scripts/update-database.sh` - Cron helper script
|
||||||
- Data preservation prioritized over schema optimization
|
- `/lib/systemd/system/skyview-database-update.*` - Systemd timer files
|
||||||
- Comprehensive testing required for all schema changes
|
|
||||||
|
For detailed information, see `man skyview-data`.
|
||||||
345
docs/DATABASE.md
345
docs/DATABASE.md
|
|
@ -49,7 +49,7 @@ Stores time-series aircraft position and message data:
|
||||||
```sql
|
```sql
|
||||||
CREATE TABLE aircraft_history (
|
CREATE TABLE aircraft_history (
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
icao TEXT NOT NULL,
|
icao_hex TEXT NOT NULL,
|
||||||
timestamp TIMESTAMP NOT NULL,
|
timestamp TIMESTAMP NOT NULL,
|
||||||
latitude REAL,
|
latitude REAL,
|
||||||
longitude REAL,
|
longitude REAL,
|
||||||
|
|
@ -59,8 +59,9 @@ CREATE TABLE aircraft_history (
|
||||||
vertical_rate INTEGER,
|
vertical_rate INTEGER,
|
||||||
squawk TEXT,
|
squawk TEXT,
|
||||||
callsign TEXT,
|
callsign TEXT,
|
||||||
source_id TEXT NOT NULL,
|
source_id TEXT,
|
||||||
signal_strength REAL
|
signal_strength REAL,
|
||||||
|
message_count INTEGER DEFAULT 1
|
||||||
);
|
);
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -70,123 +71,66 @@ CREATE TABLE aircraft_history (
|
||||||
- `idx_aircraft_history_callsign`: Callsign-based searches
|
- `idx_aircraft_history_callsign`: Callsign-based searches
|
||||||
|
|
||||||
#### `airlines`
|
#### `airlines`
|
||||||
Multi-source airline database with unified schema:
|
OpenFlights embedded airline database:
|
||||||
```sql
|
```sql
|
||||||
CREATE TABLE airlines (
|
CREATE TABLE airlines (
|
||||||
id INTEGER PRIMARY KEY,
|
id INTEGER PRIMARY KEY,
|
||||||
name TEXT NOT NULL,
|
name TEXT NOT NULL,
|
||||||
alias TEXT,
|
alias TEXT,
|
||||||
iata_code TEXT,
|
iata TEXT,
|
||||||
icao_code TEXT,
|
icao TEXT,
|
||||||
callsign TEXT,
|
callsign TEXT,
|
||||||
country TEXT,
|
country TEXT,
|
||||||
country_code TEXT,
|
active BOOLEAN DEFAULT 1
|
||||||
active BOOLEAN DEFAULT 1,
|
|
||||||
data_source TEXT NOT NULL DEFAULT 'unknown',
|
|
||||||
source_id TEXT,
|
|
||||||
imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
|
||||||
);
|
);
|
||||||
```
|
```
|
||||||
|
|
||||||
**Indexes:**
|
**Indexes:**
|
||||||
- `idx_airlines_icao_code`: ICAO code lookup (primary for callsign enhancement)
|
- `idx_airlines_icao`: ICAO code lookup (primary for callsign enhancement)
|
||||||
- `idx_airlines_iata_code`: IATA code lookup
|
- `idx_airlines_iata`: IATA code lookup
|
||||||
- `idx_airlines_callsign`: Radio callsign lookup
|
|
||||||
- `idx_airlines_country_code`: Country-based filtering
|
|
||||||
- `idx_airlines_active`: Active airlines filtering
|
|
||||||
- `idx_airlines_source`: Data source tracking
|
|
||||||
|
|
||||||
#### `airports`
|
#### `airports`
|
||||||
Multi-source airport database with comprehensive metadata:
|
OpenFlights embedded airport database:
|
||||||
```sql
|
```sql
|
||||||
CREATE TABLE airports (
|
CREATE TABLE airports (
|
||||||
id INTEGER PRIMARY KEY,
|
id INTEGER PRIMARY KEY,
|
||||||
name TEXT NOT NULL,
|
name TEXT NOT NULL,
|
||||||
ident TEXT,
|
|
||||||
type TEXT,
|
|
||||||
city TEXT,
|
city TEXT,
|
||||||
municipality TEXT,
|
|
||||||
region TEXT,
|
|
||||||
country TEXT,
|
country TEXT,
|
||||||
country_code TEXT,
|
iata TEXT,
|
||||||
continent TEXT,
|
icao TEXT,
|
||||||
iata_code TEXT,
|
|
||||||
icao_code TEXT,
|
|
||||||
local_code TEXT,
|
|
||||||
gps_code TEXT,
|
|
||||||
latitude REAL,
|
latitude REAL,
|
||||||
longitude REAL,
|
longitude REAL,
|
||||||
elevation_ft INTEGER,
|
altitude INTEGER,
|
||||||
scheduled_service BOOLEAN DEFAULT 0,
|
|
||||||
home_link TEXT,
|
|
||||||
wikipedia_link TEXT,
|
|
||||||
keywords TEXT,
|
|
||||||
timezone_offset REAL,
|
timezone_offset REAL,
|
||||||
timezone TEXT,
|
|
||||||
dst_type TEXT,
|
dst_type TEXT,
|
||||||
data_source TEXT NOT NULL DEFAULT 'unknown',
|
timezone TEXT
|
||||||
source_id TEXT,
|
|
||||||
imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
|
||||||
);
|
);
|
||||||
```
|
```
|
||||||
|
|
||||||
**Indexes:**
|
**Indexes:**
|
||||||
- `idx_airports_icao_code`: ICAO code lookup
|
- `idx_airports_icao`: ICAO code lookup
|
||||||
- `idx_airports_iata_code`: IATA code lookup
|
- `idx_airports_iata`: IATA code lookup
|
||||||
- `idx_airports_ident`: Airport identifier lookup
|
|
||||||
- `idx_airports_country_code`: Country-based filtering
|
|
||||||
- `idx_airports_type`: Airport type filtering
|
|
||||||
- `idx_airports_coords`: Geographic coordinate queries
|
|
||||||
- `idx_airports_source`: Data source tracking
|
|
||||||
|
|
||||||
#### `callsign_cache`
|
#### `callsign_cache`
|
||||||
Caches external API lookups and local enrichment for callsign enhancement:
|
Caches external API lookups for callsign enhancement:
|
||||||
```sql
|
```sql
|
||||||
CREATE TABLE callsign_cache (
|
CREATE TABLE callsign_cache (
|
||||||
callsign TEXT PRIMARY KEY,
|
callsign TEXT PRIMARY KEY,
|
||||||
airline_icao TEXT,
|
airline_icao TEXT,
|
||||||
airline_iata TEXT,
|
|
||||||
airline_name TEXT,
|
airline_name TEXT,
|
||||||
airline_country TEXT,
|
|
||||||
flight_number TEXT,
|
flight_number TEXT,
|
||||||
origin_iata TEXT, -- Departure airport IATA code
|
origin_iata TEXT,
|
||||||
destination_iata TEXT, -- Arrival airport IATA code
|
destination_iata TEXT,
|
||||||
aircraft_type TEXT,
|
aircraft_type TEXT,
|
||||||
route TEXT, -- Full route description
|
|
||||||
status TEXT, -- Flight status (scheduled, delayed, etc.)
|
|
||||||
source TEXT NOT NULL DEFAULT 'local',
|
|
||||||
cached_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
cached_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
expires_at TIMESTAMP NOT NULL
|
expires_at TIMESTAMP,
|
||||||
|
source TEXT DEFAULT 'local'
|
||||||
);
|
);
|
||||||
```
|
```
|
||||||
|
|
||||||
**Route Information Fields:**
|
|
||||||
- **`origin_iata`**: IATA code of departure airport (e.g., "JFK" for New York JFK)
|
|
||||||
- **`destination_iata`**: IATA code of arrival airport (e.g., "LAX" for Los Angeles)
|
|
||||||
- **`route`**: Human-readable route description (e.g., "JFK-LAX" or "New York to Los Angeles")
|
|
||||||
- **`status`**: Current flight status when available from external APIs
|
|
||||||
|
|
||||||
These fields enable enhanced flight tracking with origin-destination pairs and route visualization.
|
|
||||||
|
|
||||||
**Indexes:**
|
**Indexes:**
|
||||||
- `idx_callsign_cache_expires`: Efficient cache cleanup
|
- `idx_callsign_cache_expires`: Efficient cache cleanup
|
||||||
- `idx_callsign_cache_airline`: Airline-based queries
|
|
||||||
|
|
||||||
#### `data_sources`
|
|
||||||
Tracks loaded external data sources and their metadata:
|
|
||||||
```sql
|
|
||||||
CREATE TABLE data_sources (
|
|
||||||
name TEXT PRIMARY KEY,
|
|
||||||
license TEXT NOT NULL,
|
|
||||||
url TEXT,
|
|
||||||
version TEXT,
|
|
||||||
imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
record_count INTEGER DEFAULT 0,
|
|
||||||
user_accepted_license BOOLEAN DEFAULT 0
|
|
||||||
);
|
|
||||||
```
|
|
||||||
|
|
||||||
## Database Location Strategy
|
## Database Location Strategy
|
||||||
|
|
||||||
|
|
@ -251,72 +195,15 @@ var migrations = []Migration{
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
## Data Sources and Loading
|
|
||||||
|
|
||||||
SkyView supports multiple aviation data sources with automatic conflict resolution and license compliance.
|
|
||||||
|
|
||||||
### Supported Data Sources
|
|
||||||
|
|
||||||
#### OpenFlights Airlines Database
|
|
||||||
- **Source**: https://openflights.org/data.html
|
|
||||||
- **License**: Open Database License (ODbL) 1.0
|
|
||||||
- **Content**: Global airline data with ICAO/IATA codes, callsigns, and country information
|
|
||||||
- **Records**: ~6,162 airlines
|
|
||||||
- **Update Method**: Runtime download (no license confirmation required)
|
|
||||||
|
|
||||||
#### OpenFlights Airports Database
|
|
||||||
- **Source**: https://openflights.org/data.html
|
|
||||||
- **License**: Open Database License (ODbL) 1.0
|
|
||||||
- **Content**: Global airport data with coordinates, codes, and metadata
|
|
||||||
- **Records**: ~7,698 airports
|
|
||||||
- **Update Method**: Runtime download
|
|
||||||
|
|
||||||
#### OurAirports Database
|
|
||||||
- **Source**: https://ourairports.com/data/
|
|
||||||
- **License**: Creative Commons Zero (CC0) 1.0
|
|
||||||
- **Content**: Comprehensive airport database with detailed metadata
|
|
||||||
- **Records**: ~83,557 airports
|
|
||||||
- **Update Method**: Runtime download
|
|
||||||
|
|
||||||
### Data Loading System
|
|
||||||
|
|
||||||
#### Intelligent Conflict Resolution
|
|
||||||
The data loading system uses **INSERT OR REPLACE** upserts to handle overlapping data:
|
|
||||||
|
|
||||||
```sql
|
|
||||||
INSERT OR REPLACE INTO airlines (id, name, alias, iata_code, icao_code, callsign, country, active, data_source)
|
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
||||||
```
|
|
||||||
|
|
||||||
This ensures that:
|
|
||||||
- Duplicate records are automatically updated rather than causing errors
|
|
||||||
- Later data sources can override earlier ones
|
|
||||||
- Database integrity is maintained during bulk loads
|
|
||||||
|
|
||||||
#### Loading Process
|
|
||||||
1. **Source Validation**: Verify data source accessibility and format
|
|
||||||
2. **Incremental Processing**: Process data in chunks to manage memory
|
|
||||||
3. **Error Handling**: Log and continue on individual record errors
|
|
||||||
4. **Statistics Reporting**: Track records processed, added, and errors
|
|
||||||
5. **Source Tracking**: Record metadata about each loaded source
|
|
||||||
|
|
||||||
#### Performance Characteristics
|
|
||||||
- **OpenFlights Airlines**: ~6,162 records in ~363ms
|
|
||||||
- **OpenFlights Airports**: ~7,698 records in ~200ms
|
|
||||||
- **OurAirports**: ~83,557 records in ~980ms
|
|
||||||
- **Error Rate**: <0.1% under normal conditions
|
|
||||||
|
|
||||||
## Configuration Integration
|
## Configuration Integration
|
||||||
|
|
||||||
### Database Configuration
|
### Database Configuration
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"database": {
|
"database": {
|
||||||
"path": "/var/lib/skyview-adsb/skyview.db",
|
"path": "/var/lib/skyview/skyview.db",
|
||||||
"max_history_days": 7,
|
"max_history_days": 7,
|
||||||
"backup_on_upgrade": true,
|
"backup_on_upgrade": true
|
||||||
"vacuum_interval": "24h",
|
|
||||||
"page_size": 4096
|
|
||||||
},
|
},
|
||||||
"callsign": {
|
"callsign": {
|
||||||
"enabled": true,
|
"enabled": true,
|
||||||
|
|
@ -407,26 +294,10 @@ This ensures that:
|
||||||
4. **Service**: Systemd integration with proper database access
|
4. **Service**: Systemd integration with proper database access
|
||||||
|
|
||||||
### Service User
|
### Service User
|
||||||
- User: `skyview-adsb`
|
- User: `skyview`
|
||||||
- Home: `/var/lib/skyview-adsb`
|
- Home: `/var/lib/skyview`
|
||||||
- Shell: `/bin/false` (service account)
|
- Shell: `/bin/false` (service account)
|
||||||
- Database: Read/write access to `/var/lib/skyview-adsb/`
|
- Database: Read/write access to `/var/lib/skyview/`
|
||||||
|
|
||||||
### Automatic Database Updates
|
|
||||||
The systemd service configuration includes automatic database updates on startup:
|
|
||||||
|
|
||||||
```ini
|
|
||||||
[Service]
|
|
||||||
Type=simple
|
|
||||||
User=skyview-adsb
|
|
||||||
Group=skyview-adsb
|
|
||||||
# Update database before starting main service
|
|
||||||
ExecStartPre=/usr/bin/skyview-data -config /etc/skyview-adsb/config.json update
|
|
||||||
TimeoutStartSec=300
|
|
||||||
ExecStart=/usr/bin/skyview -config /etc/skyview-adsb/config.json
|
|
||||||
```
|
|
||||||
|
|
||||||
This ensures aviation data sources are refreshed before each service start, complementing the weekly timer-based updates.
|
|
||||||
|
|
||||||
## Data Retention and Cleanup
|
## Data Retention and Cleanup
|
||||||
|
|
||||||
|
|
@ -449,89 +320,6 @@ WHERE expires_at < datetime('now');
|
||||||
VACUUM;
|
VACUUM;
|
||||||
```
|
```
|
||||||
|
|
||||||
## Database Optimization
|
|
||||||
|
|
||||||
SkyView includes a comprehensive database optimization system that automatically manages storage efficiency and performance.
|
|
||||||
|
|
||||||
### Optimization Features
|
|
||||||
|
|
||||||
#### Automatic VACUUM Operations
|
|
||||||
- **Full VACUUM**: Rebuilds database to reclaim deleted space
|
|
||||||
- **Incremental VACUUM**: Gradual space reclamation with minimal performance impact
|
|
||||||
- **Scheduled Maintenance**: Configurable intervals for automatic optimization
|
|
||||||
- **Size Reporting**: Before/after statistics with space savings metrics
|
|
||||||
|
|
||||||
#### Storage Optimization
|
|
||||||
- **Page Size Optimization**: Configurable SQLite page size for optimal performance
|
|
||||||
- **Auto-Vacuum Configuration**: Enables incremental space reclamation
|
|
||||||
- **Statistics Updates**: ANALYZE operations for query plan optimization
|
|
||||||
- **Efficiency Monitoring**: Real-time storage efficiency reporting
|
|
||||||
|
|
||||||
### Using the Optimization System
|
|
||||||
|
|
||||||
#### Command Line Interface
|
|
||||||
```bash
|
|
||||||
# Run comprehensive database optimization
|
|
||||||
skyview-data optimize
|
|
||||||
|
|
||||||
# Run with force flag to skip confirmation prompts
|
|
||||||
skyview-data optimize --force
|
|
||||||
|
|
||||||
# Check current optimization statistics
|
|
||||||
skyview-data optimize --stats-only
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Optimization Output Example
|
|
||||||
```
|
|
||||||
Optimizing database for storage efficiency...
|
|
||||||
✓ Auto VACUUM: Enable incremental auto-vacuum
|
|
||||||
✓ Incremental VACUUM: Reclaim free pages incrementally
|
|
||||||
✓ Optimize: Update SQLite query planner statistics
|
|
||||||
✓ Analyze: Update table statistics for better query plans
|
|
||||||
|
|
||||||
VACUUM completed in 1.2s: 275.3 MB → 263.1 MB (saved 12.2 MB, 4.4%)
|
|
||||||
|
|
||||||
Database optimization completed successfully.
|
|
||||||
Storage efficiency: 96.8% (263.1 MB used of 272.4 MB allocated)
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Configuration Options
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"database": {
|
|
||||||
"vacuum_interval": "24h",
|
|
||||||
"page_size": 4096,
|
|
||||||
"enable_compression": true,
|
|
||||||
"compression_level": 6
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Optimization Statistics
|
|
||||||
|
|
||||||
The optimization system provides detailed metrics about database performance:
|
|
||||||
|
|
||||||
#### Available Statistics
|
|
||||||
- **Database Size**: Total file size in bytes
|
|
||||||
- **Page Statistics**: Page size, count, and utilization
|
|
||||||
- **Storage Efficiency**: Percentage of allocated space actually used
|
|
||||||
- **Free Space**: Amount of reclaimable space available
|
|
||||||
- **Auto-Vacuum Status**: Current auto-vacuum configuration
|
|
||||||
- **Last Optimization**: Timestamp of most recent optimization
|
|
||||||
|
|
||||||
#### Programmatic Access
|
|
||||||
```go
|
|
||||||
// Get current optimization statistics
|
|
||||||
optimizer := NewOptimizationManager(db, config)
|
|
||||||
stats, err := optimizer.GetOptimizationStats()
|
|
||||||
if err != nil {
|
|
||||||
log.Fatal("Failed to get stats:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
fmt.Printf("Database efficiency: %.1f%%\n", stats.Efficiency)
|
|
||||||
fmt.Printf("Storage used: %.1f MB\n", float64(stats.DatabaseSize)/(1024*1024))
|
|
||||||
```
|
|
||||||
|
|
||||||
## Performance Considerations
|
## Performance Considerations
|
||||||
|
|
||||||
### Query Optimization
|
### Query Optimization
|
||||||
|
|
@ -541,10 +329,8 @@ fmt.Printf("Storage used: %.1f MB\n", float64(stats.DatabaseSize)/(1024*1024))
|
||||||
|
|
||||||
### Storage Efficiency
|
### Storage Efficiency
|
||||||
- Configurable history limits prevent unbounded growth
|
- Configurable history limits prevent unbounded growth
|
||||||
- Automatic VACUUM operations with optimization reporting
|
- Periodic VACUUM operations reclaim deleted space
|
||||||
- Compressed timestamps and efficient data types
|
- Compressed timestamps and efficient data types
|
||||||
- Page size optimization for storage efficiency
|
|
||||||
- Auto-vacuum configuration for incremental space reclamation
|
|
||||||
|
|
||||||
### Memory Usage
|
### Memory Usage
|
||||||
- WAL mode for concurrent read/write access
|
- WAL mode for concurrent read/write access
|
||||||
|
|
@ -642,76 +428,6 @@ sqlite3 /var/lib/skyview/skyview.db "SELECT * FROM schema_info;"
|
||||||
sqlite3 /var/lib/skyview/skyview.db ".dbinfo"
|
sqlite3 /var/lib/skyview/skyview.db ".dbinfo"
|
||||||
```
|
```
|
||||||
|
|
||||||
## Testing and Quality Assurance
|
|
||||||
|
|
||||||
SkyView includes comprehensive test coverage for all database functionality to ensure reliability and data integrity.
|
|
||||||
|
|
||||||
### Test Coverage Areas
|
|
||||||
|
|
||||||
#### Core Database Functionality
|
|
||||||
- **Database Creation and Initialization**: Connection management, configuration handling
|
|
||||||
- **Migration System**: Schema versioning, upgrade/downgrade operations
|
|
||||||
- **Connection Pooling**: Concurrent access, connection lifecycle management
|
|
||||||
- **SQLite Pragma Settings**: WAL mode, foreign keys, performance optimizations
|
|
||||||
|
|
||||||
#### Data Loading and Management
|
|
||||||
- **Multi-Source Loading**: OpenFlights, OurAirports data integration
|
|
||||||
- **Conflict Resolution**: Upsert operations, duplicate handling
|
|
||||||
- **Error Handling**: Network failures, malformed data recovery
|
|
||||||
- **Performance Validation**: Loading speed, memory usage optimization
|
|
||||||
|
|
||||||
#### Callsign Enhancement System
|
|
||||||
- **Parsing Logic**: Callsign validation, airline code extraction
|
|
||||||
- **Database Integration**: Local lookups, caching operations
|
|
||||||
- **Search Functionality**: Airline filtering, country-based queries
|
|
||||||
- **Cache Management**: TTL handling, cleanup operations
|
|
||||||
|
|
||||||
#### Optimization System
|
|
||||||
- **VACUUM Operations**: Space reclamation, performance monitoring
|
|
||||||
- **Page Size Optimization**: Configuration validation, storage efficiency
|
|
||||||
- **Statistics Generation**: Metrics accuracy, reporting consistency
|
|
||||||
- **Maintenance Scheduling**: Automated optimization, interval management
|
|
||||||
|
|
||||||
### Test Infrastructure
|
|
||||||
|
|
||||||
#### Automated Test Setup
|
|
||||||
```go
|
|
||||||
// setupTestDatabase creates isolated test environment
|
|
||||||
func setupTestDatabase(t *testing.T) (*Database, func()) {
|
|
||||||
tempFile, _ := os.CreateTemp("", "test_skyview_*.db")
|
|
||||||
config := &Config{Path: tempFile.Name()}
|
|
||||||
db, _ := NewDatabase(config)
|
|
||||||
db.Initialize() // Run all migrations
|
|
||||||
|
|
||||||
cleanup := func() {
|
|
||||||
db.Close()
|
|
||||||
os.Remove(tempFile.Name())
|
|
||||||
}
|
|
||||||
return db, cleanup
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Network-Safe Testing
|
|
||||||
Tests gracefully handle network connectivity issues:
|
|
||||||
- Skip tests requiring external data sources when offline
|
|
||||||
- Provide meaningful error messages for connectivity failures
|
|
||||||
- Use local test data when external sources are unavailable
|
|
||||||
|
|
||||||
### Running Tests
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Run all database tests
|
|
||||||
go test -v ./internal/database/...
|
|
||||||
|
|
||||||
# Run tests in short mode (skip long-running network tests)
|
|
||||||
go test -v -short ./internal/database/...
|
|
||||||
|
|
||||||
# Run specific test categories
|
|
||||||
go test -v -run="TestDatabase" ./internal/database/...
|
|
||||||
go test -v -run="TestOptimization" ./internal/database/...
|
|
||||||
go test -v -run="TestCallsign" ./internal/database/...
|
|
||||||
```
|
|
||||||
|
|
||||||
## Future Enhancements
|
## Future Enhancements
|
||||||
|
|
||||||
### Planned Features
|
### Planned Features
|
||||||
|
|
@ -719,11 +435,8 @@ go test -v -run="TestCallsign" ./internal/database/...
|
||||||
- **Partitioning**: Date-based partitioning for large datasets
|
- **Partitioning**: Date-based partitioning for large datasets
|
||||||
- **Replication**: Read replica support for high-availability setups
|
- **Replication**: Read replica support for high-availability setups
|
||||||
- **Analytics**: Built-in reporting and statistics tables
|
- **Analytics**: Built-in reporting and statistics tables
|
||||||
- **Enhanced Route Data**: Integration with additional flight tracking APIs
|
|
||||||
- **Geographic Indexing**: Spatial queries for airport proximity searches
|
|
||||||
|
|
||||||
### Migration Path
|
### Migration Path
|
||||||
- All enhancements will use versioned migrations
|
- All enhancements will use versioned migrations
|
||||||
- Backward compatibility maintained for existing installations
|
- Backward compatibility maintained for existing installations
|
||||||
- Data preservation prioritized over schema optimization
|
- Data preservation prioritized over schema optimization
|
||||||
- Comprehensive testing required for all schema changes
|
|
||||||
2
go.mod
2
go.mod
|
|
@ -7,4 +7,4 @@ require (
|
||||||
github.com/gorilla/websocket v1.5.3
|
github.com/gorilla/websocket v1.5.3
|
||||||
)
|
)
|
||||||
|
|
||||||
require github.com/mattn/go-sqlite3 v1.14.32
|
require github.com/mattn/go-sqlite3 v1.14.32 // indirect
|
||||||
|
|
|
||||||
|
|
@ -43,11 +43,6 @@ type Config struct {
|
||||||
// Maintenance settings
|
// Maintenance settings
|
||||||
VacuumInterval time.Duration `json:"vacuum_interval"` // Default: 24 hours
|
VacuumInterval time.Duration `json:"vacuum_interval"` // Default: 24 hours
|
||||||
CleanupInterval time.Duration `json:"cleanup_interval"` // Default: 1 hour
|
CleanupInterval time.Duration `json:"cleanup_interval"` // Default: 1 hour
|
||||||
|
|
||||||
// Compression settings
|
|
||||||
EnableCompression bool `json:"enable_compression"` // Enable automatic compression
|
|
||||||
CompressionLevel int `json:"compression_level"` // Compression level (1-9, default: 6)
|
|
||||||
PageSize int `json:"page_size"` // SQLite page size (default: 4096)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// AircraftHistoryRecord represents a stored aircraft position update
|
// AircraftHistoryRecord represents a stored aircraft position update
|
||||||
|
|
|
||||||
|
|
@ -1,167 +0,0 @@
|
||||||
package database
|
|
||||||
|
|
||||||
import (
|
|
||||||
"os"
|
|
||||||
"testing"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestNewDatabase(t *testing.T) {
|
|
||||||
// Create temporary database file
|
|
||||||
tempFile, err := os.CreateTemp("", "test_skyview_*.db")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to create temp database file:", err)
|
|
||||||
}
|
|
||||||
defer os.Remove(tempFile.Name())
|
|
||||||
tempFile.Close()
|
|
||||||
|
|
||||||
config := &Config{
|
|
||||||
Path: tempFile.Name(),
|
|
||||||
VacuumInterval: time.Hour,
|
|
||||||
}
|
|
||||||
|
|
||||||
db, err := NewDatabase(config)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to create database:", err)
|
|
||||||
}
|
|
||||||
defer db.Close()
|
|
||||||
|
|
||||||
if db == nil {
|
|
||||||
t.Fatal("NewDatabase() returned nil")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test connection
|
|
||||||
conn := db.GetConnection()
|
|
||||||
if conn == nil {
|
|
||||||
t.Fatal("GetConnection() returned nil")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test basic query
|
|
||||||
var result int
|
|
||||||
err = conn.QueryRow("SELECT 1").Scan(&result)
|
|
||||||
if err != nil {
|
|
||||||
t.Error("Basic query failed:", err)
|
|
||||||
}
|
|
||||||
if result != 1 {
|
|
||||||
t.Error("Basic query returned wrong result:", result)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDatabaseClose(t *testing.T) {
|
|
||||||
tempFile, err := os.CreateTemp("", "test_skyview_*.db")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to create temp database file:", err)
|
|
||||||
}
|
|
||||||
defer os.Remove(tempFile.Name())
|
|
||||||
tempFile.Close()
|
|
||||||
|
|
||||||
config := &Config{Path: tempFile.Name()}
|
|
||||||
db, err := NewDatabase(config)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to create database:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Close should not error
|
|
||||||
if err := db.Close(); err != nil {
|
|
||||||
t.Error("Database Close() returned error:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Second close should be safe
|
|
||||||
if err := db.Close(); err != nil {
|
|
||||||
t.Error("Second Close() returned error:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Connection should be nil after close
|
|
||||||
conn := db.GetConnection()
|
|
||||||
if conn != nil {
|
|
||||||
t.Error("GetConnection() should return nil after Close()")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDatabaseConfig(t *testing.T) {
|
|
||||||
tempFile, err := os.CreateTemp("", "test_skyview_*.db")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to create temp database file:", err)
|
|
||||||
}
|
|
||||||
defer os.Remove(tempFile.Name())
|
|
||||||
tempFile.Close()
|
|
||||||
|
|
||||||
config := &Config{
|
|
||||||
Path: tempFile.Name(),
|
|
||||||
VacuumInterval: 2 * time.Hour,
|
|
||||||
}
|
|
||||||
|
|
||||||
db, err := NewDatabase(config)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to create database:", err)
|
|
||||||
}
|
|
||||||
defer db.Close()
|
|
||||||
|
|
||||||
// Test that config is stored correctly
|
|
||||||
if db.config != config {
|
|
||||||
t.Error("Database config not stored correctly")
|
|
||||||
}
|
|
||||||
if db.config.VacuumInterval != 2*time.Hour {
|
|
||||||
t.Error("VacuumInterval not preserved:", db.config.VacuumInterval)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDatabaseMigrations(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
conn := db.GetConnection()
|
|
||||||
|
|
||||||
// Check that essential tables exist after migrations
|
|
||||||
tables := []string{"airlines", "airports", "callsign_cache", "data_sources", "aircraft_history"}
|
|
||||||
for _, table := range tables {
|
|
||||||
var count int
|
|
||||||
query := "SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name=?"
|
|
||||||
err := conn.QueryRow(query, table).Scan(&count)
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("Failed to check for table %s: %v", table, err)
|
|
||||||
}
|
|
||||||
if count != 1 {
|
|
||||||
t.Errorf("Table %s does not exist", table)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDatabasePragmas(t *testing.T) {
|
|
||||||
tempFile, err := os.CreateTemp("", "test_skyview_*.db")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to create temp database file:", err)
|
|
||||||
}
|
|
||||||
defer os.Remove(tempFile.Name())
|
|
||||||
tempFile.Close()
|
|
||||||
|
|
||||||
config := &Config{Path: tempFile.Name()}
|
|
||||||
db, err := NewDatabase(config)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to create database:", err)
|
|
||||||
}
|
|
||||||
defer db.Close()
|
|
||||||
|
|
||||||
conn := db.GetConnection()
|
|
||||||
|
|
||||||
// Check that foreign keys are enabled
|
|
||||||
var foreignKeys int
|
|
||||||
err = conn.QueryRow("PRAGMA foreign_keys").Scan(&foreignKeys)
|
|
||||||
if err != nil {
|
|
||||||
t.Error("Failed to check foreign_keys pragma:", err)
|
|
||||||
}
|
|
||||||
if foreignKeys != 1 {
|
|
||||||
t.Error("Foreign keys should be enabled")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check journal mode
|
|
||||||
var journalMode string
|
|
||||||
err = conn.QueryRow("PRAGMA journal_mode").Scan(&journalMode)
|
|
||||||
if err != nil {
|
|
||||||
t.Error("Failed to check journal_mode:", err)
|
|
||||||
}
|
|
||||||
// Should be WAL mode for better concurrency
|
|
||||||
if journalMode != "wal" {
|
|
||||||
t.Errorf("Expected WAL journal mode, got: %s", journalMode)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -79,7 +79,7 @@ func GetAvailableDataSources() []DataSource {
|
||||||
Name: "OpenFlights Airlines",
|
Name: "OpenFlights Airlines",
|
||||||
License: "AGPL-3.0",
|
License: "AGPL-3.0",
|
||||||
URL: "https://raw.githubusercontent.com/jpatokal/openflights/master/data/airlines.dat",
|
URL: "https://raw.githubusercontent.com/jpatokal/openflights/master/data/airlines.dat",
|
||||||
RequiresConsent: false, // Runtime data consumption doesn't require explicit consent
|
RequiresConsent: true,
|
||||||
Format: "openflights",
|
Format: "openflights",
|
||||||
Version: "latest",
|
Version: "latest",
|
||||||
},
|
},
|
||||||
|
|
@ -87,7 +87,7 @@ func GetAvailableDataSources() []DataSource {
|
||||||
Name: "OpenFlights Airports",
|
Name: "OpenFlights Airports",
|
||||||
License: "AGPL-3.0",
|
License: "AGPL-3.0",
|
||||||
URL: "https://raw.githubusercontent.com/jpatokal/openflights/master/data/airports.dat",
|
URL: "https://raw.githubusercontent.com/jpatokal/openflights/master/data/airports.dat",
|
||||||
RequiresConsent: false, // Runtime data consumption doesn't require explicit consent
|
RequiresConsent: true,
|
||||||
Format: "openflights",
|
Format: "openflights",
|
||||||
Version: "latest",
|
Version: "latest",
|
||||||
},
|
},
|
||||||
|
|
@ -169,7 +169,7 @@ func (dl *DataLoader) loadOpenFlightsAirlines(reader io.Reader, source DataSourc
|
||||||
csvReader.FieldsPerRecord = -1 // Variable number of fields
|
csvReader.FieldsPerRecord = -1 // Variable number of fields
|
||||||
|
|
||||||
insertStmt, err := tx.Prepare(`
|
insertStmt, err := tx.Prepare(`
|
||||||
INSERT OR REPLACE INTO airlines (id, name, alias, iata_code, icao_code, callsign, country, active, data_source)
|
INSERT INTO airlines (id, name, alias, iata, icao, callsign, country, active, data_source)
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
`)
|
`)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
@ -255,8 +255,8 @@ func (dl *DataLoader) loadOpenFlightsAirports(reader io.Reader, source DataSourc
|
||||||
csvReader.FieldsPerRecord = -1
|
csvReader.FieldsPerRecord = -1
|
||||||
|
|
||||||
insertStmt, err := tx.Prepare(`
|
insertStmt, err := tx.Prepare(`
|
||||||
INSERT OR REPLACE INTO airports (id, name, city, country, iata_code, icao_code, latitude, longitude,
|
INSERT INTO airports (id, name, city, country, iata, icao, latitude, longitude,
|
||||||
elevation_ft, timezone_offset, dst_type, timezone, data_source)
|
altitude, timezone_offset, dst_type, timezone, data_source)
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
`)
|
`)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
|
||||||
|
|
@ -1,177 +0,0 @@
|
||||||
package database
|
|
||||||
|
|
||||||
import (
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestDataLoader_Creation(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
loader := NewDataLoader(db.GetConnection())
|
|
||||||
if loader == nil {
|
|
||||||
t.Fatal("NewDataLoader returned nil")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDataLoader_LoadOpenFlightsAirlines(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
loader := NewDataLoader(db.GetConnection())
|
|
||||||
|
|
||||||
// Create a test data source for OpenFlights Airlines
|
|
||||||
source := DataSource{
|
|
||||||
Name: "OpenFlights Airlines Test",
|
|
||||||
License: "ODbL 1.0",
|
|
||||||
URL: "https://raw.githubusercontent.com/jpatokal/openflights/master/data/airlines.dat",
|
|
||||||
Format: "openflights",
|
|
||||||
Version: "2024-test",
|
|
||||||
}
|
|
||||||
|
|
||||||
result, err := loader.LoadDataSource(source)
|
|
||||||
if err != nil {
|
|
||||||
// Network issues in tests are acceptable
|
|
||||||
if strings.Contains(err.Error(), "connection") ||
|
|
||||||
strings.Contains(err.Error(), "timeout") ||
|
|
||||||
strings.Contains(err.Error(), "no such host") {
|
|
||||||
t.Skipf("Skipping network test due to connectivity issue: %v", err)
|
|
||||||
}
|
|
||||||
t.Fatal("LoadDataSource failed:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if result == nil {
|
|
||||||
t.Fatal("Expected load result, got nil")
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Logf("Loaded airlines: Total=%d, New=%d, Errors=%d, Duration=%v",
|
|
||||||
result.RecordsTotal, result.RecordsNew, result.RecordsError, result.Duration)
|
|
||||||
|
|
||||||
// Verify some data was processed
|
|
||||||
if result.RecordsTotal == 0 {
|
|
||||||
t.Error("No records were processed")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDataLoader_LoadOurAirports(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
loader := NewDataLoader(db.GetConnection())
|
|
||||||
|
|
||||||
// Create a test data source for OurAirports
|
|
||||||
source := DataSource{
|
|
||||||
Name: "OurAirports Test",
|
|
||||||
License: "CC0 1.0",
|
|
||||||
URL: "https://davidmegginson.github.io/ourairports-data/airports.csv",
|
|
||||||
Format: "ourairports",
|
|
||||||
Version: "2024-test",
|
|
||||||
}
|
|
||||||
|
|
||||||
result, err := loader.LoadDataSource(source)
|
|
||||||
if err != nil {
|
|
||||||
// Network issues in tests are acceptable
|
|
||||||
if strings.Contains(err.Error(), "connection") ||
|
|
||||||
strings.Contains(err.Error(), "timeout") ||
|
|
||||||
strings.Contains(err.Error(), "no such host") {
|
|
||||||
t.Skipf("Skipping network test due to connectivity issue: %v", err)
|
|
||||||
}
|
|
||||||
t.Fatal("LoadDataSource failed:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if result != nil {
|
|
||||||
t.Logf("Loaded airports: Total=%d, New=%d, Errors=%d, Duration=%v",
|
|
||||||
result.RecordsTotal, result.RecordsNew, result.RecordsError, result.Duration)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDataLoader_GetLoadedDataSources(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
loader := NewDataLoader(db.GetConnection())
|
|
||||||
|
|
||||||
sources, err := loader.GetLoadedDataSources()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("GetLoadedDataSources failed:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Initially should be empty or minimal
|
|
||||||
t.Logf("Found %d loaded data sources", len(sources))
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDataLoader_ClearDataSource(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
loader := NewDataLoader(db.GetConnection())
|
|
||||||
|
|
||||||
// Test clearing a non-existent source (should not error)
|
|
||||||
err := loader.ClearDataSource("nonexistent")
|
|
||||||
if err != nil {
|
|
||||||
t.Error("ClearDataSource should not error on nonexistent source:", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDataSource_Struct(t *testing.T) {
|
|
||||||
source := DataSource{
|
|
||||||
Name: "Test Source",
|
|
||||||
License: "Test License",
|
|
||||||
URL: "https://example.com/data.csv",
|
|
||||||
RequiresConsent: false,
|
|
||||||
UserAcceptedLicense: true,
|
|
||||||
Format: "csv",
|
|
||||||
Version: "1.0",
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test that all fields are accessible
|
|
||||||
if source.Name != "Test Source" {
|
|
||||||
t.Error("Name field not preserved")
|
|
||||||
}
|
|
||||||
if source.License != "Test License" {
|
|
||||||
t.Error("License field not preserved")
|
|
||||||
}
|
|
||||||
if source.URL != "https://example.com/data.csv" {
|
|
||||||
t.Error("URL field not preserved")
|
|
||||||
}
|
|
||||||
if source.RequiresConsent != false {
|
|
||||||
t.Error("RequiresConsent field not preserved")
|
|
||||||
}
|
|
||||||
if source.UserAcceptedLicense != true {
|
|
||||||
t.Error("UserAcceptedLicense field not preserved")
|
|
||||||
}
|
|
||||||
if source.Format != "csv" {
|
|
||||||
t.Error("Format field not preserved")
|
|
||||||
}
|
|
||||||
if source.Version != "1.0" {
|
|
||||||
t.Error("Version field not preserved")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestLoadResult_Struct(t *testing.T) {
|
|
||||||
result := LoadResult{
|
|
||||||
Source: "Test Source",
|
|
||||||
RecordsTotal: 100,
|
|
||||||
RecordsNew: 80,
|
|
||||||
RecordsError: 5,
|
|
||||||
Errors: []string{"error1", "error2"},
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test that all fields are accessible
|
|
||||||
if result.Source != "Test Source" {
|
|
||||||
t.Error("Source field not preserved")
|
|
||||||
}
|
|
||||||
if result.RecordsTotal != 100 {
|
|
||||||
t.Error("RecordsTotal field not preserved")
|
|
||||||
}
|
|
||||||
if result.RecordsNew != 80 {
|
|
||||||
t.Error("RecordsNew field not preserved")
|
|
||||||
}
|
|
||||||
if result.RecordsError != 5 {
|
|
||||||
t.Error("RecordsError field not preserved")
|
|
||||||
}
|
|
||||||
if len(result.Errors) != 2 {
|
|
||||||
t.Error("Errors field not preserved")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,268 +0,0 @@
|
||||||
package database
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestCallsignManager_Creation(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
manager := NewCallsignManager(db.GetConnection())
|
|
||||||
if manager == nil {
|
|
||||||
t.Fatal("NewCallsignManager returned nil")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCallsignManager_ParseCallsign(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
manager := NewCallsignManager(db.GetConnection())
|
|
||||||
|
|
||||||
testCases := []struct {
|
|
||||||
callsign string
|
|
||||||
expectedValid bool
|
|
||||||
expectedAirline string
|
|
||||||
expectedFlight string
|
|
||||||
}{
|
|
||||||
{"UAL123", true, "UAL", "123"},
|
|
||||||
{"BA4567", true, "BA", "4567"},
|
|
||||||
{"AFR89", true, "AFR", "89"},
|
|
||||||
{"N123AB", false, "", ""}, // Aircraft registration, not callsign
|
|
||||||
{"INVALID", false, "", ""}, // No numbers
|
|
||||||
{"123", false, "", ""}, // Only numbers
|
|
||||||
{"A", false, "", ""}, // Too short
|
|
||||||
{"", false, "", ""}, // Empty
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, tc := range testCases {
|
|
||||||
result := manager.ParseCallsign(tc.callsign)
|
|
||||||
if result.IsValid != tc.expectedValid {
|
|
||||||
t.Errorf("ParseCallsign(%s): expected valid=%v, got %v",
|
|
||||||
tc.callsign, tc.expectedValid, result.IsValid)
|
|
||||||
}
|
|
||||||
if result.IsValid && result.AirlineCode != tc.expectedAirline {
|
|
||||||
t.Errorf("ParseCallsign(%s): expected airline=%s, got %s",
|
|
||||||
tc.callsign, tc.expectedAirline, result.AirlineCode)
|
|
||||||
}
|
|
||||||
if result.IsValid && result.FlightNumber != tc.expectedFlight {
|
|
||||||
t.Errorf("ParseCallsign(%s): expected flight=%s, got %s",
|
|
||||||
tc.callsign, tc.expectedFlight, result.FlightNumber)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCallsignManager_GetCallsignInfo(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
manager := NewCallsignManager(db.GetConnection())
|
|
||||||
|
|
||||||
// Insert test airline data
|
|
||||||
conn := db.GetConnection()
|
|
||||||
_, err := conn.Exec(`
|
|
||||||
INSERT INTO airlines (id, name, alias, iata_code, icao_code, callsign, country, active, data_source)
|
|
||||||
VALUES (1, 'Test Airways', 'Test', 'TA', 'TST', 'TESTAIR', 'United States', 1, 'test')
|
|
||||||
`)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to insert test data:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test valid callsign
|
|
||||||
info, err := manager.GetCallsignInfo("TST123")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("GetCallsignInfo failed:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if info == nil {
|
|
||||||
t.Fatal("Expected callsign info, got nil")
|
|
||||||
}
|
|
||||||
|
|
||||||
if info.OriginalCallsign != "TST123" {
|
|
||||||
t.Errorf("Expected callsign TST123, got %s", info.OriginalCallsign)
|
|
||||||
}
|
|
||||||
if info.AirlineCode != "TST" {
|
|
||||||
t.Errorf("Expected airline code TST, got %s", info.AirlineCode)
|
|
||||||
}
|
|
||||||
if info.FlightNumber != "123" {
|
|
||||||
t.Errorf("Expected flight number 123, got %s", info.FlightNumber)
|
|
||||||
}
|
|
||||||
if info.AirlineName != "Test Airways" {
|
|
||||||
t.Errorf("Expected airline name 'Test Airways', got %s", info.AirlineName)
|
|
||||||
}
|
|
||||||
if info.AirlineCountry != "United States" {
|
|
||||||
t.Errorf("Expected airline country 'United States', got %s", info.AirlineCountry)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCallsignManager_GetCallsignInfo_InvalidCallsign(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
manager := NewCallsignManager(db.GetConnection())
|
|
||||||
|
|
||||||
// Test with invalid callsign format
|
|
||||||
info, err := manager.GetCallsignInfo("INVALID")
|
|
||||||
if err != nil {
|
|
||||||
t.Error("GetCallsignInfo should not error on invalid format:", err)
|
|
||||||
}
|
|
||||||
if info == nil {
|
|
||||||
t.Fatal("Expected info structure even for invalid callsign")
|
|
||||||
}
|
|
||||||
if info.IsValid {
|
|
||||||
t.Error("Invalid callsign should not be marked as valid")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test with unknown airline
|
|
||||||
info, err = manager.GetCallsignInfo("UNK123")
|
|
||||||
if err != nil {
|
|
||||||
t.Error("GetCallsignInfo should not error on unknown airline:", err)
|
|
||||||
}
|
|
||||||
if info == nil {
|
|
||||||
t.Fatal("Expected info structure for unknown airline")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCallsignManager_GetCallsignInfo_EmptyCallsign(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
manager := NewCallsignManager(db.GetConnection())
|
|
||||||
|
|
||||||
// Test with empty callsign
|
|
||||||
info, err := manager.GetCallsignInfo("")
|
|
||||||
if err == nil {
|
|
||||||
t.Error("GetCallsignInfo should error on empty callsign")
|
|
||||||
}
|
|
||||||
if info != nil {
|
|
||||||
t.Error("Expected nil info for empty callsign")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCallsignManager_ClearExpiredCache(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
manager := NewCallsignManager(db.GetConnection())
|
|
||||||
|
|
||||||
err := manager.ClearExpiredCache()
|
|
||||||
if err != nil {
|
|
||||||
t.Error("ClearExpiredCache should not error:", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCallsignManager_GetCacheStats(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
manager := NewCallsignManager(db.GetConnection())
|
|
||||||
|
|
||||||
stats, err := manager.GetCacheStats()
|
|
||||||
if err != nil {
|
|
||||||
t.Error("GetCacheStats should not error:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if stats == nil {
|
|
||||||
t.Error("Expected cache stats, got nil")
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Logf("Cache stats: %+v", stats)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCallsignManager_SearchAirlines(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
manager := NewCallsignManager(db.GetConnection())
|
|
||||||
|
|
||||||
// Insert test airline data
|
|
||||||
conn := db.GetConnection()
|
|
||||||
_, err := conn.Exec(`
|
|
||||||
INSERT INTO airlines (id, name, alias, iata_code, icao_code, callsign, country, active, data_source)
|
|
||||||
VALUES (1, 'Test Airways', 'Test', 'TA', 'TST', 'TESTAIR', 'United States', 1, 'test'),
|
|
||||||
(2, 'Another Airline', 'Another', 'AA', 'ANO', 'ANOTHER', 'Canada', 1, 'test')
|
|
||||||
`)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to insert test data:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Search for airlines
|
|
||||||
airlines, err := manager.SearchAirlines("Test")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("SearchAirlines failed:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
found := false
|
|
||||||
for _, airline := range airlines {
|
|
||||||
if airline.Name == "Test Airways" {
|
|
||||||
found = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !found {
|
|
||||||
t.Error("Expected to find Test Airways in search results")
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Logf("Found %d airlines matching 'Test'", len(airlines))
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCallsignManager_GetAirlinesByCountry(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
manager := NewCallsignManager(db.GetConnection())
|
|
||||||
|
|
||||||
// Insert test airline data
|
|
||||||
conn := db.GetConnection()
|
|
||||||
_, err := conn.Exec(`
|
|
||||||
INSERT INTO airlines (id, name, alias, iata_code, icao_code, callsign, country, active, data_source)
|
|
||||||
VALUES (1, 'US Airways', 'US', 'US', 'USA', 'USAIR', 'United States', 1, 'test'),
|
|
||||||
(2, 'Canada Air', 'CA', 'CA', 'CAN', 'CANAIR', 'Canada', 1, 'test')
|
|
||||||
`)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to insert test data:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get airlines by country
|
|
||||||
airlines, err := manager.GetAirlinesByCountry("United States")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("GetAirlinesByCountry failed:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
found := false
|
|
||||||
for _, airline := range airlines {
|
|
||||||
if airline.Name == "US Airways" {
|
|
||||||
found = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !found {
|
|
||||||
t.Error("Expected to find US Airways for United States")
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Logf("Found %d airlines in United States", len(airlines))
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCallsignParseResult_Struct(t *testing.T) {
|
|
||||||
result := &CallsignParseResult{
|
|
||||||
OriginalCallsign: "UAL123",
|
|
||||||
AirlineCode: "UAL",
|
|
||||||
FlightNumber: "123",
|
|
||||||
IsValid: true,
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test that all fields are accessible
|
|
||||||
if result.OriginalCallsign != "UAL123" {
|
|
||||||
t.Error("OriginalCallsign field not preserved")
|
|
||||||
}
|
|
||||||
if result.AirlineCode != "UAL" {
|
|
||||||
t.Error("AirlineCode field not preserved")
|
|
||||||
}
|
|
||||||
if result.FlightNumber != "123" {
|
|
||||||
t.Error("FlightNumber field not preserved")
|
|
||||||
}
|
|
||||||
if !result.IsValid {
|
|
||||||
t.Error("IsValid field not preserved")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,208 +0,0 @@
|
||||||
package database
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
// OptimizationManager handles database storage optimization using SQLite built-in features
type OptimizationManager struct {
	db         *Database // database whose storage is being managed
	config     *Config   // supplies Path and VacuumInterval
	lastVacuum time.Time // time of the most recent successful VACUUM (zero until first run)
}
|
|
||||||
|
|
||||||
// NewOptimizationManager creates a new optimization manager
|
|
||||||
func NewOptimizationManager(db *Database, config *Config) *OptimizationManager {
|
|
||||||
return &OptimizationManager{
|
|
||||||
db: db,
|
|
||||||
config: config,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// PerformMaintenance runs database maintenance tasks including VACUUM
|
|
||||||
func (om *OptimizationManager) PerformMaintenance() error {
|
|
||||||
now := time.Now()
|
|
||||||
|
|
||||||
// Check if VACUUM is needed
|
|
||||||
if om.config.VacuumInterval > 0 && now.Sub(om.lastVacuum) >= om.config.VacuumInterval {
|
|
||||||
if err := om.VacuumDatabase(); err != nil {
|
|
||||||
return fmt.Errorf("vacuum failed: %w", err)
|
|
||||||
}
|
|
||||||
om.lastVacuum = now
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// VacuumDatabase performs VACUUM to reclaim space and optimize database
|
|
||||||
func (om *OptimizationManager) VacuumDatabase() error {
|
|
||||||
conn := om.db.GetConnection()
|
|
||||||
if conn == nil {
|
|
||||||
return fmt.Errorf("database connection not available")
|
|
||||||
}
|
|
||||||
|
|
||||||
start := time.Now()
|
|
||||||
|
|
||||||
// Get size before VACUUM
|
|
||||||
sizeBefore, err := om.getDatabaseSize()
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed to get database size: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Perform VACUUM
|
|
||||||
if _, err := conn.Exec("VACUUM"); err != nil {
|
|
||||||
return fmt.Errorf("VACUUM operation failed: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get size after VACUUM
|
|
||||||
sizeAfter, err := om.getDatabaseSize()
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed to get database size after VACUUM: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
duration := time.Since(start)
|
|
||||||
savedBytes := sizeBefore - sizeAfter
|
|
||||||
savedPercent := float64(savedBytes) / float64(sizeBefore) * 100
|
|
||||||
|
|
||||||
fmt.Printf("VACUUM completed in %v: %.1f MB → %.1f MB (saved %.1f MB, %.1f%%)\n",
|
|
||||||
duration,
|
|
||||||
float64(sizeBefore)/(1024*1024),
|
|
||||||
float64(sizeAfter)/(1024*1024),
|
|
||||||
float64(savedBytes)/(1024*1024),
|
|
||||||
savedPercent)
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// OptimizeDatabase applies various SQLite optimizations for better storage efficiency
|
|
||||||
func (om *OptimizationManager) OptimizeDatabase() error {
|
|
||||||
conn := om.db.GetConnection()
|
|
||||||
if conn == nil {
|
|
||||||
return fmt.Errorf("database connection not available")
|
|
||||||
}
|
|
||||||
|
|
||||||
fmt.Println("Optimizing database for storage efficiency...")
|
|
||||||
|
|
||||||
// Apply storage-friendly pragmas
|
|
||||||
optimizations := []struct{
|
|
||||||
name string
|
|
||||||
query string
|
|
||||||
description string
|
|
||||||
}{
|
|
||||||
{"Auto VACUUM", "PRAGMA auto_vacuum = INCREMENTAL", "Enable incremental auto-vacuum"},
|
|
||||||
{"Incremental VACUUM", "PRAGMA incremental_vacuum", "Reclaim free pages incrementally"},
|
|
||||||
{"Optimize", "PRAGMA optimize", "Update SQLite query planner statistics"},
|
|
||||||
{"Analyze", "ANALYZE", "Update table statistics for better query plans"},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, opt := range optimizations {
|
|
||||||
if _, err := conn.Exec(opt.query); err != nil {
|
|
||||||
fmt.Printf("Warning: %s failed: %v\n", opt.name, err)
|
|
||||||
} else {
|
|
||||||
fmt.Printf("✓ %s: %s\n", opt.name, opt.description)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// OptimizePageSize sets an optimal page size for the database (requires rebuild)
|
|
||||||
func (om *OptimizationManager) OptimizePageSize(pageSize int) error {
|
|
||||||
conn := om.db.GetConnection()
|
|
||||||
if conn == nil {
|
|
||||||
return fmt.Errorf("database connection not available")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check current page size
|
|
||||||
var currentPageSize int
|
|
||||||
if err := conn.QueryRow("PRAGMA page_size").Scan(¤tPageSize); err != nil {
|
|
||||||
return fmt.Errorf("failed to get current page size: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if currentPageSize == pageSize {
|
|
||||||
fmt.Printf("Page size already optimal: %d bytes\n", pageSize)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
fmt.Printf("Optimizing page size: %d → %d bytes (requires VACUUM)\n", currentPageSize, pageSize)
|
|
||||||
|
|
||||||
// Set new page size
|
|
||||||
query := fmt.Sprintf("PRAGMA page_size = %d", pageSize)
|
|
||||||
if _, err := conn.Exec(query); err != nil {
|
|
||||||
return fmt.Errorf("failed to set page size: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// VACUUM to apply the new page size
|
|
||||||
if err := om.VacuumDatabase(); err != nil {
|
|
||||||
return fmt.Errorf("failed to apply page size change: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetOptimizationStats returns current database optimization statistics
|
|
||||||
func (om *OptimizationManager) GetOptimizationStats() (*OptimizationStats, error) {
|
|
||||||
stats := &OptimizationStats{}
|
|
||||||
|
|
||||||
// Get database size
|
|
||||||
size, err := om.getDatabaseSize()
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
stats.DatabaseSize = size
|
|
||||||
|
|
||||||
// Get page statistics
|
|
||||||
conn := om.db.GetConnection()
|
|
||||||
if conn != nil {
|
|
||||||
var pageSize, pageCount, freelistCount int
|
|
||||||
conn.QueryRow("PRAGMA page_size").Scan(&pageSize)
|
|
||||||
conn.QueryRow("PRAGMA page_count").Scan(&pageCount)
|
|
||||||
conn.QueryRow("PRAGMA freelist_count").Scan(&freelistCount)
|
|
||||||
|
|
||||||
stats.PageSize = pageSize
|
|
||||||
stats.PageCount = pageCount
|
|
||||||
stats.FreePages = freelistCount
|
|
||||||
stats.UsedPages = pageCount - freelistCount
|
|
||||||
|
|
||||||
if pageCount > 0 {
|
|
||||||
stats.Efficiency = float64(stats.UsedPages) / float64(pageCount) * 100
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check auto vacuum setting
|
|
||||||
var autoVacuum int
|
|
||||||
conn.QueryRow("PRAGMA auto_vacuum").Scan(&autoVacuum)
|
|
||||||
stats.AutoVacuumEnabled = autoVacuum > 0
|
|
||||||
}
|
|
||||||
|
|
||||||
stats.LastVacuum = om.lastVacuum
|
|
||||||
|
|
||||||
return stats, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// OptimizationStats holds database storage optimization statistics
type OptimizationStats struct {
	DatabaseSize      int64     `json:"database_size"`       // on-disk file size in bytes
	PageSize          int       `json:"page_size"`           // SQLite page size in bytes
	PageCount         int       `json:"page_count"`          // total pages in the file
	UsedPages         int       `json:"used_pages"`          // page_count minus freelist pages
	FreePages         int       `json:"free_pages"`          // pages on the freelist
	Efficiency        float64   `json:"efficiency_percent"`  // used/total pages as a percentage
	AutoVacuumEnabled bool      `json:"auto_vacuum_enabled"` // true when PRAGMA auto_vacuum != 0
	LastVacuum        time.Time `json:"last_vacuum"`         // last VACUUM run by this manager (zero if never)
}
|
|
||||||
|
|
||||||
// getDatabaseSize returns the current database file size in bytes
|
|
||||||
func (om *OptimizationManager) getDatabaseSize() (int64, error) {
|
|
||||||
if om.config.Path == "" {
|
|
||||||
return 0, fmt.Errorf("database path not configured")
|
|
||||||
}
|
|
||||||
|
|
||||||
stat, err := os.Stat(om.config.Path)
|
|
||||||
if err != nil {
|
|
||||||
return 0, fmt.Errorf("failed to stat database file: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return stat.Size(), nil
|
|
||||||
}
|
|
||||||
|
|
@ -1,307 +0,0 @@
|
||||||
package database
|
|
||||||
|
|
||||||
import (
|
|
||||||
"os"
|
|
||||||
"testing"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestOptimizationManager_VacuumDatabase(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
config := &Config{Path: db.config.Path}
|
|
||||||
optimizer := NewOptimizationManager(db, config)
|
|
||||||
|
|
||||||
err := optimizer.VacuumDatabase()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("VacuumDatabase failed:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify vacuum was successful by checking database integrity
|
|
||||||
conn := db.GetConnection()
|
|
||||||
var result string
|
|
||||||
err = conn.QueryRow("PRAGMA integrity_check").Scan(&result)
|
|
||||||
if err != nil {
|
|
||||||
t.Error("Failed to run integrity check:", err)
|
|
||||||
}
|
|
||||||
if result != "ok" {
|
|
||||||
t.Errorf("Database integrity check failed: %s", result)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestOptimizationManager_OptimizeDatabase(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
config := &Config{Path: db.config.Path}
|
|
||||||
optimizer := NewOptimizationManager(db, config)
|
|
||||||
|
|
||||||
err := optimizer.OptimizeDatabase()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("OptimizeDatabase failed:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check that auto_vacuum was set
|
|
||||||
conn := db.GetConnection()
|
|
||||||
var autoVacuum int
|
|
||||||
err = conn.QueryRow("PRAGMA auto_vacuum").Scan(&autoVacuum)
|
|
||||||
if err != nil {
|
|
||||||
t.Error("Failed to check auto_vacuum setting:", err)
|
|
||||||
}
|
|
||||||
// Should be 2 (INCREMENTAL) after optimization
|
|
||||||
if autoVacuum != 2 {
|
|
||||||
t.Errorf("Expected auto_vacuum = 2 (INCREMENTAL), got %d", autoVacuum)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestOptimizationManager_OptimizePageSize(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
config := &Config{Path: db.config.Path}
|
|
||||||
optimizer := NewOptimizationManager(db, config)
|
|
||||||
|
|
||||||
// Get current page size
|
|
||||||
conn := db.GetConnection()
|
|
||||||
var currentPageSize int
|
|
||||||
err := conn.QueryRow("PRAGMA page_size").Scan(¤tPageSize)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to get current page size:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Set a different page size
|
|
||||||
targetPageSize := 8192
|
|
||||||
if currentPageSize == targetPageSize {
|
|
||||||
targetPageSize = 4096 // Use different size if already at target
|
|
||||||
}
|
|
||||||
|
|
||||||
err = optimizer.OptimizePageSize(targetPageSize)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("OptimizePageSize failed:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify page size was changed
|
|
||||||
var newPageSize int
|
|
||||||
err = conn.QueryRow("PRAGMA page_size").Scan(&newPageSize)
|
|
||||||
if err != nil {
|
|
||||||
t.Error("Failed to get new page size:", err)
|
|
||||||
}
|
|
||||||
if newPageSize != targetPageSize {
|
|
||||||
t.Errorf("Expected page size %d, got %d", targetPageSize, newPageSize)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test setting same page size (should be no-op)
|
|
||||||
err = optimizer.OptimizePageSize(targetPageSize)
|
|
||||||
if err != nil {
|
|
||||||
t.Error("OptimizePageSize failed for same page size:", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestOptimizationManager_GetOptimizationStats(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
config := &Config{Path: db.config.Path}
|
|
||||||
optimizer := NewOptimizationManager(db, config)
|
|
||||||
|
|
||||||
// Insert some test data to make stats more meaningful
|
|
||||||
conn := db.GetConnection()
|
|
||||||
_, err := conn.Exec(`
|
|
||||||
INSERT INTO airlines (id, name, alias, iata_code, icao_code, callsign, country, active, data_source)
|
|
||||||
VALUES (1, 'Test Airways', 'Test', 'TA', 'TST', 'TESTAIR', 'United States', 1, 'test')
|
|
||||||
`)
|
|
||||||
if err != nil {
|
|
||||||
t.Error("Failed to insert test data:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
stats, err := optimizer.GetOptimizationStats()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("GetOptimizationStats failed:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if stats == nil {
|
|
||||||
t.Fatal("Expected stats, got nil")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check basic stats
|
|
||||||
if stats.DatabaseSize <= 0 {
|
|
||||||
t.Error("Database size should be greater than 0")
|
|
||||||
}
|
|
||||||
if stats.PageSize <= 0 {
|
|
||||||
t.Error("Page size should be greater than 0")
|
|
||||||
}
|
|
||||||
if stats.PageCount <= 0 {
|
|
||||||
t.Error("Page count should be greater than 0")
|
|
||||||
}
|
|
||||||
if stats.UsedPages < 0 {
|
|
||||||
t.Error("Used pages should be non-negative")
|
|
||||||
}
|
|
||||||
if stats.FreePages < 0 {
|
|
||||||
t.Error("Free pages should be non-negative")
|
|
||||||
}
|
|
||||||
if stats.Efficiency < 0 || stats.Efficiency > 100 {
|
|
||||||
t.Errorf("Efficiency should be between 0-100%%, got %.2f%%", stats.Efficiency)
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Logf("Database stats: Size=%d bytes, Pages=%d (used=%d, free=%d), Efficiency=%.1f%%",
|
|
||||||
stats.DatabaseSize, stats.PageCount, stats.UsedPages, stats.FreePages, stats.Efficiency)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestOptimizationManager_PerformMaintenance(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
config := &Config{
|
|
||||||
Path: db.config.Path,
|
|
||||||
VacuumInterval: time.Millisecond, // Very short interval for testing
|
|
||||||
}
|
|
||||||
optimizer := NewOptimizationManager(db, config)
|
|
||||||
|
|
||||||
// Should perform vacuum due to short interval
|
|
||||||
err := optimizer.PerformMaintenance()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("PerformMaintenance failed:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check that lastVacuum was updated
|
|
||||||
if optimizer.lastVacuum.IsZero() {
|
|
||||||
t.Error("lastVacuum should be set after maintenance")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Wait a bit and run again with longer interval
|
|
||||||
config.VacuumInterval = time.Hour // Long interval
|
|
||||||
err = optimizer.PerformMaintenance()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Second PerformMaintenance failed:", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestOptimizationManager_getDatabaseSize(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
config := &Config{Path: db.config.Path}
|
|
||||||
optimizer := NewOptimizationManager(db, config)
|
|
||||||
|
|
||||||
size, err := optimizer.getDatabaseSize()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("getDatabaseSize failed:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if size <= 0 {
|
|
||||||
t.Error("Database size should be greater than 0")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify size matches actual file size
|
|
||||||
stat, err := os.Stat(db.config.Path)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to stat database file:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if size != stat.Size() {
|
|
||||||
t.Errorf("getDatabaseSize returned %d, but file size is %d", size, stat.Size())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestOptimizationManager_InvalidPath(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
// Test with invalid path
|
|
||||||
config := &Config{Path: "/nonexistent/path/database.db"}
|
|
||||||
optimizer := NewOptimizationManager(db, config)
|
|
||||||
|
|
||||||
_, err := optimizer.getDatabaseSize()
|
|
||||||
if err == nil {
|
|
||||||
t.Error("getDatabaseSize should fail with invalid path")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestOptimizationStats_JSON(t *testing.T) {
|
|
||||||
stats := &OptimizationStats{
|
|
||||||
DatabaseSize: 1024000,
|
|
||||||
PageSize: 4096,
|
|
||||||
PageCount: 250,
|
|
||||||
UsedPages: 200,
|
|
||||||
FreePages: 50,
|
|
||||||
Efficiency: 80.0,
|
|
||||||
AutoVacuumEnabled: true,
|
|
||||||
LastVacuum: time.Now(),
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test that all fields are accessible
|
|
||||||
if stats.DatabaseSize != 1024000 {
|
|
||||||
t.Error("DatabaseSize not preserved")
|
|
||||||
}
|
|
||||||
if stats.PageSize != 4096 {
|
|
||||||
t.Error("PageSize not preserved")
|
|
||||||
}
|
|
||||||
if stats.Efficiency != 80.0 {
|
|
||||||
t.Error("Efficiency not preserved")
|
|
||||||
}
|
|
||||||
if !stats.AutoVacuumEnabled {
|
|
||||||
t.Error("AutoVacuumEnabled not preserved")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestOptimizationManager_WithRealData(t *testing.T) {
|
|
||||||
db, cleanup := setupTestDatabase(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
// Load some real data to make optimization more realistic
|
|
||||||
// Skip actual data loading in tests as it requires network access
|
|
||||||
// Just insert minimal test data
|
|
||||||
conn := db.GetConnection()
|
|
||||||
_, err := conn.Exec(`INSERT INTO airlines (id, name, alias, iata_code, icao_code, callsign, country, active, data_source)
|
|
||||||
VALUES (1, 'Test Airways', 'Test', 'TA', 'TST', 'TESTAIR', 'United States', 1, 'test')`)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to insert test data:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
config := &Config{Path: db.config.Path}
|
|
||||||
optimizer := NewOptimizationManager(db, config)
|
|
||||||
|
|
||||||
// Get stats before optimization
|
|
||||||
statsBefore, err := optimizer.GetOptimizationStats()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to get stats before optimization:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Run optimization
|
|
||||||
err = optimizer.OptimizeDatabase()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("OptimizeDatabase failed:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
err = optimizer.VacuumDatabase()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("VacuumDatabase failed:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get stats after optimization
|
|
||||||
statsAfter, err := optimizer.GetOptimizationStats()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to get stats after optimization:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Compare efficiency
|
|
||||||
t.Logf("Optimization results: %.2f%% → %.2f%% efficiency",
|
|
||||||
statsBefore.Efficiency, statsAfter.Efficiency)
|
|
||||||
|
|
||||||
// After optimization, we should have auto-vacuum enabled
|
|
||||||
if !statsAfter.AutoVacuumEnabled {
|
|
||||||
t.Error("Auto-vacuum should be enabled after optimization")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Database should still be functional
|
|
||||||
conn = db.GetConnection()
|
|
||||||
var count int
|
|
||||||
err = conn.QueryRow("SELECT COUNT(*) FROM airlines").Scan(&count)
|
|
||||||
if err != nil {
|
|
||||||
t.Error("Database not functional after optimization:", err)
|
|
||||||
}
|
|
||||||
if count == 0 {
|
|
||||||
t.Error("Data lost during optimization")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,36 +0,0 @@
|
||||||
package database
|
|
||||||
|
|
||||||
import (
|
|
||||||
"os"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
// setupTestDatabase creates a temporary database for testing
|
|
||||||
func setupTestDatabase(t *testing.T) (*Database, func()) {
|
|
||||||
tempFile, err := os.CreateTemp("", "test_skyview_*.db")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to create temp database file:", err)
|
|
||||||
}
|
|
||||||
tempFile.Close()
|
|
||||||
|
|
||||||
config := &Config{Path: tempFile.Name()}
|
|
||||||
db, err := NewDatabase(config)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal("Failed to create database:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Initialize the database (run migrations)
|
|
||||||
err = db.Initialize()
|
|
||||||
if err != nil {
|
|
||||||
db.Close()
|
|
||||||
os.Remove(tempFile.Name())
|
|
||||||
t.Fatal("Failed to initialize database:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
cleanup := func() {
|
|
||||||
db.Close()
|
|
||||||
os.Remove(tempFile.Name())
|
|
||||||
}
|
|
||||||
|
|
||||||
return db, cleanup
|
|
||||||
}
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue