"""
|
|
3D Video Animation Generator
|
|
Creates Relive-style 3D video animations from GPS route data
|
|
"""
|
|
|
|
import json
|
|
import os
|
|
import math
|
|
import requests
|
|
import cv2
|
|
import numpy as np
|
|
from PIL import Image, ImageDraw, ImageFont
|
|
import tempfile
|
|
import shutil
|
|
from datetime import datetime
|
|
|
|
def generate_3d_video_animation(project_name, resources_folder, label_widget, progress_widget, popup_widget, clock_module):
    """
    Generate a 3D video animation similar to Relive

    Args:
        project_name: Name of the project
        resources_folder: Path to resources folder
        label_widget: Kivy label for status updates
        progress_widget: Kivy progress bar
        popup_widget: Kivy popup to dismiss when done
        clock_module: Kivy Clock module for scheduling
    """

    def update_progress(progress_val, status_text):
        """Update UI from background thread"""
        def _update(dt):
            progress_widget.value = progress_val
            label_widget.text = status_text
        clock_module.schedule_once(_update, 0)

    def finish_generation(success, message, output_path=None):
        """Finish the generation process"""
        def _finish(dt):
            if popup_widget:
                popup_widget.dismiss()

            # Show result popup
            from kivy.uix.popup import Popup
            from kivy.uix.boxlayout import BoxLayout
            from kivy.uix.button import Button
            from kivy.uix.label import Label

            result_layout = BoxLayout(orientation='vertical', spacing=10, padding=10)

            if success:
                result_label = Label(
                    text=f"3D Video Generated Successfully!\n\nSaved to:\n{output_path}",
                    color=(0, 1, 0, 1),
                    halign="center"
                )
                open_btn = Button(
                    text="Open Video Folder",
                    size_hint_y=None,
                    height=40,
                    background_color=(0.2, 0.7, 0.2, 1)
                )
                open_btn.bind(on_press=lambda x: (os.system(f"xdg-open '{os.path.dirname(output_path)}'"), result_popup.dismiss()))
                result_layout.add_widget(result_label)
                result_layout.add_widget(open_btn)
            else:
                result_label = Label(
                    text=f"Generation Failed:\n{message}",
                    color=(1, 0, 0, 1),
                    halign="center"
                )
                result_layout.add_widget(result_label)

            close_btn = Button(
                text="Close",
                size_hint_y=None,
                height=40,
                background_color=(0.3, 0.3, 0.3, 1)
            )

            result_layout.add_widget(close_btn)

            result_popup = Popup(
                title="3D Video Generation Result",
                content=result_layout,
                size_hint=(0.9, 0.6),
                auto_dismiss=False
            )

            close_btn.bind(on_press=lambda x: result_popup.dismiss())
            result_popup.open()

        clock_module.schedule_once(_finish, 0)

    def run_generation():
        """Main generation function"""
        temp_dir = None
        try:
            # Step 1: Load route data
            update_progress(10, "Loading route data...")
            project_folder = os.path.join(resources_folder, "projects", project_name)
            positions_path = os.path.join(project_folder, "positions.json")

            if not os.path.exists(positions_path):
                finish_generation(False, "No route data found!")
                return

            with open(positions_path, "r") as f:
                positions = json.load(f)

            if len(positions) < 10:
                finish_generation(False, "Route too short for 3D animation (minimum 10 points)")
                return

            # Step 2: Calculate route bounds and center
            update_progress(20, "Calculating route boundaries...")
            lats = [pos['latitude'] for pos in positions]
            lons = [pos['longitude'] for pos in positions]

            center_lat = sum(lats) / len(lats)
            center_lon = sum(lons) / len(lons)

            min_lat, max_lat = min(lats), max(lats)
            min_lon, max_lon = min(lons), max(lons)

            # Step 3: Generate frames with space entry sequence
            update_progress(30, "Generating 3D frames with space entry...")

            # Create temporary directory for frames
            temp_dir = tempfile.mkdtemp()
            frames_dir = os.path.join(temp_dir, "frames")
            os.makedirs(frames_dir)

            # Video settings
            width, height = 1920, 1080
            fps = 30
            entry_frames = 90  # 3 seconds at 30fps for space entry
            total_frames = entry_frames + len(positions)  # Entry + one frame per GPS point

            frame_counter = 0

            # Generate space entry sequence (3 seconds)
            update_progress(30, "Creating space entry sequence...")
            for i in range(entry_frames):
                progress = 30 + (i / total_frames) * 40
                update_progress(progress, f"Space entry frame {i+1}/{entry_frames}...")

                frame = create_space_entry_frame(
                    positions[0], center_lat, center_lon,
                    min_lat, max_lat, min_lon, max_lon,
                    width, height, i, entry_frames
                )

                frame_path = os.path.join(frames_dir, f"frame_{frame_counter:06d}.png")
                cv2.imwrite(frame_path, frame)
                frame_counter += 1

            # Generate route following frames
            for i, pos in enumerate(positions):
                progress = 30 + ((entry_frames + i) / total_frames) * 40
                update_progress(progress, f"Route frame {i+1}/{len(positions)}...")

                frame = create_3d_frame(
                    pos, positions, i, center_lat, center_lon,
                    min_lat, max_lat, min_lon, max_lon,
                    width, height
                )

                # Save frame
                frame_path = os.path.join(frames_dir, f"frame_{frame_counter:06d}.png")
                cv2.imwrite(frame_path, frame)
                frame_counter += 1

            # Step 4: Create video
            update_progress(75, "Compiling video...")

            # Output path
            output_filename = f"{project_name}_3d_animation_{datetime.now().strftime('%Y%m%d_%H%M%S')}.mp4"
            output_path = os.path.join(project_folder, output_filename)

            # Create video writer
            fourcc = cv2.VideoWriter_fourcc(*'mp4v')
            video_writer = cv2.VideoWriter(output_path, fourcc, fps, (width, height))

            # Add frames to video
            frame_files = sorted([f for f in os.listdir(frames_dir) if f.endswith('.png')])
            for frame_file in frame_files:
                frame_path = os.path.join(frames_dir, frame_file)
                frame = cv2.imread(frame_path)
                video_writer.write(frame)

            video_writer.release()

            # Step 5: Finishing touches (audio could be added here)
            update_progress(90, "Adding finishing touches...")

            update_progress(100, "3D Video generated successfully!")
            finish_generation(True, "Success!", output_path)

        except Exception as e:
            finish_generation(False, str(e))
        finally:
            # Always remove the temporary frame directory, even if generation failed
            if temp_dir and os.path.isdir(temp_dir):
                shutil.rmtree(temp_dir, ignore_errors=True)

    # Start generation in background
    import threading
    thread = threading.Thread(target=run_generation)
    thread.daemon = True
    thread.start()

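
# The sketch below is a rough illustration (not part of the original app flow) of how
# generate_3d_video_animation() could be exercised headlessly, e.g. from a test script.
# The _Stub* names are hypothetical stand-ins for the Kivy widgets and Clock; the real
# caller passes live Kivy objects, and the result popup still requires Kivy to be
# installed, so the example call is left commented out.
class _StubWidget:
    """Duck-typed stand-in for the Kivy label / progress bar / popup arguments."""
    value = 0
    text = ""

    def dismiss(self):
        pass


class _StubClock:
    """Runs scheduled callbacks immediately instead of on the Kivy main loop."""
    @staticmethod
    def schedule_once(callback, timeout=0):
        callback(0)


# Example (assumes <resources>/projects/<project>/positions.json exists):
# generate_3d_video_animation("demo_ride", "/path/to/resources",
#                             _StubWidget(), _StubWidget(), None, _StubClock)
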
def create_3d_frame(current_pos, all_positions, frame_index, center_lat, center_lon,
                    min_lat, max_lat, min_lon, max_lon, width, height):
    """
    Create a Google Earth-style 3D frame with camera following the route
    """
    # Create canvas
    frame = np.zeros((height, width, 3), dtype=np.uint8)

    # Enhanced camera following system
    camera_pos, camera_target, camera_bearing = calculate_dynamic_camera_position(
        current_pos, all_positions, frame_index, min_lat, max_lat, min_lon, max_lon
    )

    # Google Earth-style perspective parameters with improved aerial view
    base_camera_height = 1500 + 1000 * math.sin(frame_index * 0.02)  # 1000-3000m range
    camera_height = base_camera_height + 500 * math.sin(frame_index * 0.05)  # Add variation
    view_distance = 3000  # Increased view distance for better aerial perspective
    tilt_angle = 65 + 8 * math.sin(frame_index * 0.03)  # Dynamic tilt for cinematic effect
    fov = 75  # Slightly wider field of view for aerial shots

    # Create enhanced terrain background
    create_terrain_background(frame, width, height, camera_pos['latitude'], camera_pos['longitude'], camera_bearing, tilt_angle)

    # Transform all route points to 3D camera space
    route_points_3d = []
    for i, pos in enumerate(all_positions):
        # Calculate distance from camera
        dist_to_camera = calculate_distance(camera_pos['latitude'], camera_pos['longitude'],
                                            pos['latitude'], pos['longitude'])

        if dist_to_camera > view_distance * 2:  # Skip points too far away
            continue

        # Get elevation for this point
        elevation = get_simulated_elevation(pos['latitude'], pos['longitude'], i)

        # Convert to 3D screen coordinates
        screen_x, screen_y, is_visible = world_to_screen_3d(
            pos['latitude'], pos['longitude'], elevation,
            camera_pos['latitude'], camera_pos['longitude'], camera_height,
            camera_bearing, tilt_angle, width, height, view_distance
        )

        if is_visible:
            route_points_3d.append((screen_x, screen_y, i <= frame_index))

    # Draw route with enhanced 3D effects
    draw_3d_route(frame, route_points_3d, frame_index)

    # Add Google Earth-style UI overlays
    add_google_earth_ui(frame, current_pos, camera_bearing, width, height, frame_index, len(all_positions))

    # Add atmospheric effects
    add_atmospheric_perspective(frame, width, height)

    return frame

def calculate_bearing(lat1, lon1, lat2, lon2):
    """Calculate bearing between two GPS points"""
    lat1_rad = math.radians(lat1)
    lat2_rad = math.radians(lat2)
    dlon_rad = math.radians(lon2 - lon1)

    y = math.sin(dlon_rad) * math.cos(lat2_rad)
    x = math.cos(lat1_rad) * math.sin(lat2_rad) - math.sin(lat1_rad) * math.cos(lat2_rad) * math.cos(dlon_rad)

    bearing = math.atan2(y, x)
    bearing = math.degrees(bearing)
    bearing = (bearing + 360) % 360

    return bearing

def create_terrain_background(frame, width, height, camera_lat, camera_lon, bearing, tilt_angle):
    """Create a Google Earth-style terrain background"""
    # Note: the camera parameters are accepted for future use; the gradient below
    # is purely screen-space and does not yet depend on them.

    # Sky gradient (more realistic)
    for y in range(int(height * 0.4)):  # Sky takes upper 40%
        sky_intensity = y / (height * 0.4)
        # Sky colors: sky blue at the zenith fading to a lighter blue at the horizon
        r = int(135 + (200 - 135) * sky_intensity)
        g = int(206 + (230 - 206) * sky_intensity)
        b = int(235 + (255 - 235) * sky_intensity)
        frame[y, :] = (b, g, r)  # BGR format for OpenCV

    # Terrain/ground gradient
    terrain_start_y = int(height * 0.4)
    for y in range(terrain_start_y, height):
        # Create depth illusion
        distance_factor = (y - terrain_start_y) / (height - terrain_start_y)

        # Terrain colors: greens and browns
        base_r = int(80 + 60 * distance_factor)
        base_g = int(120 + 80 * distance_factor)
        base_b = int(60 + 40 * distance_factor)

        # Add terrain texture using noise
        for x in range(width):
            noise = (math.sin(x * 0.01 + y * 0.01) + math.sin(x * 0.05 + y * 0.02)) * 10
            terrain_r = max(0, min(255, base_r + int(noise)))
            terrain_g = max(0, min(255, base_g + int(noise)))
            terrain_b = max(0, min(255, base_b + int(noise)))

            frame[y, x] = (terrain_b, terrain_g, terrain_r)

def calculate_visible_bounds(camera_lat, camera_lon, bearing, view_distance, width, height):
    """Calculate the bounds of the visible area"""
    # This is a simplified calculation for the demo
    # In a real implementation, you'd use proper 3D projection math
    lat_offset = view_distance / 111000  # Rough conversion to degrees
    lon_offset = view_distance / (111000 * math.cos(math.radians(camera_lat)))

    return {
        'min_lat': camera_lat - lat_offset,
        'max_lat': camera_lat + lat_offset,
        'min_lon': camera_lon - lon_offset,
        'max_lon': camera_lon + lon_offset
    }

def world_to_screen_3d(world_lat, world_lon, elevation, camera_lat, camera_lon, camera_height,
                       bearing, tilt_angle, screen_width, screen_height, view_distance):
    """Transform world coordinates to 3D screen coordinates"""
    # Calculate relative position
    lat_diff = world_lat - camera_lat
    lon_diff = world_lon - camera_lon

    # Convert to meters (approximate)
    x_meters = lon_diff * 111000 * math.cos(math.radians(camera_lat))
    y_meters = lat_diff * 111000
    z_meters = elevation - camera_height

    # Rotate based on bearing
    bearing_rad = math.radians(-bearing)  # Negative for correct rotation
    rotated_x = x_meters * math.cos(bearing_rad) - y_meters * math.sin(bearing_rad)
    rotated_y = x_meters * math.sin(bearing_rad) + y_meters * math.cos(bearing_rad)

    # Check if point is in front of camera
    if rotated_y < 0:
        return 0, 0, False

    # Apply perspective projection
    perspective_scale = view_distance / max(rotated_y, 1)

    # Convert to screen coordinates
    screen_x = int(screen_width / 2 + rotated_x * perspective_scale * 0.5)

    # Apply tilt for vertical positioning
    tilt_factor = math.sin(math.radians(tilt_angle))
    horizon_y = screen_height * 0.4  # Horizon line
    screen_y = int(horizon_y + (z_meters * perspective_scale * tilt_factor * 0.1) +
                   (rotated_y * perspective_scale * 0.2))

    # Check if point is visible on screen
    is_visible = (0 <= screen_x < screen_width and 0 <= screen_y < screen_height)

    return screen_x, screen_y, is_visible

def get_simulated_elevation(lat, lon, frame_index):
    """Generate simulated elevation data"""
    # Create varied terrain using sine waves
    elevation = (
        50 * math.sin(lat * 100) +
        30 * math.sin(lon * 80) +
        20 * math.sin((lat + lon) * 60) +
        10 * math.sin(frame_index * 0.1)  # Dynamic element
    )
    return max(0, elevation)  # Ensure non-negative elevation

def draw_3d_route(frame, route_points_3d, current_frame_index):
    """Draw the route with 3D perspective effects"""
    if len(route_points_3d) < 2:
        return

    # Draw route segments
    for i in range(1, len(route_points_3d)):
        x1, y1, is_past1 = route_points_3d[i-1]
        x2, y2, is_past2 = route_points_3d[i]

        # Color based on position relative to current point
        if is_past1 and is_past2:
            # Past route - blue to cyan gradient
            color = (255, 200, 100)  # Cyan-ish
            thickness = 4
        else:
            # Future route - red gradient
            color = (100, 100, 255)  # Red-ish
            thickness = 3

        # Draw line with shadow for depth
        cv2.line(frame, (x1+2, y1+2), (x2+2, y2+2), (50, 50, 50), thickness+2)
        cv2.line(frame, (x1, y1), (x2, y2), color, thickness)

    # Draw current position marker at the last visible point already travelled
    current_x, current_y = None, None
    for x, y, is_past in route_points_3d:
        if is_past:
            current_x, current_y = x, y

    if current_x is None:
        # No travelled point is visible in this frame, so skip the marker
        return

    # Pulsing current position marker
    pulse_size = int(12 + 8 * math.sin(current_frame_index * 0.3))

    # Shadow
    cv2.circle(frame, (current_x+3, current_y+3), pulse_size, (0, 0, 0), -1)
    # Outer ring
    cv2.circle(frame, (current_x, current_y), pulse_size, (0, 255, 255), -1)
    # Inner ring
    cv2.circle(frame, (current_x, current_y), pulse_size-4, (255, 255, 255), 2)
    # Center dot
    cv2.circle(frame, (current_x, current_y), 3, (255, 0, 0), -1)

def add_google_earth_ui(frame, current_pos, bearing, width, height, frame_index, total_frames):
    """Add Google Earth-style UI elements"""
    # Speed and info panel (top-left)
    panel_width = 250
    panel_height = 120
    overlay = frame.copy()

    # Semi-transparent panel
    cv2.rectangle(overlay, (10, 10), (panel_width, panel_height), (50, 50, 50), -1)
    cv2.addWeighted(overlay, 0.7, frame, 0.3, 0, frame)

    # Panel border
    cv2.rectangle(frame, (10, 10), (panel_width, panel_height), (200, 200, 200), 2)

    # Text information
    speed = current_pos.get('speed', 0)
    timestamp = current_pos.get('deviceTime', '')

    y_pos = 35
    cv2.putText(frame, f"Speed: {speed:.1f} km/h", (20, y_pos),
                cv2.FONT_HERSHEY_SIMPLEX, 0.6, (255, 255, 255), 1)

    y_pos += 25
    # Hershey fonts are ASCII-only, so "deg" is used instead of the degree sign
    cv2.putText(frame, f"Bearing: {bearing:.0f} deg", (20, y_pos),
                cv2.FONT_HERSHEY_SIMPLEX, 0.6, (255, 255, 255), 1)

    y_pos += 25
    if timestamp:
        cv2.putText(frame, f"Time: {timestamp[:16]}", (20, y_pos),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 1)

    y_pos += 25
    progress = (frame_index + 1) / total_frames * 100
    cv2.putText(frame, f"Progress: {progress:.1f}%", (20, y_pos),
                cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 1)

    # Compass (top-right)
    compass_center_x = width - 80
    compass_center_y = 80
    compass_radius = 40

    # Compass background
    cv2.circle(frame, (compass_center_x, compass_center_y), compass_radius, (50, 50, 50), -1)
    cv2.circle(frame, (compass_center_x, compass_center_y), compass_radius, (200, 200, 200), 2)

    # North indicator
    north_x = compass_center_x + int((compass_radius - 10) * math.sin(math.radians(-bearing)))
    north_y = compass_center_y - int((compass_radius - 10) * math.cos(math.radians(-bearing)))
    cv2.arrowedLine(frame, (compass_center_x, compass_center_y), (north_x, north_y), (0, 0, 255), 3)

    # N label
    cv2.putText(frame, "N", (compass_center_x - 8, compass_center_y - compass_radius - 10),
                cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255), 2)

    # Progress bar (bottom)
    progress_bar_width = width - 40
    progress_bar_height = 10
    progress_bar_x = 20
    progress_bar_y = height - 30

    # Background
    cv2.rectangle(frame, (progress_bar_x, progress_bar_y),
                  (progress_bar_x + progress_bar_width, progress_bar_y + progress_bar_height),
                  (100, 100, 100), -1)

    # Progress fill
    progress_width = int(progress_bar_width * progress / 100)
    cv2.rectangle(frame, (progress_bar_x, progress_bar_y),
                  (progress_bar_x + progress_width, progress_bar_y + progress_bar_height),
                  (0, 255, 100), -1)

    # Border
    cv2.rectangle(frame, (progress_bar_x, progress_bar_y),
                  (progress_bar_x + progress_bar_width, progress_bar_y + progress_bar_height),
                  (200, 200, 200), 1)

def add_atmospheric_perspective(frame, width, height):
    """Add distance fog effect for realism"""
    # Create fog gradient overlay
    fog_overlay = np.zeros_like(frame)

    # Fog is strongest at the horizon and fades toward the foreground
    horizon_y = int(height * 0.4)
    for y in range(horizon_y, height):
        depth = 1.0 - (y - horizon_y) / (height - horizon_y)  # 1.0 at the horizon, 0.0 at the bottom
        fog_intensity = min(0.3, depth * 0.3)
        fog_color = int(200 * fog_intensity)
        fog_overlay[y, :] = (fog_color, fog_color, fog_color)

    # Blend fog with frame
    cv2.addWeighted(frame, 1.0, fog_overlay, 0.5, 0, frame)

def get_elevation_data(lat, lon):
    """
    Get elevation data for a coordinate (optional enhancement)
    """
    try:
        # Using a free elevation API
        url = f"https://api.open-elevation.com/api/v1/lookup?locations={lat},{lon}"
        response = requests.get(url, timeout=5)
        if response.status_code == 200:
            data = response.json()
            return data['results'][0]['elevation']
    except Exception:
        pass
    return 0  # Default elevation

def calculate_dynamic_camera_position(current_pos, all_positions, frame_index, min_lat, max_lat, min_lon, max_lon):
    """
    Calculate dynamic camera position that follows the route smoothly
    """
    camera_lat = current_pos['latitude']
    camera_lon = current_pos['longitude']

    # Dynamic look-ahead based on speed and terrain
    speed = current_pos.get('speed', 0)
    base_look_ahead = max(3, min(10, int(speed / 10)))  # Adjust based on speed

    # Look ahead in the route for camera direction
    look_ahead_frames = min(base_look_ahead, len(all_positions) - frame_index - 1)

    if look_ahead_frames > 0:
        target_pos = all_positions[frame_index + look_ahead_frames]
        target_lat = target_pos['latitude']
        target_lon = target_pos['longitude']
    else:
        # Use previous points to maintain direction
        if frame_index > 0:
            prev_pos = all_positions[frame_index - 1]
            # Extrapolate forward
            lat_diff = camera_lat - prev_pos['latitude']
            lon_diff = camera_lon - prev_pos['longitude']
            target_lat = camera_lat + lat_diff
            target_lon = camera_lon + lon_diff
        else:
            target_lat = camera_lat
            target_lon = camera_lon

    # Calculate the camera bearing towards the look-ahead target
    bearing = calculate_bearing(camera_lat, camera_lon, target_lat, target_lon)

    # Add slight camera offset for better viewing angle
    offset_distance = 50  # meters
    offset_angle = bearing + 45  # 45 degrees offset for better perspective

    # Calculate offset position
    offset_lat = camera_lat + (offset_distance / 111000) * math.cos(math.radians(offset_angle))
    offset_lon = camera_lon + (offset_distance / (111000 * math.cos(math.radians(camera_lat)))) * math.sin(math.radians(offset_angle))

    camera_pos = {
        'latitude': offset_lat,
        'longitude': offset_lon
    }

    camera_target = {
        'latitude': target_lat,
        'longitude': target_lon
    }

    return camera_pos, camera_target, bearing

def calculate_distance(lat1, lon1, lat2, lon2):
    """Calculate distance between two GPS points in meters"""
    # Haversine formula
    R = 6371000  # Earth's radius in meters
    phi1 = math.radians(lat1)
    phi2 = math.radians(lat2)
    delta_phi = math.radians(lat2 - lat1)
    delta_lambda = math.radians(lon2 - lon1)

    a = math.sin(delta_phi/2)**2 + math.cos(phi1) * math.cos(phi2) * math.sin(delta_lambda/2)**2
    c = 2 * math.atan2(math.sqrt(a), math.sqrt(1-a))

    return R * c

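
# A quick sanity-check sketch for the two GPS helpers above. It is not called by the
# generator; run it manually if you want to confirm the geometry. The expected values
# in the comments are standard references (due east is a 90-degree bearing; one degree
# of latitude is roughly 111.2 km for an Earth radius of 6371 km).
def _demo_gps_helpers():
    """Print reference values for calculate_bearing() and calculate_distance()."""
    print(calculate_bearing(0.0, 0.0, 0.0, 1.0))   # ~90.0 (due east along the equator)
    print(calculate_bearing(0.0, 0.0, 1.0, 0.0))   # ~0.0 (due north)
    print(calculate_distance(0.0, 0.0, 1.0, 0.0))  # ~111195 m (one degree of latitude)
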
def world_to_camera_screen(world_lat, world_lon, elevation, camera_pos, camera_target, camera_height,
                           bearing, tilt_angle, fov, screen_width, screen_height):
    """
    Advanced 3D transformation from world coordinates to screen coordinates
    """
    # Convert GPS to local coordinates relative to camera
    lat_diff = world_lat - camera_pos['latitude']
    lon_diff = world_lon - camera_pos['longitude']

    # Convert to meters (more accurate conversion)
    x_meters = lon_diff * 111320 * math.cos(math.radians(camera_pos['latitude']))
    y_meters = lat_diff * 110540
    z_meters = elevation - camera_height

    # Apply camera rotation based on bearing
    bearing_rad = math.radians(-bearing)
    tilt_rad = math.radians(tilt_angle)

    # Rotate around Z axis (bearing)
    rotated_x = x_meters * math.cos(bearing_rad) - y_meters * math.sin(bearing_rad)
    rotated_y = x_meters * math.sin(bearing_rad) + y_meters * math.cos(bearing_rad)
    rotated_z = z_meters

    # Apply tilt rotation
    final_y = rotated_y * math.cos(tilt_rad) - rotated_z * math.sin(tilt_rad)
    final_z = rotated_y * math.sin(tilt_rad) + rotated_z * math.cos(tilt_rad)
    final_x = rotated_x

    # Check if point is in front of camera
    if final_y <= 0:
        return 0, 0, float('inf'), False

    # Perspective projection
    fov_rad = math.radians(fov)
    f = (screen_width / 2) / math.tan(fov_rad / 2)  # Focal length

    # Project to screen
    screen_x = int(screen_width / 2 + (final_x * f) / final_y)
    screen_y = int(screen_height / 2 - (final_z * f) / final_y)

    # Calculate depth for sorting
    depth = final_y

    # Check if point is visible on screen
    is_visible = (0 <= screen_x < screen_width and 0 <= screen_y < screen_height)

    return screen_x, screen_y, depth, is_visible

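
# Illustrative sketch only: world_to_camera_screen() is not wired into create_3d_frame()
# (that path uses the simpler world_to_screen_3d()), so the call below just shows the
# expected argument shapes. The coordinates are made up; camera_target is accepted but
# not used by the projection itself.
def _demo_perspective_projection():
    """Project a point ~500 m north of a camera hovering at 1500 m."""
    camera = {'latitude': 46.0, 'longitude': 7.0}
    point_lat = 46.0 + 500 / 110540  # ~500 m north of the camera
    x, y, depth, visible = world_to_camera_screen(
        point_lat, 7.0, 100,          # world point at 100 m elevation
        camera, camera, 1500,         # camera position/target and height
        0, 65, 75,                    # bearing, tilt, field of view
        1920, 1080
    )
    print(x, y, depth, visible)  # roughly centred horizontally, below the screen centre
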
def get_enhanced_elevation(lat, lon, point_index, frame_index):
    """
    Generate more realistic elevation data with variation
    """
    # Base elevation using multiple harmonics
    base_elevation = (
        100 * math.sin(lat * 50) +
        70 * math.sin(lon * 40) +
        50 * math.sin((lat + lon) * 30) +
        30 * math.sin(lat * 200) * math.cos(lon * 150) +
        20 * math.sin(point_index * 0.1)  # Smooth variation along route
    )

    # Add temporal variation for dynamic feel
    time_variation = 10 * math.sin(frame_index * 0.05 + point_index * 0.2)

    # Ensure realistic elevation range
    elevation = max(0, min(500, base_elevation + time_variation))

    return elevation

def create_space_entry_frame(start_pos, center_lat, center_lon, min_lat, max_lat, min_lon, max_lon,
                             width, height, frame_index, total_entry_frames):
    """
    Create a Google Earth-style space entry frame transitioning from space to route start
    """
    # Create canvas
    frame = np.zeros((height, width, 3), dtype=np.uint8)

    # Calculate entry progress (0 to 1)
    entry_progress = frame_index / total_entry_frames

    # Space entry parameters - start very high and descend
    max_altitude = 50000  # Start from 50km altitude (space view)
    min_altitude = 2000   # End at 2km altitude (good aerial view)

    # Smooth descent curve (ease-out animation)
    altitude_progress = 1 - (1 - entry_progress) ** 3  # Cubic ease-out
    current_altitude = max_altitude - (max_altitude - min_altitude) * altitude_progress

    # Camera starts centered over the route and gradually moves toward the route start
    start_lat = start_pos['latitude']
    start_lon = start_pos['longitude']

    # Smooth transition to route start position
    transition_progress = entry_progress ** 2  # Quadratic for gradual transition
    camera_lat = center_lat + (start_lat - center_lat) * transition_progress
    camera_lon = center_lon + (start_lon - center_lon) * transition_progress

    # Create space/sky background based on altitude
    create_space_sky_background(frame, width, height, current_altitude)

    # Calculate view bounds based on altitude (value is in meters despite the name)
    view_radius_km = current_altitude * 0.8  # View radius increases with altitude

    # Draw Earth curvature effect at high altitudes
    if current_altitude > 10000:
        draw_earth_curvature(frame, width, height, current_altitude)

    # Draw terrain with increasing detail as we descend
    draw_terrain_from_altitude(frame, camera_lat, camera_lon, view_radius_km,
                               width, height, current_altitude, entry_progress)

    # Draw route overview (visible from space)
    if entry_progress > 0.3:  # Route becomes visible partway through descent
        draw_route_overview_from_space(frame, min_lat, max_lat, min_lon, max_lon,
                                       camera_lat, camera_lon, view_radius_km,
                                       width, height, entry_progress)

    # Add space entry UI
    add_space_entry_ui(frame, current_altitude, entry_progress, width, height)

    # Add atmospheric glow effect
    add_atmospheric_glow(frame, width, height, current_altitude)

    return frame

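
# Small sketch (not used by the generator) showing the descent profile produced by the
# cubic ease-out curve in create_space_entry_frame(): the camera drops quickly at first
# and then levels off as it approaches the 2 km end altitude.
def _demo_descent_profile():
    """Print the simulated altitude at a few points of the 3-second entry sequence."""
    max_altitude, min_altitude = 50000, 2000
    for entry_progress in (0.0, 0.25, 0.5, 0.75, 1.0):
        altitude_progress = 1 - (1 - entry_progress) ** 3
        altitude = max_altitude - (max_altitude - min_altitude) * altitude_progress
        print(f"{entry_progress:.2f} -> {altitude / 1000:.1f} km")
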
def create_space_sky_background(frame, width, height, altitude):
    """Create background that transitions from space black to sky blue"""
    # Space to atmosphere transition
    if altitude > 20000:
        # Space: black to deep blue
        space_factor = min(1.0, (altitude - 20000) / 30000)
        for y in range(height):
            r = int(5 * (1 - space_factor))
            g = int(15 * (1 - space_factor))
            b = int(30 * (1 - space_factor))
            frame[y, :] = (b, g, r)
    else:
        # Atmosphere: blue gradient
        for y in range(int(height * 0.6)):  # Sky portion
            sky_intensity = y / (height * 0.6)
            r = int(135 + (200 - 135) * sky_intensity)
            g = int(206 + (230 - 206) * sky_intensity)
            b = int(235 + (255 - 235) * sky_intensity)
            frame[y, :] = (b, g, r)

        # Terrain visible below
        terrain_start_y = int(height * 0.6)
        for y in range(terrain_start_y, height):
            distance_factor = (y - terrain_start_y) / (height - terrain_start_y)
            base_r = int(80 + 60 * distance_factor)
            base_g = int(120 + 80 * distance_factor)
            base_b = int(60 + 40 * distance_factor)
            frame[y, :] = (base_b, base_g, base_r)

def draw_earth_curvature(frame, width, height, altitude):
    """Draw Earth's curvature at high altitudes"""
    if altitude < 15000:
        return

    # Calculate curvature based on altitude
    curve_factor = min(1.0, (altitude - 15000) / 35000)

    # Draw curved horizon
    horizon_y = int(height * 0.5)
    curve_amplitude = int(50 * curve_factor)

    for x in range(width):
        # Sine wave for curvature
        curve_offset = int(curve_amplitude * math.sin(math.pi * x / width))
        curve_y = horizon_y + curve_offset

        # Draw atmospheric glow around Earth
        for glow_y in range(max(0, curve_y - 20), min(height, curve_y + 5)):
            glow_intensity = 1.0 - abs(glow_y - curve_y) / 20.0
            if glow_intensity > 0:
                # Cast to int before adding so the uint8 channels cannot wrap around
                b, g, r = (int(c) for c in frame[glow_y, x])
                frame[glow_y, x] = (
                    min(255, b + int(100 * glow_intensity)),
                    min(255, g + int(150 * glow_intensity)),
                    min(255, r + int(200 * glow_intensity))
                )

def draw_terrain_from_altitude(frame, camera_lat, camera_lon, view_radius_km,
                               width, height, altitude, progress):
    """Draw terrain detail that increases as altitude decreases"""
    if altitude > 10000:
        # High altitude: show landmass outlines
        draw_landmass_outlines(frame, camera_lat, camera_lon, view_radius_km, width, height)
    else:
        # Lower altitude: show detailed terrain
        detail_factor = 1.0 - (altitude / 10000)
        draw_detailed_terrain(frame, camera_lat, camera_lon, view_radius_km,
                              width, height, detail_factor)

def draw_landmass_outlines(frame, camera_lat, camera_lon, view_radius_km, width, height):
    """Draw simplified landmass outlines for space view"""
    # Simplified representation - in real implementation you'd use actual geographic data
    center_x, center_y = width // 2, height // 2

    # Draw some landmass shapes
    for i in range(5):
        angle = i * 72  # 360/5 degrees
        radius = int(100 + 50 * math.sin(angle * math.pi / 180))
        land_x = center_x + int(radius * math.cos(math.radians(angle)))
        land_y = center_y + int(radius * math.sin(math.radians(angle)))

        # Draw landmass blob (saddle-brown expressed in BGR order)
        cv2.circle(frame, (land_x, land_y), 30, (19, 69, 139), -1)

def draw_detailed_terrain(frame, camera_lat, camera_lon, view_radius_km,
                          width, height, detail_factor):
    """Draw detailed terrain features"""
    # Create terrain texture
    # NOTE: pure-Python per-pixel loop, so this is slow at full HD resolution
    for y in range(height):
        for x in range(width):
            # Generate terrain using noise
            noise1 = math.sin(x * 0.01 * detail_factor) * math.sin(y * 0.01 * detail_factor)
            noise2 = math.sin(x * 0.05 * detail_factor) * math.sin(y * 0.03 * detail_factor)

            terrain_height = (noise1 + noise2) * 0.5

            # Color based on terrain height (BGR)
            if terrain_height > 0.3:
                # Mountains - grey/brown
                color = (100, 120, 140)
            elif terrain_height > 0:
                # Hills - green
                color = (60, 140, 80)
            else:
                # Valleys/water - muted blue
                color = (120, 100, 60)

            frame[y, x] = color

def draw_route_overview_from_space(frame, min_lat, max_lat, min_lon, max_lon,
                                   camera_lat, camera_lon, view_radius_km,
                                   width, height, progress):
    """Draw route overview visible from space"""
    # Simple route highlight for the space view:
    # map the route bounds to screen coordinates
    route_width = max_lon - min_lon
    route_height = max_lat - min_lat

    if route_width == 0 or route_height == 0:
        return

    # Calculate route position on screen
    lat_offset = (min_lat + max_lat) / 2 - camera_lat
    lon_offset = (min_lon + max_lon) / 2 - camera_lon

    # Convert to screen coordinates (simplified)
    route_x = int(width / 2 + lon_offset * width / 2)
    route_y = int(height / 2 + lat_offset * height / 2)

    route_screen_width = int(route_width * width / 4)
    route_screen_height = int(route_height * height / 4)

    # Draw route area highlight
    if (0 < route_x < width and 0 < route_y < height):
        # Pulse value for the highlight (computed for a pulsing effect, not applied yet)
        pulse = int(20 + 10 * math.sin(progress * 10))
        cv2.rectangle(frame,
                      (route_x - route_screen_width, route_y - route_screen_height),
                      (route_x + route_screen_width, route_y + route_screen_height),
                      (0, 255, 255), 2)  # Cyan highlight

def add_space_entry_ui(frame, altitude, progress, width, height):
    """Add UI elements for space entry sequence"""
    # Altitude indicator
    altitude_text = f"Altitude: {altitude/1000:.1f} km"
    cv2.putText(frame, altitude_text, (20, 50),
                cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255, 255, 255), 2)

    # Entry progress
    progress_text = f"Descent: {progress*100:.0f}%"
    cv2.putText(frame, progress_text, (20, 90),
                cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255, 255, 255), 2)

    # "Approaching Route" text when near the end
    if progress > 0.7:
        cv2.putText(frame, "Approaching Route...", (width//2 - 120, height//2),
                    cv2.FONT_HERSHEY_SIMPLEX, 1.0, (0, 255, 255), 2)

def add_atmospheric_glow(frame, width, height, altitude):
    """Add atmospheric glow effect"""
    if altitude > 5000:
        # Create atmospheric glow overlay
        glow_intensity = min(0.3, altitude / 50000)

        # Horizontal glow bands
        for y in range(height):
            distance_from_horizon = abs(y - height // 2) / (height // 2)
            if distance_from_horizon < 0.5:
                glow = int(50 * glow_intensity * (1 - distance_from_horizon * 2))
                # Brighten the blue channel (index 0 in BGR); widen to int32 first so
                # the uint8 values cannot wrap around before clamping
                blue = frame[y, :, 0].astype(np.int32) + glow
                frame[y, :, 0] = np.minimum(255, blue).astype(np.uint8)
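

if __name__ == "__main__":
    # Minimal smoke test, offered as a sketch only: render a single frame from a short
    # synthetic route and write it to disk. The real entry point is
    # generate_3d_video_animation(), driven from the Kivy app; the route, resolution
    # and output filename below are made up for local experimentation.
    demo_positions = [
        {"latitude": 46.0 + i * 0.0005, "longitude": 7.0 + i * 0.0008, "speed": 20.0}
        for i in range(30)
    ]
    demo_lats = [p["latitude"] for p in demo_positions]
    demo_lons = [p["longitude"] for p in demo_positions]
    demo_frame = create_3d_frame(
        demo_positions[10], demo_positions, 10,
        sum(demo_lats) / len(demo_lats), sum(demo_lons) / len(demo_lons),
        min(demo_lats), max(demo_lats), min(demo_lons), max(demo_lons),
        640, 360  # small resolution keeps the pure-Python background loops fast
    )
    cv2.imwrite("demo_frame.png", demo_frame)
    print("Wrote demo_frame.png")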
|