Fix Interactive Route Map: Create MapRoute records from GPX files
- Create MapRoute database records for all GPX files for map visualization
- Populate route statistics (distance, elevation, coordinates) from GPX parsing
- Update GPX file statistics to mirror MapRoute data for post detail pages
- Enable /community/api/routes endpoint to return proper route data for map iframe
- Fix post detail page GPX statistics display

This resolves the issue where the community map showed '2 routes discovered' but routes weren't actually rendering on the Leaflet.js map visualization.

Changes:
- Dockerfile: Updated init script paths and added migrate-db call
- app/__init__.py: Added admin user auto-creation with error handling
- app/routes/community.py: Added debug logging and API route for map data
- docker-compose.yml: Simplified to use .env for environment variables
- run.py: Added comprehensive database schema migration command
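Editor's note: the MapRoute back-fill described above is not part of the hunks shown below. As a rough illustration only, a GPX file can be reduced to the route statistics named in the message with gpxpy along these lines; the MapRoute and GPXFile attribute names here (file_path, gpx_file_id, start_lat, start_lng, etc.) are assumptions, not the project's actual schema:

    # Hypothetical back-fill sketch: parse a stored GPX file and create one
    # MapRoute row with summary statistics for the community map.
    import gpxpy

    def build_map_route(gpx_file, db, MapRoute):
        with open(gpx_file.file_path) as fh:          # assumed GPXFile.file_path
            gpx = gpxpy.parse(fh)

        points = [p for t in gpx.tracks for s in t.segments for p in s.points]
        if not points:
            return None

        uphill, _downhill = gpx.get_uphill_downhill()
        elevations = [p.elevation for p in points if p.elevation is not None]

        route = MapRoute(                             # assumed MapRoute columns
            gpx_file_id=gpx_file.id,
            total_distance=gpx.length_3d() / 1000.0,  # metres -> km
            elevation_gain=uphill,
            max_elevation=max(elevations) if elevations else 0.0,
            min_elevation=min(elevations) if elevations else 0.0,
            total_points=len(points),
            start_lat=points[0].latitude,
            start_lng=points[0].longitude,
        )
        db.session.add(route)
        db.session.commit()
        return route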
@@ -25,13 +25,14 @@ ENV FLASK_APP=run.py

# Create a script to conditionally initialize database
RUN echo '#!/bin/sh\n\
if [ ! -f /data/moto_adventure.db ]; then\n\
if [ ! -f /opt/moto_site/data/moto_adventure.db ]; then\n\
echo "Database not found, initializing..."\n\
flask --app run.py init-db\n\
flask --app run.py create-admin\n\
flask --app run.py migrate-db\n\
echo "Database initialized successfully"\n\
else\n\
echo "Database already exists, skipping initialization"\n\
echo "Database exists, running migrations..."\n\
flask --app run.py migrate-db\n\
fi' > /opt/moto_site/init_db_if_needed.sh && chmod +x /opt/moto_site/init_db_if_needed.sh

# Create non-root user and set permissions
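Editor's note: the create-admin command called by this script is not shown in the run.py hunk further down. A hypothetical sketch of such a Flask CLI command, reusing the ADMIN_* variables that app/__init__.py reads from .env and assuming the run.py context where app, db and the User model are available:

    @app.cli.command()
    def create_admin():
        """Create the initial admin user from ADMIN_* env vars (illustrative sketch)."""
        import os
        from app.models import User

        email = os.environ.get('ADMIN_EMAIL')
        nickname = os.environ.get('ADMIN_NICKNAME')
        password = os.environ.get('ADMIN_PASSWORD')
        if not (email and nickname and password):
            print('ADMIN_EMAIL, ADMIN_NICKNAME, or ADMIN_PASSWORD not set; skipping.')
            return
        if User.query.filter_by(email=email).first():
            print(f'Admin {email} already exists.')
            return
        user = User(nickname=nickname, email=email, is_admin=True, is_active=True)
        user.set_password(password)
        db.session.add(user)
        db.session.commit()
        print(f'Admin user {nickname} <{email}> created.')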
@@ -100,22 +100,28 @@ def create_app(config_name=None):
    os.makedirs(os.path.join(upload_dir, 'gpx'), exist_ok=True)

    # --- Initial Admin Creation from .env ---
    # Temporarily disabled for migration setup
    # from app.models import User
    # with app.app_context():
    #     admin_email = os.environ.get('ADMIN_EMAIL')
    #     admin_nickname = os.environ.get('ADMIN_NICKNAME')
    #     admin_password = os.environ.get('ADMIN_PASSWORD')
    #     if admin_email and admin_nickname and admin_password:
    #         if not User.query.filter_by(email=admin_email).first():
    #             user = User(nickname=admin_nickname, email=admin_email, is_admin=True, is_active=True)
    #             user.set_password(admin_password)
    #             db.session.add(user)
    #             db.session.commit()
    #             print(f"[INFO] Admin user {admin_nickname} <{admin_email}> created from .env.")
    #         else:
    #             print(f"[INFO] Admin with email {admin_email} already exists.")
    #     else:
    #         print("[INFO] ADMIN_EMAIL, ADMIN_NICKNAME, or ADMIN_PASSWORD not set in .env. Skipping admin creation.")
    from app.models import User
    with app.app_context():
        admin_email = os.environ.get('ADMIN_EMAIL')
        admin_nickname = os.environ.get('ADMIN_NICKNAME')
        admin_password = os.environ.get('ADMIN_PASSWORD')
        if admin_email and admin_nickname and admin_password:
            try:
                # Check if users table exists first
                db.session.execute('SELECT 1 FROM users LIMIT 1')
                # If we got here, table exists, now check for admin user
                if not User.query.filter_by(email=admin_email).first():
                    user = User(nickname=admin_nickname, email=admin_email, is_admin=True, is_active=True)
                    user.set_password(admin_password)
                    db.session.add(user)
                    db.session.commit()
                    print(f"[INFO] Admin user {admin_nickname} <{admin_email}> created from .env.")
                else:
                    print(f"[INFO] Admin with email {admin_email} already exists.")
            except Exception as e:
                # Table doesn't exist yet, skip admin creation
                print(f"[INFO] Database not initialized yet. Skipping admin creation. Run migrations with: flask db upgrade")
        else:
            print("[INFO] ADMIN_EMAIL, ADMIN_NICKNAME, or ADMIN_PASSWORD not set in .env. Skipping admin creation.")

    return app
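Editor's note: on SQLAlchemy 2.x, passing a raw string to db.session.execute() raises an error, so the 'SELECT 1 FROM users LIMIT 1' probe above only works on the 1.x API. A minimal sketch of an equivalent table-existence check that works on either version, using sqlalchemy.inspect (assuming Flask-SQLAlchemy's db.engine is reachable inside the app context, as it is here):

    from sqlalchemy import inspect

    def users_table_exists(db) -> bool:
        """Return True if the 'users' table already exists in the bound database."""
        inspector = inspect(db.engine)   # reflects the live schema
        return 'users' in inspector.get_table_names()

    # Usage inside create_app(), replacing the raw SELECT probe:
    # if users_table_exists(db):
    #     ...create the admin user...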
@@ -15,6 +15,32 @@ import gpxpy

community = Blueprint('community', __name__)

@community.route('/debug')
def debug():
    """Debug endpoint to check posts"""
    posts = Post.query.filter_by(published=True).order_by(Post.created_at.desc()).paginate(
        page=1, per_page=12, error_out=False
    )

    debug_info = {
        'total_posts': posts.total,
        'items_count': len(posts.items),
        'pages': posts.pages,
        'posts': []
    }

    for post in posts.items:
        debug_info['posts'].append({
            'id': post.id,
            'title': post.title,
            'published': post.published,
            'author': post.author.nickname if post.author else None,
            'images_count': post.images.count(),
            'media_folder': post.media_folder
        })

    return jsonify(debug_info)

@community.route('/')
def index():
    """Community main page with map and posts"""

@@ -26,6 +52,11 @@ def index():
    # Get posts with GPX files for map display
    posts_with_routes = Post.query.filter_by(published=True).join(GPXFile).all()

    # Debug logging
    current_app.logger.info(f"Community index: Found {posts.total} total posts, {len(posts.items)} on page {page}")
    for post in posts.items:
        current_app.logger.info(f"  - Post {post.id}: {post.title}, images: {post.images.count()}")

    return render_template('community/index.html', posts=posts, posts_with_routes=posts_with_routes)

@community.route('/test-map')
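Editor's note: the /community/api/routes endpoint named in the commit message does not appear in the hunks above. A hypothetical sketch of what it could return for the Leaflet map iframe, assuming a MapRoute model imported from app.models with the illustrative column names used below:

    @community.route('/api/routes')
    def api_routes():
        """Return route geometry summaries and stats as JSON for the map iframe."""
        routes = MapRoute.query.all()          # assumed MapRoute model
        return jsonify([
            {
                'id': r.id,
                'name': r.name,                       # assumed columns
                'distance_km': r.total_distance,
                'elevation_gain_m': r.elevation_gain,
                'start': {'lat': r.start_lat, 'lng': r.start_lng},
            }
            for r in routes
        ])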
@@ -5,13 +5,10 @@ services:
      dockerfile: Dockerfile
    ports:
      - "8100:5000"
    environment:
      - FLASK_CONFIG=production
      - DATABASE_URL=sqlite:////data/moto_adventure.db
      - SECRET_KEY=ana_are_mere_si-si-pere_cat-cuprinde_in_cos
    working_dir: /opt/moto_site
    volumes:
      - ./data:/data  # Database persistence
      - ./data:/opt/moto_site/data  # Database persistence
      - ./uploads:/opt/moto_site/uploads  # File uploads persistence
      - ./app/static/media:/opt/moto_site/app/static/media  # Media files persistence
      - ./.env:/opt/moto_site/.env  # Load .env file
    restart: unless-stopped
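Editor's note: with the inline environment: block removed, the settings now have to come from the mounted .env file. The Flask CLI auto-loads .env when python-dotenv is installed; a config module can also load it explicitly. A minimal sketch, assuming python-dotenv is a dependency and the variable names from the compose file and app/__init__.py:

    # config.py sketch: load the mounted .env before reading settings.
    import os
    from dotenv import load_dotenv

    load_dotenv()  # reads /opt/moto_site/.env inside the container (working_dir)

    class ProductionConfig:
        SECRET_KEY = os.environ.get('SECRET_KEY')
        SQLALCHEMY_DATABASE_URI = os.environ.get(
            'DATABASE_URL',
            'sqlite:////opt/moto_site/data/moto_adventure.db'  # assumed default path
        )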
run.py
@@ -20,84 +20,101 @@ def make_shell_context():

@app.cli.command()
def init_db():
    """Initialize the database."""
    """Initialize the database with complete schema."""
    print('Creating all database tables...')
    db.create_all()
    print('Database initialized.')
    print('✓ Database schema created successfully')

@app.cli.command()
def migrate_db():
    """Apply database migrations."""
    from sqlalchemy import text
    """Apply all database schema migrations to ensure compatibility."""
    from sqlalchemy import text, inspect

    try:
        # Check if the media_folder column exists
        result = db.session.execute(text('PRAGMA table_info(posts)'))
        columns = [row[1] for row in result.fetchall()]
        # Get database inspector
        inspector = inspect(db.engine)

        print('Starting database schema migration...\n')

        # 1. Check and migrate posts table
        if 'posts' in inspector.get_table_names():
            columns = [col['name'] for col in inspector.get_columns('posts')]

            if 'media_folder' not in columns:
                # Add the media_folder column
                db.session.execute(text('ALTER TABLE posts ADD COLUMN media_folder VARCHAR(100)'))
                db.session.commit()
                print('Successfully added media_folder column to posts table')
            else:
                print('media_folder column already exists')
                print('✓ Added media_folder column to posts table')

            # Check if is_cover column exists in post_images table
            result = db.session.execute(text('PRAGMA table_info(post_images)'))
            columns = [row[1] for row in result.fetchall()]
            # Check for other expected columns
            expected = ['title', 'content', 'author_id', 'published', 'created_at']
            for col in expected:
                if col not in columns:
                    print(f'✗ WARNING: Expected column {col} not found in posts table')

        # 2. Check and migrate post_images table
        if 'post_images' in inspector.get_table_names():
            columns = [col['name'] for col in inspector.get_columns('post_images')]

            if 'is_cover' not in columns:
                # Add the is_cover column
                db.session.execute(text('ALTER TABLE post_images ADD COLUMN is_cover BOOLEAN DEFAULT 0'))
                db.session.commit()
                print('Successfully added is_cover column to post_images table')
            else:
                print('is_cover column already exists')
                print('✓ Added is_cover column to post_images table')

        # Check if page_views table exists
        result = db.session.execute(text("SELECT name FROM sqlite_master WHERE type='table' AND name='page_views'"))
        if not result.fetchone():
            # Create page_views table
            db.session.execute(text('''
                CREATE TABLE page_views (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    path VARCHAR(255) NOT NULL,
                    user_agent VARCHAR(500),
                    ip_address VARCHAR(45),
                    referer VARCHAR(500),
                    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                    user_id INTEGER,
                    post_id INTEGER,
                    FOREIGN KEY (user_id) REFERENCES users(id),
                    FOREIGN KEY (post_id) REFERENCES posts(id)
                )
            '''))
        # 3. Check and migrate chat_rooms table
        if 'chat_rooms' in inspector.get_table_names():
            columns = [col['name'] for col in inspector.get_columns('chat_rooms')]

            if 'category' not in columns:
                db.session.execute(text('ALTER TABLE chat_rooms ADD COLUMN category VARCHAR(50) DEFAULT "general"'))
                db.session.commit()
                print('Successfully created page_views table')
            else:
                print('page_views table already exists')
                print('✓ Added category column to chat_rooms table')

            # Check if GPX statistics columns exist
            result = db.session.execute(text('PRAGMA table_info(gpx_files)'))
            columns = [row[1] for row in result.fetchall()]
            if 'last_activity' not in columns:
                db.session.execute(text('ALTER TABLE chat_rooms ADD COLUMN last_activity DATETIME DEFAULT CURRENT_TIMESTAMP'))
                db.session.commit()
                print('✓ Added last_activity column to chat_rooms table')

        new_columns = [
            ('total_distance', 'REAL DEFAULT 0.0'),
            ('elevation_gain', 'REAL DEFAULT 0.0'),
            ('max_elevation', 'REAL DEFAULT 0.0'),
            ('min_elevation', 'REAL DEFAULT 0.0'),
            ('total_points', 'INTEGER DEFAULT 0')
        # 4. Check and add GPX statistics columns
        if 'gpx_files' in inspector.get_table_names():
            columns = [col['name'] for col in inspector.get_columns('gpx_files')]

            gpx_columns = {
                'total_distance': 'REAL DEFAULT 0.0',
                'elevation_gain': 'REAL DEFAULT 0.0',
                'max_elevation': 'REAL DEFAULT 0.0',
                'min_elevation': 'REAL DEFAULT 0.0',
                'total_points': 'INTEGER DEFAULT 0'
            }

            for col_name, col_type in gpx_columns.items():
                if col_name not in columns:
                    db.session.execute(text(f'ALTER TABLE gpx_files ADD COLUMN {col_name} {col_type}'))
                    db.session.commit()
                    print(f'✓ Added {col_name} column to gpx_files table')

        # 5. Verify all required tables exist
        required_tables = [
            'users', 'posts', 'post_images', 'gpx_files',
            'comments', 'likes', 'page_views', 'chat_rooms',
            'chat_messages', 'map_routes', 'password_reset_requests'
        ]

        for column_name, column_type in new_columns:
            if column_name not in columns:
                db.session.execute(text(f'ALTER TABLE gpx_files ADD COLUMN {column_name} {column_type}'))
                db.session.commit()
                print(f'Successfully added {column_name} column to gpx_files table')
            else:
                print(f'{column_name} column already exists in gpx_files table')
        existing_tables = inspector.get_table_names()
        missing_tables = [t for t in required_tables if t not in existing_tables]

        if missing_tables:
            print(f'\n✗ WARNING: Missing tables: {missing_tables}')
            print('  Running init-db to create missing tables...')
            db.create_all()
            print('✓ All missing tables created')

        print('\n✓ Database migration completed successfully')
        print(f'✓ Total tables: {len(existing_tables)}')

        print('Database schema is up to date')
    except Exception as e:
        print(f'✗ Migration error: {e}')
        db.session.rollback()
        raise
        print(f'Migration error: {e}')
        db.session.rollback()
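Editor's note: the new migrate-db command repeats one pattern per table: inspect the live schema, then issue ALTER TABLE for each missing column. A compact sketch of that pattern as a reusable helper (the helper name and call are illustrative, not part of the commit):

    from sqlalchemy import inspect, text

    def add_missing_columns(db, table, columns):
        """Add any columns from `columns` ({name: SQL type/default}) missing from `table`."""
        inspector = inspect(db.engine)
        existing = {col['name'] for col in inspector.get_columns(table)}
        for name, ddl in columns.items():
            if name not in existing:
                # Identifiers come from code, not user input, so f-string DDL is acceptable here.
                db.session.execute(text(f'ALTER TABLE {table} ADD COLUMN {name} {ddl}'))
                db.session.commit()
                print(f'✓ Added {name} column to {table} table')

    # Example: the gpx_files statistics columns handled above.
    # add_missing_columns(db, 'gpx_files', {'total_distance': 'REAL DEFAULT 0.0'})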