Compare commits

1 commit: 1eb0aa3658 ... 2e719fc029

| Author | SHA1 | Date |
|---|---|---|
|  | 2e719fc029 |  |
@@ -1,45 +0,0 @@
# Git
.git
.gitignore

# Documentation
README.md
*.md

# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
env/
venv/
ENV/
env.bak/
venv.bak/

# IDEs
.vscode/
.idea/
*.swp
*.swo
*~

# OS
.DS_Store
Thumbs.db

# Data folders (these will be mounted as volumes)
data/

# Logs
*.log

# Environment files
.env
.env.local
.env.example

# Temporary files
tmp/
temp/

44  .env.example
@@ -1,41 +1,21 @@
# DigiServer Environment Configuration
# Copy this file to .env and modify the values as needed
# .env - Flask environment variables

# Flask Configuration
FLASK_APP=app.py
FLASK_RUN_HOST=0.0.0.0
FLASK_ENV=production
# Flask secret key (change this to something secure in production)
SECRET_KEY=Ana_Are_Multe_Mere-Si_Nu_Are_Pere

# Security
SECRET_KEY=Ma_Duc_Dupa_Merele_Lui_Ana
# Change this to a secure random string in production!
# Flask environment: development or production
FLASK_ENV=development

# Default Admin User
ADMIN_USER=admin
ADMIN_PASSWORD=Initial01!
# Change the default password after first login!

# Database Configuration
# SQLite database file will be created in data/instance/dashboard.db
# Database location (optional, defaults to instance/dashboard.db)
# SQLALCHEMY_DATABASE_URI=sqlite:///instance/dashboard.db

# Application Settings
MAX_CONTENT_LENGTH=2147483648 # 2GB in bytes
UPLOAD_FOLDER=static/uploads
UPLOAD_FOLDERLOGO=static/resurse
# Default admin user credentials (used for auto-creation)
DEFAULT_USER=admin
DEFAULT_PASSWORD=1234

# Server Information
SERVER_VERSION=1.1.0
BUILD_DATE=2025-06-29

# Docker Configuration (for docker-compose.yml)
DIGISERVER_PORT=8880
CONTAINER_NAME=digiserver

# Flask server settings (for development)
# Flask server settings
HOST=0.0.0.0
PORT=5000

# Optional: External Database (for advanced users)
# DATABASE_URL=postgresql://user:password@localhost/digiserver
# DATABASE_URL=mysql://user:password@localhost/digiserver
# Maximum upload size (in bytes, 2GB)
MAX_CONTENT_LENGTH=2147483648

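For illustration only, a minimal sketch of how settings like these are read on the Flask side with python-dotenv, mirroring the `load_dotenv()` / `os.getenv()` pattern visible in the app.py hunks further down; the fallback values here are placeholders for the sketch, not the application's real defaults:

```python
# Minimal sketch: reading .env values the way the Flask app in this diff does.
# Assumes the python-dotenv package; fallback values are illustrative only.
import os
from dotenv import load_dotenv

load_dotenv()  # loads key=value pairs from .env into the process environment

SECRET_KEY = os.getenv('SECRET_KEY', 'change-me')               # session signing key
ADMIN_USER = os.getenv('ADMIN_USER', 'admin')                   # default admin account
UPLOAD_FOLDER = os.getenv('UPLOAD_FOLDER', 'static/uploads')    # media upload directory
MAX_CONTENT_LENGTH = int(os.getenv('MAX_CONTENT_LENGTH', str(2 * 1024**3)))  # bytes

print(ADMIN_USER, UPLOAD_FOLDER, MAX_CONTENT_LENGTH)
```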
37  .gitignore (vendored)
@@ -1,41 +1,4 @@
# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
env/
venv/
ENV/
env.bak/
venv.bak/

# Environment files
.env
.env.local
venv/
# Data directories (persistent storage)
data/
instance/
instance.bak/

# Legacy directories (can be removed after migration)
digiscreen/

# IDE
.vscode/
.idea/

# OS
.DS_Store
Thumbs.db

# Logs
*.log

# Backups
backups/

# Temporary files
tmp/
temp/

@@ -1,92 +0,0 @@
# DigiServer v1.1.0 - Production Deployment Guide

## 🎯 Ready for Deployment

Your DigiServer application has been cleaned and prepared for Docker deployment.

### ✅ What's Been Prepared

1. **Application Cleaned**
   - Python cache files removed (`__pycache__/`, `*.pyc`)
   - Development artifacts cleaned
   - Production-ready structure

2. **Docker Configuration**
   - Dockerfile optimized with LibreOffice and poppler-utils
   - docker-compose.yml configured for production
   - .dockerignore updated to exclude development files
   - Data persistence configured via volumes

3. **Deployment Scripts**
   - `deploy-docker.sh` - Automated deployment script
   - `cleanup-docker.sh` - Complete cleanup script
   - Both scripts use modern `docker compose` syntax

4. **Data Structure**
   - `./data/instance/` - Database files
   - `./data/uploads/` - Media uploads
   - `./data/resurse/` - System resources
   - All directories auto-created and volume-mounted

### 🚀 Quick Deployment

```bash
# Deploy DigiServer
./deploy-docker.sh

# Access at: http://localhost:8880
# Username: admin
# Password: Initial01!
```

### 📋 Features Ready

- ✅ **Document Processing**: LibreOffice + poppler-utils integrated
- ✅ **File Uploads**: PPTX → PDF → 4K JPG workflow
- ✅ **Path Resolution**: Absolute path handling for containerized deployment
- ✅ **File Management**: Bulk delete functions with physical file cleanup
- ✅ **User Management**: Admin user auto-creation
- ✅ **Data Persistence**: Volume-mounted data directories
- ✅ **Health Checks**: Container health monitoring
- ✅ **Production Logging**: Structured output and error handling

### 🔧 System Requirements

- Docker Engine 20.10+
- Docker Compose v2 (plugin)
- 2GB RAM minimum
- 10GB disk space

### 📁 Deployment Structure

```
digiserver/
├── 📁 app/                  # Application code
├── 📁 data/                 # Persistent data (auto-created)
│   ├── 📁 instance/         # Database
│   ├── 📁 uploads/          # Media files
│   └── 📁 resurse/          # Resources
├── 🐳 Dockerfile            # Production image
├── 🔧 docker-compose.yml    # Container orchestration
├── 🚀 deploy-docker.sh      # Deployment script
├── 🧹 cleanup-docker.sh     # Cleanup script
└── 📖 README.md             # Documentation
```

### 🔐 Security Notes

- Change default password after first login
- SECRET_KEY configured for session security
- File upload restrictions in place
- Container runs with proper permissions

### 📊 Monitoring

- Health checks configured (30s intervals)
- Container auto-restart on failure
- Logs available via `docker compose logs -f`
- Status monitoring with `docker compose ps`

---

**Next Step**: Run `./deploy-docker.sh` to deploy your DigiServer! 🚀

56  Dockerfile
@@ -1,59 +1,31 @@
# Use Python 3.11 slim image
FROM python:3.11-slim

# Set working directory
WORKDIR /app

# Install system dependencies including LibreOffice and poppler-utils
# Install system dependencies, including Rust and build tools
RUN apt-get update && apt-get install -y \
    poppler-utils \
    libreoffice \
    ffmpeg \
    libpoppler-cpp-dev \
    libmagic1 \
    libffi-dev \
    libssl-dev \
    g++ \
    curl \
    libjpeg-dev \
    zlib1g-dev \
    libxml2-dev \
    libxslt-dev \
    build-essential \
    cargo \
    fonts-dejavu-core \
    && rm -rf /var/lib/apt/lists/* \
    && apt-get clean
    libreoffice poppler-utils ffmpeg \
    libpoppler-cpp-dev libmagic1 \
    libffi-dev libssl-dev g++ curl libjpeg-dev zlib1g-dev \
    libxml2-dev libxslt-dev build-essential cargo \
    && rm -rf /var/lib/apt/lists/*

# Debug: Verify Rust installation
RUN rustc --version && cargo --version

# Verify LibreOffice and poppler-utils installation
RUN libreoffice --version && pdftoppm -v
# Copy application files
COPY . /app

# Copy requirements first for better layer caching
COPY app/requirements.txt .
# Copy entrypoint script and make it executable
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh

# Upgrade pip and install Python dependencies
# Upgrade pip and install Python dependencies (using piwheels for ARM)
RUN python -m pip install --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY app/ .

# Make entrypoint script executable
RUN chmod +x entrypoint.sh

# Create necessary directories for volumes
RUN mkdir -p /app/static/uploads /app/static/resurse /app/instance
    pip install --no-cache-dir --extra-index-url https://www.piwheels.org/simple -r requirements.txt

# Expose the application port
EXPOSE 5000

# Set environment variables
ENV FLASK_APP=app.py
ENV FLASK_RUN_HOST=0.0.0.0
ENV PYTHONPATH=/app

# Use entrypoint script
ENTRYPOINT ["./entrypoint.sh"]
ENTRYPOINT ["/entrypoint.sh"]

258  README.md
@@ -1,258 +0,0 @@
# DigiServer - Digital Signage Management Platform





DigiServer is a comprehensive digital signage management platform built with Flask. It allows you to manage digital displays, create playlists, organize content into groups, and control multiple players from a centralized dashboard.

## 🚀 Features

- **Multi-Player Management**: Control multiple digital signage players from a single dashboard
- **Group Management**: Organize players into groups for synchronized content
- **Content Management**: Upload and manage various media types (images, videos, PDFs, PowerPoint presentations)
- **Real-time Updates**: Players automatically sync with the latest content
- **User Management**: Admin and user role-based access control
- **Orientation Support**: Configure display orientation (Landscape/Portrait) per player and group
- **API Integration**: RESTful API for player authentication and playlist retrieval
- **Docker Support**: Easy deployment with Docker containers

## 📋 Requirements

- Docker and Docker Compose
- Python 3.11+ (if running without Docker)
- FFmpeg (for video processing)
- LibreOffice (for document conversion)

## 📁 Project Structure

```
digiserver/
├── app/                     # Application code
│   ├── models/              # Database models
│   ├── templates/           # HTML templates
│   ├── utils/               # Utility functions
│   ├── app.py               # Main Flask application
│   ├── extensions.py        # Flask extensions
│   ├── requirements.txt     # Python dependencies
│   └── entrypoint.sh        # Container entry point
├── data/                    # Persistent data (created on first run)
│   ├── instance/            # Database files
│   ├── uploads/             # Media uploads
│   └── resurse/             # System resources (logos, etc.)
├── docker-compose.yml       # Docker Compose configuration
├── Dockerfile               # Docker image definition
└── README.md                # This file
```

## 🐳 Quick Start with Docker

### Automated Deployment (Recommended)

1. **Clone the repository**
   ```bash
   git clone <repository-url>
   cd digiserver
   ```

2. **Deploy with automated script**
   ```bash
   ./deploy-docker.sh
   ```

   This script will:
   - Check Docker requirements
   - Build the DigiServer image
   - Create necessary data directories
   - Start the containers
   - Display access information

3. **Access the application**
   - Open your browser and navigate to `http://localhost:8880`
   - Default admin credentials:
     - Username: `admin`
     - Password: `Initial01!`

### Manual Docker Commands

Alternatively, you can use Docker commands directly:

```bash
# Build and start
docker compose up -d

# Stop
docker compose down

# View logs
docker compose logs -f

# Check status
docker compose ps
```

### Clean Up

To completely remove DigiServer containers and images:
```bash
./cleanup-docker.sh
```

## 🔧 Configuration

### Environment Variables

You can customize the application by modifying the environment variables in `docker-compose.yml`:

- `ADMIN_USER`: Default admin username (default: admin)
- `ADMIN_PASSWORD`: Default admin password (default: Initial01!)
- `SECRET_KEY`: Flask secret key for session security
- `FLASK_APP`: Flask application entry point
- `FLASK_RUN_HOST`: Host to bind the Flask application

### Data Persistence

All persistent data is stored in the `data/` folder:
- `data/instance/`: SQLite database files
- `data/uploads/`: Uploaded media files
- `data/resurse/`: System resources (logo, login images)

This folder will be created automatically on first run and persists between container restarts.

## 💻 Manual Installation (Development)

If you prefer to run without Docker:

1. **Install system dependencies**
   ```bash
   # Ubuntu/Debian
   sudo apt-get update
   sudo apt-get install python3.11 python3-pip libreoffice ffmpeg

   # CentOS/RHEL
   sudo yum install python3.11 python3-pip libreoffice ffmpeg
   ```

2. **Install Python dependencies**
   ```bash
   cd app/
   pip install -r requirements.txt
   ```

3. **Run the application**
   ```bash
   python app.py
   ```

## 🎮 Usage

### Managing Players

1. **Add a Player**: Navigate to the dashboard and click "Add Player"
2. **Configure Player**: Set username, hostname, passwords, and orientation
3. **Upload Content**: Upload media files to the player's playlist
4. **Player Authentication**: Players can authenticate using hostname and password/quickconnect code

### Managing Groups

1. **Create Group**: Group multiple players for synchronized content
2. **Assign Players**: Add/remove players from groups
3. **Upload Group Content**: Upload content that will be shared across all players in the group
4. **Group Display**: View group content in fullscreen mode

### Content Types Supported

- **Images**: JPG, PNG, GIF
- **Videos**: MP4, AVI, MOV (automatically converted to MP4)
- **Documents**: PDF (converted to images)
- **Presentations**: PPTX (converted to images)

## 🔌 API Endpoints

### Player API

- `GET /api/playlists?hostname={hostname}&quickconnect_code={code}`: Get player playlist
- `GET /api/playlist_version?hostname={hostname}&quickconnect_code={code}`: Get playlist version
- `GET /media/{filename}`: Serve media files

### Authentication

Players authenticate using:
- **Hostname**: Unique identifier for the player
- **Password**: Primary authentication method
- **Quickconnect Code**: Alternative authentication method

## 🛠️ Development

### Building the Docker Image

```bash
docker build -t digiserver:latest .
```

### Running Tests

```bash
# Install test dependencies
pip install pytest pytest-flask

# Run tests
pytest
```

### Database Management

The application uses SQLite with Flask-Migrate for database management:

```bash
# Initialize database
flask db init

# Create migration
flask db migrate -m "Description of changes"

# Apply migration
flask db upgrade
```

## 🔒 Security

- **User Authentication**: Role-based access control (admin/user)
- **Player Authentication**: Secure hostname and password-based authentication
- **File Upload Security**: Secure filename handling and file type validation
- **Session Management**: Secure session handling with configurable secret key

## 📊 Monitoring

- **Server Logs**: View recent server activities from the dashboard
- **Health Check**: Docker health check endpoint for monitoring
- **Content Management**: Track content usage and cleanup unused files

## 🤝 Contributing

1. Fork the repository
2. Create a feature branch (`git checkout -b feature/amazing-feature`)
3. Commit your changes (`git commit -m 'Add some amazing feature'`)
4. Push to the branch (`git push origin feature/amazing-feature`)
5. Open a Pull Request

## 📝 License

This project is licensed under the MIT License - see the LICENSE file for details.

## 🆘 Support

For support and questions:
- Create an issue in the repository
- Check the documentation in the `docs/` folder
- Review the application logs for troubleshooting

## 🔄 Version History

- **1.1.0** (2025-06-29): Added orientation support, improved group management
- **1.0.0**: Initial release with basic digital signage functionality

---

**Note**: Make sure to change the default admin password after first login for security purposes.

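As a usage illustration of the player API documented in the README above (not part of the repository), a minimal polling sketch: it assumes the server is reachable at http://localhost:8880, uses placeholder hostname/quickconnect values, checks `/api/playlist_version`, and refetches `/api/playlists` only when the version changes, which mirrors the sync behaviour the README describes.

```python
# Illustrative player-side polling sketch (assumes the `requests` package).
# BASE_URL, HOSTNAME and QUICKCONNECT_CODE are placeholders, not real credentials.
import time
import requests

BASE_URL = "http://localhost:8880"
PARAMS = {"hostname": "lobby-screen-01", "quickconnect_code": "123456"}

last_version = None
while True:
    ver = requests.get(f"{BASE_URL}/api/playlist_version", params=PARAMS, timeout=10).json()
    if "error" in ver:
        raise SystemExit(f"authentication failed: {ver['error']}")
    if ver["playlist_version"] != last_version:
        data = requests.get(f"{BASE_URL}/api/playlists", params=PARAMS, timeout=10).json()
        last_version = data["playlist_version"]
        for item in data["playlist"]:
            # each playlist entry carries file_name, url and duration
            print(item["file_name"], item["url"], item["duration"])
    time.sleep(30)  # poll interval is a choice of this sketch, not of DigiServer
```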
BIN  __pycache__/extensions.cpython-311.pyc  Normal file
Binary file not shown.
@@ -1,21 +1,25 @@
import os
import click
import time
import psutil
import shutil
import zipfile
import tempfile
from flask import Flask, render_template, request, redirect, url_for, session, flash, jsonify, send_from_directory, send_file
import threading
from flask import Flask, render_template, request, redirect, url_for, session, flash, jsonify, send_from_directory
from flask_migrate import Migrate
import subprocess
from werkzeug.utils import secure_filename
from functools import wraps
from functools import wraps, lru_cache
from extensions import db, bcrypt, login_manager
from sqlalchemy import text
from dotenv import load_dotenv
import logging
import gc

# Load environment variables from .env file
load_dotenv()

# Configure logging for better performance monitoring
logging.basicConfig(level=logging.WARNING)

# First import models
from models import User, Player, Group, Content, ServerLog, group_player

@@ -42,7 +46,7 @@ from utils.uploads import (
    add_image_to_playlist,
    convert_video_and_update_playlist,
    process_pdf,
    process_pptx,
    process_pptx_improved,
    process_uploaded_files
)

@@ -50,15 +54,7 @@ from utils.uploads import (
SERVER_VERSION = "1.1.0"
BUILD_DATE = "2025-06-29"

# Get the absolute path of the app directory
app_dir = os.path.dirname(os.path.abspath(__file__))
template_dir = os.path.join(app_dir, 'templates')
static_dir = os.path.join(app_dir, 'static')

app = Flask(__name__,
            instance_relative_config=True,
            template_folder=template_dir,
            static_folder=static_dir)
app = Flask(__name__, instance_relative_config=True)

# Set the secret key from environment variable or use a default value
app.config['SECRET_KEY'] = os.getenv('SECRET_KEY', 'Ana_Are_Multe_Mere-Si_Nu_Are_Pere')
@@ -69,8 +65,18 @@ db_path = os.path.join(instance_dir, 'dashboard.db')
app.config['SQLALCHEMY_DATABASE_URI'] = f'sqlite:///{db_path}'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

# Set maximum content length to 1GB
app.config['MAX_CONTENT_LENGTH'] = 2048 * 2048 * 2048  # 2GB, adjust as needed
# Performance configuration
app.config['SQLALCHEMY_ENGINE_OPTIONS'] = {
    'pool_pre_ping': True,
    'pool_recycle': 300,
    'connect_args': {'timeout': 10}
}

# Set maximum content length to 1GB (reduced from 2GB)
app.config['MAX_CONTENT_LENGTH'] = 1024 * 1024 * 1024  # 1GB

# Add timeout configuration
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 31536000  # Cache static files for 1 year

# Ensure the instance folder exists
os.makedirs(app.instance_path, exist_ok=True)
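A quick check of the byte arithmetic in the hunk above: the removed line's comment claims 2 GB, but `2048 * 2048 * 2048` is 2^33 bytes (8 GiB); 2 GiB is 2147483648, the value used in `.env.example`, and the added line's `1024 * 1024 * 1024` is 1 GiB, which matches its comment.

```python
# Verify the upload-limit arithmetic referenced above.
assert 2048 * 2048 * 2048 == 8 * 1024**3   # removed line: actually 8 GiB, not 2 GB
assert 2 * 1024**3 == 2147483648           # 2 GiB, the value set in .env.example
assert 1024 * 1024 * 1024 == 2**30         # added line: 1 GiB, as its comment says
```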
@@ -107,52 +113,6 @@ def admin_required(f):
        return f(*args, **kwargs)
    return decorated_function

def get_system_info():
    """Get system monitoring information"""
    try:
        # CPU information
        cpu_percent = psutil.cpu_percent(interval=1)
        cpu_count = psutil.cpu_count()

        # Memory information
        memory = psutil.virtual_memory()
        memory_percent = memory.percent
        memory_used = round(memory.used / (1024**3), 2)  # GB
        memory_total = round(memory.total / (1024**3), 2)  # GB

        # Disk information
        disk = psutil.disk_usage('/')
        disk_percent = round((disk.used / disk.total) * 100, 1)
        disk_used = round(disk.used / (1024**3), 2)  # GB
        disk_total = round(disk.total / (1024**3), 2)  # GB
        disk_free = round(disk.free / (1024**3), 2)  # GB

        # Upload folder size
        upload_folder_size = 0
        if os.path.exists(UPLOAD_FOLDER):
            for dirpath, dirnames, filenames in os.walk(UPLOAD_FOLDER):
                for filename in filenames:
                    filepath = os.path.join(dirpath, filename)
                    if os.path.exists(filepath):
                        upload_folder_size += os.path.getsize(filepath)
        upload_folder_size_gb = round(upload_folder_size / (1024**3), 2)

        return {
            'cpu_percent': cpu_percent,
            'cpu_count': cpu_count,
            'memory_percent': memory_percent,
            'memory_used': memory_used,
            'memory_total': memory_total,
            'disk_percent': disk_percent,
            'disk_used': disk_used,
            'disk_total': disk_total,
            'disk_free': disk_free,
            'upload_folder_size': upload_folder_size_gb
        }
    except Exception as e:
        print(f"Error getting system info: {e}")
        return None

@app.route('/')
@login_required
def dashboard():

@@ -225,12 +185,9 @@ def upload_content():

    players = [{'id': player.id, 'username': player.username} for player in Player.query.all()]
    groups = [{'id': group.id, 'name': group.name} for group in Group.query.all()]

    # Get system information for monitoring
    system_info = get_system_info()

    return render_template('upload_content.html', target_type=target_type, target_id=target_id,
                           players=players, groups=groups, return_url=return_url, system_info=system_info)
                           players=players, groups=groups, return_url=return_url)

@app.route('/admin')
@login_required

@@ -239,18 +196,13 @@ def admin():
    logo_exists = os.path.exists(os.path.join(app.config['UPLOAD_FOLDERLOGO'], 'logo.png'))
    login_picture_exists = os.path.exists(os.path.join(app.config['UPLOAD_FOLDERLOGO'], 'login_picture.png'))
    users = User.query.all()

    # Get system information for monitoring
    system_info = get_system_info()

    return render_template(
        'admin.html',
        users=users,
        logo_exists=logo_exists,
        login_picture_exists=login_picture_exists,
        server_version=SERVER_VERSION,
        build_date=BUILD_DATE,
        system_info=system_info
        build_date=BUILD_DATE
    )

@app.route('/admin/change_role/<int:user_id>', methods=['POST'])

@@ -332,61 +284,6 @@ def delete_content(content_id):
    db.session.commit()
    return redirect(url_for('player_page', player_id=player_id))

@app.route('/player/<int:player_id>/bulk_delete', methods=['POST'])
@login_required
def bulk_delete_player_content(player_id):
    """Bulk delete selected media files from player"""
    player = Player.query.get_or_404(player_id)

    # Check if player is in a group (should be managed at group level)
    if player.groups:
        flash('Cannot delete media from players that are in groups. Manage media at the group level.', 'warning')
        return redirect(url_for('player_page', player_id=player_id))

    selected_content_ids = request.form.getlist('selected_content')

    if not selected_content_ids:
        flash('No media files selected for deletion.', 'warning')
        return redirect(url_for('player_page', player_id=player_id))

    try:
        deleted_files = []
        deleted_count = 0

        for content_id in selected_content_ids:
            content = Content.query.filter_by(id=content_id, player_id=player_id).first()
            if content:
                # Delete file from filesystem using absolute path
                upload_folder = app.config['UPLOAD_FOLDER']
                if not os.path.isabs(upload_folder):
                    upload_folder = os.path.abspath(upload_folder)
                file_path = os.path.join(upload_folder, content.file_name)

                if os.path.exists(file_path):
                    try:
                        os.remove(file_path)
                        deleted_files.append(content.file_name)
                        print(f"Deleted file: {file_path}")
                    except OSError as e:
                        print(f"Error deleting file {file_path}: {e}")

                # Delete from database
                db.session.delete(content)
                deleted_count += 1

        # Update playlist version for the player
        player.playlist_version += 1
        db.session.commit()

        flash(f'Successfully deleted {deleted_count} media file(s). Playlist updated to version {player.playlist_version}.', 'success')

    except Exception as e:
        db.session.rollback()
        print(f"Error in bulk delete: {e}")
        flash('An error occurred while deleting media files.', 'danger')

    return redirect(url_for('player_page', player_id=player_id))

@app.route('/player/<int:player_id>/fullscreen', methods=['GET', 'POST'])
def player_fullscreen(player_id):
    player = Player.query.get_or_404(player_id)

@@ -435,6 +332,7 @@ def add_player():
        orientation = request.form.get('orientation', 'Landscape')  # <-- Get orientation
        add_player_util(username, hostname, password, quickconnect_password, orientation)  # <-- Pass orientation
        flash(f'Player "{username}" added successfully.', 'success')
        clear_player_cache()  # Clear cache when player is added
        return redirect(url_for('dashboard'))
    return render_template('add_player.html')

@@ -451,6 +349,7 @@ def edit_player(player_id):
        orientation = request.form.get('orientation', player.orientation)  # <-- Get orientation
        edit_player_util(player_id, username, hostname, password, quickconnect_password, orientation)  # <-- Pass orientation
        flash(f'Player "{username}" updated successfully.', 'success')
        clear_player_cache()  # Clear cache when player is updated
        return redirect(url_for('player_page', player_id=player.id))

    return_url = request.args.get('return_url', url_for('player_page', player_id=player.id))
@@ -465,6 +364,103 @@ def change_theme():
    db.session.commit()
    return redirect(url_for('admin'))

# Group management routes
@app.route('/group/create', methods=['GET', 'POST'])
@login_required
@admin_required
def create_group():
    if request.method == 'POST':
        name = request.form['name']
        player_ids = request.form.getlist('players')
        orientation = request.form.get('orientation', 'Landscape')

        try:
            # Convert player_ids to integers
            player_ids = [int(pid) for pid in player_ids]
            group = create_group_util(name, player_ids, orientation)
            flash(f'Group "{name}" created successfully.', 'success')
            return redirect(url_for('dashboard'))
        except ValueError as e:
            flash(str(e), 'danger')
            return redirect(url_for('create_group'))

    # GET request - show create group form
    players = Player.query.filter_by(locked_to_group_id=None).all()  # Only available players
    return render_template('create_group.html', players=players)

@app.route('/group/<int:group_id>/edit', methods=['GET', 'POST'])
@login_required
@admin_required
def edit_group(group_id):
    group = Group.query.get_or_404(group_id)

    if request.method == 'POST':
        name = request.form['name']
        player_ids = request.form.getlist('players')
        orientation = request.form.get('orientation', group.orientation)

        try:
            # Convert player_ids to integers
            player_ids = [int(pid) for pid in player_ids]
            edit_group_util(group_id, name, player_ids, orientation)
            flash(f'Group "{name}" updated successfully.', 'success')
            return redirect(url_for('dashboard'))
        except ValueError as e:
            flash(str(e), 'danger')
            return redirect(url_for('edit_group', group_id=group_id))

    # GET request - show edit group form
    players = Player.query.all()
    return render_template('edit_group.html', group=group, players=players)

@app.route('/group/<int:group_id>/delete', methods=['POST'])
@login_required
@admin_required
def delete_group(group_id):
    delete_group_util(group_id)
    flash('Group deleted successfully.', 'success')
    return redirect(url_for('dashboard'))

@app.route('/group/<int:group_id>')
@login_required
def manage_group(group_id):
    group = Group.query.get_or_404(group_id)
    content = get_group_content(group_id)
    return render_template('manage_group.html', group=group, content=content)

@app.route('/group/<int:group_id>/fullscreen', methods=['GET', 'POST'])
def group_fullscreen(group_id):
    group = Group.query.get_or_404(group_id)
    content = get_group_content(group_id)
    return render_template('group_fullscreen.html', group=group, content=content)

@app.route('/group/<int:group_id>/media/<int:content_id>/edit', methods=['POST'])
@login_required
@admin_required
def edit_group_media(group_id, content_id):
    new_duration = int(request.form['duration'])
    success = edit_group_media(group_id, content_id, new_duration)

    if success:
        flash('Media duration updated successfully.', 'success')
    else:
        flash('Error updating media duration.', 'danger')

    return redirect(url_for('manage_group', group_id=group_id))

@app.route('/group/<int:group_id>/media/<int:content_id>/delete', methods=['POST'])
@login_required
@admin_required
def delete_group_media(group_id, content_id):
    success = delete_group_media(group_id, content_id)

    if success:
        flash('Media deleted successfully.', 'success')
    else:
        flash('Error deleting media.', 'danger')

    return redirect(url_for('manage_group', group_id=group_id))

@app.route('/upload_logo', methods=['POST'])
@login_required
@admin_required

@@ -525,259 +521,113 @@ def clean_unused_files():
    print("Used files:", used_files)
    print("Unused files:", unused_files)

    # Delete unused files using absolute path
    upload_folder = app.config['UPLOAD_FOLDER']
    if not os.path.isabs(upload_folder):
        upload_folder = os.path.abspath(upload_folder)

    # Delete unused files
    for file_name in unused_files:
        file_path = os.path.join(upload_folder, file_name)
        file_path = os.path.join(app.config['UPLOAD_FOLDER'], file_name)
        if os.path.isfile(file_path):
            print(f"Deleting unused file: {file_path}")
            print(f"Deleting file: {file_path}")  # Debugging: Print the file being deleted
            os.remove(file_path)

    flash('Unused files have been cleaned.', 'success')
    return redirect(url_for('admin'))

# Cache for frequently accessed data
@lru_cache(maxsize=128)
def get_player_by_hostname(hostname):
    """Cached function to get player by hostname"""
    return Player.query.filter_by(hostname=hostname).first()

# Clear cache when players are modified
def clear_player_cache():
    get_player_by_hostname.cache_clear()

# Optimized API endpoint with caching
@app.route('/api/playlists', methods=['GET'])
def get_playlists():
    hostname = request.args.get('hostname')
    quickconnect_code = request.args.get('quickconnect_code')

    # Validate the parameters
    # Validate parameters early
    if not hostname or not quickconnect_code:
        return jsonify({'error': 'Hostname and quick connect code are required'}), 400

    # Find the player by hostname and verify the quickconnect code
    player = Player.query.filter_by(hostname=hostname).first()
    if not player or not bcrypt.check_password_hash(player.quickconnect_password, quickconnect_code):
        return jsonify({'error': 'Invalid hostname or quick connect code'}), 404

    # Check if player is locked to a group
    if player.locked_to_group_id:
        # Get content for all players in the group to ensure shared content
        group_players = player.locked_to_group.players
        player_ids = [p.id for p in group_players]

        # Use the first occurrence of each file for the playlist
        content_query = (
            db.session.query(
                Content.file_name,
                db.func.min(Content.id).label('id'),
                db.func.min(Content.duration).label('duration')
            )
            .filter(Content.player_id.in_(player_ids))
            .group_by(Content.file_name)
        )

        content = db.session.query(Content).filter(
            Content.id.in_([c.id for c in content_query])
        ).all()
    else:
        # Get player's individual content
        content = Content.query.filter_by(player_id=player.id).all()

    playlist = [
        {
            'file_name': media.file_name,
            'url': f"http://{request.host}/media/{media.file_name}",
            'duration': media.duration
        }
        for media in content
    ]

    # Return the playlist, version, and hashed quickconnect code
    return jsonify({
        'playlist': playlist,
        'playlist_version': player.playlist_version,
        'hashed_quickconnect': player.quickconnect_password
    })

@app.route('/media/<path:filename>')
def media(filename):
    return send_from_directory(app.config['UPLOAD_FOLDER'], filename)

@app.context_processor
def inject_theme():
    if current_user.is_authenticated:
        theme = current_user.theme
    else:
        theme = 'light'
    return dict(theme=theme)

@app.route('/group/create', methods=['GET', 'POST'])
@login_required
@admin_required
def create_group():
    if request.method == 'POST':
        group_name = request.form['name']
        player_ids = request.form.getlist('players')
        orientation = request.form.get('orientation', 'Landscape')
        create_group_util(group_name, player_ids, orientation)
        flash(f'Group "{group_name}" created successfully.', 'success')
        return redirect(url_for('dashboard'))
    players = Player.query.all()
    return render_template('create_group.html', players=players)

@app.route('/group/<int:group_id>/manage')
@login_required
@admin_required
def manage_group(group_id):
    group = Group.query.get_or_404(group_id)
    content = get_group_content(group_id)
    # Debug content ordering
    print("Group content positions before sorting:", [(c.id, c.file_name, c.position) for c in content])
    content = sorted(content, key=lambda c: c.position)
    print("Group content positions after sorting:", [(c.id, c.file_name, c.position) for c in content])
    return render_template('manage_group.html', group=group, content=content)

@app.route('/group/<int:group_id>/edit', methods=['GET', 'POST'])
@login_required
@admin_required
def edit_group(group_id):
    group = Group.query.get_or_404(group_id)
    if request.method == 'POST':
        name = request.form['name']
        player_ids = request.form.getlist('players')
        orientation = request.form.get('orientation', group.orientation)
        edit_group_util(group_id, name, player_ids, orientation)
        flash(f'Group "{name}" updated successfully.', 'success')
        return redirect(url_for('dashboard'))
    players = Player.query.all()
    return render_template('edit_group.html', group=group, players=players)

@app.route('/group/<int:group_id>/delete', methods=['POST'])
@login_required
@admin_required
def delete_group(group_id):
    group = Group.query.get_or_404(group_id)
    group_name = group.name
    delete_group_util(group_id)
    flash(f'Group "{group_name}" deleted successfully.', 'success')
    return redirect(url_for('dashboard'))

@app.route('/group/<int:group_id>/fullscreen', methods=['GET'])
@login_required
def group_fullscreen(group_id):
    group = Group.query.get_or_404(group_id)
    content = Content.query.filter(Content.player_id.in_([player.id for player in group.players])).order_by(Content.position).all()
    return render_template('group_fullscreen.html', group=group, content=content)

@app.route('/group/<int:group_id>/media/<int:content_id>/edit', methods=['POST'])
@login_required
@admin_required
def edit_group_media_route(group_id, content_id):
    new_duration = int(request.form['duration'])
    success = edit_group_media(group_id, content_id, new_duration)

    if success:
        flash('Media duration updated successfully.', 'success')
    else:
        flash('Error updating media duration.', 'danger')

    return redirect(url_for('manage_group', group_id=group_id))

@app.route('/group/<int:group_id>/media/<int:content_id>/delete', methods=['POST'])
@login_required
@admin_required
def delete_group_media_route(group_id, content_id):
    success = delete_group_media(group_id, content_id)

    if success:
        flash('Media deleted successfully.', 'success')
    else:
        flash('Error deleting media.', 'danger')

    return redirect(url_for('manage_group', group_id=group_id))

@app.route('/group/<int:group_id>/bulk_delete', methods=['POST'])
@login_required
@admin_required
def bulk_delete_group_content(group_id):
    """Bulk delete selected media files from group"""
    group = Group.query.get_or_404(group_id)
    selected_content_ids = request.form.getlist('selected_content')

    if not selected_content_ids:
        flash('No media files selected for deletion.', 'warning')
        return redirect(url_for('manage_group', group_id=group_id))

    try:
        deleted_files = []
        deleted_count = 0
        player_ids = [player.id for player in group.players]

        for content_id in selected_content_ids:
            content = Content.query.filter(
                Content.id == content_id,
                Content.player_id.in_(player_ids)
            ).first()
    # Use cached function for better performance
    player = get_player_by_hostname(hostname)
    if not player:
        return jsonify({'error': 'Player not found'}), 404

            if content:
                # Delete file from filesystem using absolute path
                upload_folder = app.config['UPLOAD_FOLDER']
                if not os.path.isabs(upload_folder):
                    upload_folder = os.path.abspath(upload_folder)
                file_path = os.path.join(upload_folder, content.file_name)

                if os.path.exists(file_path):
                    try:
                        os.remove(file_path)
                        deleted_files.append(content.file_name)
                        print(f"Deleted file: {file_path}")
                    except OSError as e:
                        print(f"Error deleting file {file_path}: {e}")

                # Delete from database
                db.session.delete(content)
                deleted_count += 1
    # Verify quickconnect code
    if not bcrypt.check_password_hash(player.quickconnect_password, quickconnect_code):
        return jsonify({'error': 'Invalid credentials'}), 401

    # Optimized content query
    if player.locked_to_group_id:
        # More efficient group content query
        content = db.session.query(Content).join(Player).filter(
            Player.locked_to_group_id == player.locked_to_group_id
        ).distinct(Content.file_name).order_by(Content.position).all()
    else:
        # Get player's individual content with limit
        content = Content.query.filter_by(player_id=player.id).order_by(Content.position).all()

        # Update playlist version for all players in the group
        for player in group.players:
            player.playlist_version += 1

        db.session.commit()

        flash(f'Successfully deleted {deleted_count} media file(s) from group. All player playlists updated.', 'success')
    # Build playlist efficiently
    playlist = []
    for media in content:
        playlist.append({
            'file_name': media.file_name,
            'url': f"http://{request.host}/media/{media.file_name}",
            'duration': media.duration
        })

    # Force garbage collection for memory management
    gc.collect()

    return jsonify({
        'playlist': playlist,
        'playlist_version': player.playlist_version,
        'hashed_quickconnect': player.quickconnect_password
    })

    except Exception as e:
        db.session.rollback()
        print(f"Error in group bulk delete: {e}")
        flash('An error occurred while deleting media files.', 'danger')

    return redirect(url_for('manage_group', group_id=group_id))
    app.logger.error(f"API Error: {str(e)}")
    return jsonify({'error': 'Internal server error'}), 500

# Optimized media serving with proper caching
@app.route('/media/<path:filename>')
def media(filename):
    try:
        response = send_from_directory(app.config['UPLOAD_FOLDER'], filename)
        # Add caching headers for better performance
        response.cache_control.max_age = 86400  # Cache for 24 hours
        response.cache_control.public = True
        return response
    except Exception as e:
        app.logger.error(f"Media serving error: {str(e)}")
        return jsonify({'error': 'File not found'}), 404

# Optimized playlist version check
@app.route('/api/playlist_version', methods=['GET'])
def get_playlist_version():
    hostname = request.args.get('hostname')
    quickconnect_code = request.args.get('quickconnect_code')

    # Validate the parameters
    if not hostname or not quickconnect_code:
        return jsonify({'error': 'Hostname and quick connect code are required'}), 400

    # Find the player by hostname and verify the quickconnect code
    player = Player.query.filter_by(hostname=hostname).first()
    if not player or not bcrypt.check_password_hash(player.quickconnect_password, quickconnect_code):
        return jsonify({'error': 'Invalid hostname or quick connect code'}), 404
    try:
        # Use cached function
        player = get_player_by_hostname(hostname)
        if not player or not bcrypt.check_password_hash(player.quickconnect_password, quickconnect_code):
            return jsonify({'error': 'Invalid credentials'}), 401

    # Return the playlist version and hashed quickconnect code
    return jsonify({
        'playlist_version': player.playlist_version,
        'hashed_quickconnect': player.quickconnect_password
    })

@app.route('/api/system_info', methods=['GET'])
@login_required
@admin_required
def api_system_info():
    """API endpoint to get real-time system information"""
    system_info = get_system_info()
    if system_info:
        return jsonify(system_info)
    else:
        return jsonify({'error': 'Could not retrieve system information'}), 500
        return jsonify({
            'playlist_version': player.playlist_version,
            'hashed_quickconnect': player.quickconnect_password
        })
    except Exception as e:
        app.logger.error(f"Version check error: {str(e)}")
        return jsonify({'error': 'Internal server error'}), 500

@app.route('/player/<int:player_id>/update_order', methods=['POST'])
@login_required

@@ -853,6 +703,218 @@ if not app.debug or os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
        db.create_all()
        create_default_user(db, User, bcrypt)

# Performance monitoring functions
def get_system_stats():
    """Get current system performance statistics"""
    try:
        cpu_percent = psutil.cpu_percent(interval=1)
        memory = psutil.virtual_memory()
        disk = psutil.disk_usage('/')

        return {
            'cpu_percent': cpu_percent,
            'memory_percent': memory.percent,
            'memory_used_mb': memory.used / (1024 * 1024),
            'memory_total_mb': memory.total / (1024 * 1024),
            'disk_percent': disk.percent,
            'disk_used_gb': disk.used / (1024 * 1024 * 1024),
            'disk_total_gb': disk.total / (1024 * 1024 * 1024),
            'timestamp': time.time()
        }
    except Exception as e:
        print(f"Error getting system stats: {e}")
        return None

# Performance monitoring endpoint
@app.route('/api/performance', methods=['GET'])
@login_required
def get_performance_stats():
    """API endpoint to get real-time performance statistics"""
    stats = get_system_stats()
    if stats:
        return jsonify(stats)
    else:
        return jsonify({'error': 'Unable to get system stats'}), 500

# Enhanced upload endpoint with monitoring
@app.route('/upload_content_monitored', methods=['POST'])
@login_required
@admin_required
def upload_content_monitored():
    """Enhanced upload endpoint with performance monitoring"""
    start_time = time.time()
    start_stats = get_system_stats()

    target_type = request.form.get('target_type')
    target_id = request.form.get('target_id')
    files = request.files.getlist('files')
    duration = int(request.form['duration'])
    return_url = request.form.get('return_url')
    media_type = request.form['media_type']

    print(f"=== UPLOAD MONITORING START ===")
    print(f"Target Type: {target_type}, Target ID: {target_id}, Media Type: {media_type}")
    print(f"Number of files: {len(files)}")
    print(f"Start CPU: {start_stats['cpu_percent']}%, Memory: {start_stats['memory_percent']}%")

    if not target_type or not target_id:
        flash('Please select a target type and target ID.', 'danger')
        return redirect(url_for('upload_content'))

    # Monitor during file processing
    def monitor_upload():
        """Background monitoring thread"""
        while True:
            stats = get_system_stats()
            if stats:
                print(f"[MONITOR] CPU: {stats['cpu_percent']}%, Memory: {stats['memory_percent']}%, Time: {time.time() - start_time:.1f}s")
            time.sleep(2)

    # Start monitoring thread
    monitor_thread = threading.Thread(target=monitor_upload, daemon=True)
    monitor_thread.start()

    # Process uploaded files and get results
    results = process_uploaded_files(app, files, media_type, duration, target_type, target_id)

    end_time = time.time()
    end_stats = get_system_stats()
    total_time = end_time - start_time

    print(f"=== UPLOAD MONITORING END ===")
    print(f"Total processing time: {total_time:.2f} seconds")
    print(f"End CPU: {end_stats['cpu_percent']}%, Memory: {end_stats['memory_percent']}%")
    print(f"CPU change: {end_stats['cpu_percent'] - start_stats['cpu_percent']:.1f}%")
    print(f"Memory change: {end_stats['memory_percent'] - start_stats['memory_percent']:.1f}%")

    # Log performance metrics
    log_action(f"Upload completed: {len(files)} files, {total_time:.2f}s, CPU: {start_stats['cpu_percent']}% → {end_stats['cpu_percent']}%")

    return redirect(return_url)

@app.route('/player/<int:player_id>/bulk_delete_content', methods=['POST'])
@login_required
@admin_required
def bulk_delete_player_content(player_id):
    """Bulk delete content for a specific player"""
    if not request.is_json:
        return jsonify({'success': False, 'error': 'Invalid request format'}), 400

    player = Player.query.get_or_404(player_id)
    content_ids = request.json.get('content_ids', [])

    if not content_ids:
        return jsonify({'success': False, 'error': 'No content IDs provided'}), 400

    try:
        # Get all content items to delete
        content_items = Content.query.filter(
            Content.id.in_(content_ids),
            Content.player_id == player_id
        ).all()

        if not content_items:
            return jsonify({'success': False, 'error': 'No valid content found to delete'}), 404

        # Delete the content items
        deleted_count = 0
        for content in content_items:
            # Delete the actual file from filesystem
            file_path = os.path.join(app.config['UPLOAD_FOLDER'], content.file_name)
            if os.path.exists(file_path):
                try:
                    os.remove(file_path)
                except OSError as e:
                    app.logger.warning(f"Could not delete file {file_path}: {e}")

            # Delete from database
            db.session.delete(content)
            deleted_count += 1

        # Update playlist version for the player
        player.playlist_version += 1
        db.session.commit()

        # Clear cache
        clear_player_cache()

        # Log the action
        log_action(f"Bulk deleted {deleted_count} content items from player {player.username}")

        return jsonify({
            'success': True,
            'deleted_count': deleted_count,
            'new_playlist_version': player.playlist_version
        })

    except Exception as e:
        db.session.rollback()
        app.logger.error(f"Error in bulk delete: {str(e)}")
        return jsonify({'success': False, 'error': 'Database error occurred'}), 500

@app.route('/group/<int:group_id>/bulk_delete_content', methods=['POST'])
@login_required
@admin_required
def bulk_delete_group_content(group_id):
    """Bulk delete content for a specific group"""
    if not request.is_json:
        return jsonify({'success': False, 'error': 'Invalid request format'}), 400

    group = Group.query.get_or_404(group_id)
    content_ids = request.json.get('content_ids', [])

    if not content_ids:
        return jsonify({'success': False, 'error': 'No content IDs provided'}), 400

    try:
        # Get player IDs in the group
        player_ids = [p.id for p in group.players]

        # Get all content items to delete that belong to players in this group
        content_items = Content.query.filter(
            Content.id.in_(content_ids),
            Content.player_id.in_(player_ids)
        ).all()

        if not content_items:
            return jsonify({'success': False, 'error': 'No valid content found to delete'}), 404

        # Delete the content items
        deleted_count = 0
        for content in content_items:
            # Delete the actual file from filesystem
            file_path = os.path.join(app.config['UPLOAD_FOLDER'], content.file_name)
            if os.path.exists(file_path):
                try:
                    os.remove(file_path)
                except OSError as e:
                    app.logger.warning(f"Could not delete file {file_path}: {e}")

            # Delete from database
            db.session.delete(content)
            deleted_count += 1

        # Update playlist version for all players in the group
        for player in group.players:
            player.playlist_version += 1

        db.session.commit()

        # Clear cache
        clear_player_cache()

        # Log the action
        log_action(f"Bulk deleted {deleted_count} content items from group {group.name}")

        return jsonify({
            'success': True,
            'deleted_count': deleted_count
        })

    except Exception as e:
        db.session.rollback()
        app.logger.error(f"Error in group bulk delete: {str(e)}")
        return jsonify({'success': False, 'error': 'Database error occurred'}), 500

# Add this at the end of app.py
if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0', port=5000)

@@ -1,86 +0,0 @@
"""
PPTX to PDF converter using LibreOffice for high-quality conversion
This module provides the essential function to convert PowerPoint presentations to PDF
using LibreOffice headless mode for professional-grade quality.

The converted PDF is then processed by the main upload workflow for 4K image generation.
"""

import os
import subprocess
import logging

# Set up logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def pptx_to_pdf_libreoffice(pptx_path, output_dir):
    """
    Convert PPTX to PDF using LibreOffice for highest quality.

    This function is the core component of the PPTX processing workflow:
    PPTX → PDF (this function) → 4K JPG images (handled in uploads.py)

    Args:
        pptx_path (str): Path to the PPTX file
        output_dir (str): Directory to save the PDF

    Returns:
        str: Path to the generated PDF file, or None if conversion failed
    """
    try:
        # Ensure output directory exists
        os.makedirs(output_dir, exist_ok=True)

        # Use LibreOffice to convert PPTX to PDF
        cmd = [
            'libreoffice',
            '--headless',
            '--convert-to', 'pdf',
            '--outdir', output_dir,
            pptx_path
        ]

        logger.info(f"Converting PPTX to PDF using LibreOffice: {pptx_path}")
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=120)

        if result.returncode != 0:
            logger.error(f"LibreOffice conversion failed: {result.stderr}")
            return None

        # Find the generated PDF file
        base_name = os.path.splitext(os.path.basename(pptx_path))[0]
        pdf_path = os.path.join(output_dir, f"{base_name}.pdf")

        if os.path.exists(pdf_path):
            logger.info(f"PDF conversion successful: {pdf_path}")
            return pdf_path
        else:
            logger.error(f"PDF file not found after conversion: {pdf_path}")
            return None

    except subprocess.TimeoutExpired:
        logger.error("LibreOffice conversion timed out (120s)")
        return None
    except Exception as e:
        logger.error(f"Error in PPTX to PDF conversion: {e}")
        return None


if __name__ == "__main__":
    # Test the converter
    import sys
    if len(sys.argv) > 1:
        test_pptx = sys.argv[1]
        if os.path.exists(test_pptx):
            output_dir = "test_output"
            pdf_result = pptx_to_pdf_libreoffice(test_pptx, output_dir)
            if pdf_result:
                print(f"Successfully converted PPTX to PDF: {pdf_result}")
            else:
                print("PPTX to PDF conversion failed")
        else:
            print(f"File not found: {test_pptx}")
    else:
        print("Usage: python pptx_converter.py <pptx_file>")

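The docstring above notes that the second half of the workflow (PDF → 4K JPG) lives in uploads.py, which is not included in this diff. For illustration only, a minimal sketch of that step using poppler's `pdftoppm` (installed by the Dockerfile); the function name, output naming and resolution target are assumptions of the sketch, not the project's actual implementation.

```python
# Illustrative sketch of the PDF -> JPG step; the project's real version is in uploads.py.
import os
import subprocess


def pdf_to_jpgs(pdf_path, output_dir, width_px=3840):
    """Render each PDF page to a JPG roughly 4K wide using poppler's pdftoppm."""
    os.makedirs(output_dir, exist_ok=True)
    prefix = os.path.join(output_dir, os.path.splitext(os.path.basename(pdf_path))[0])
    cmd = [
        'pdftoppm',
        '-jpeg',                        # emit JPEG instead of PPM
        '-scale-to-x', str(width_px),   # scale pages to ~4K width
        '-scale-to-y', '-1',            # -1 keeps the page aspect ratio
        pdf_path,
        prefix,                         # pdftoppm appends page numbers to this prefix
    ]
    subprocess.run(cmd, check=True, timeout=120)
    return sorted(f for f in os.listdir(output_dir) if f.endswith('.jpg'))
```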
@@ -1,64 +0,0 @@
#!/bin/bash
# DigiServer Docker Cleanup Script
# Version: 1.1.0

set -e

echo "🧹 DigiServer Docker Cleanup"
echo "============================"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Function to print colored output
print_status() {
    echo -e "${BLUE}[INFO]${NC} $1"
}

print_success() {
    echo -e "${GREEN}[SUCCESS]${NC} $1"
}

print_warning() {
    echo -e "${YELLOW}[WARNING]${NC} $1"
}

print_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}

# Confirm cleanup
print_warning "This will stop and remove all DigiServer containers and images."
print_warning "Your data in the ./data directory will be preserved."
echo ""
read -p "Are you sure you want to continue? (y/N): " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
    print_status "Cleanup cancelled."
    exit 0
fi

# Stop and remove containers
print_status "Stopping DigiServer containers..."
docker compose down

# Remove DigiServer images
print_status "Removing DigiServer images..."
docker rmi digiserver:latest 2>/dev/null || print_warning "DigiServer image not found"

# Clean up unused Docker resources
print_status "Cleaning up unused Docker resources..."
docker system prune -f

# Clean up development cache files
print_status "Cleaning up development cache files..."
find ./app -name "__pycache__" -type d -exec rm -rf {} + 2>/dev/null || true
find ./app -name "*.pyc" -delete 2>/dev/null || true

print_success "Cleanup completed!"
print_status "Data directory preserved at: ./data"
print_status "To redeploy, run: ./deploy-docker.sh"
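Running the script is a one-liner; its filename is not visible in this diff, so cleanup.sh below is an assumption:

    chmod +x cleanup.sh && ./cleanup.sh   # prompts before touching anything; ./data is left intact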
109
deploy-docker.sh
@@ -1,109 +0,0 @@
#!/bin/bash
# DigiServer Docker Deployment Script
# Version: 1.1.0

set -e

echo "🚀 DigiServer Docker Deployment"
echo "================================"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Function to print colored output
print_status() {
    echo -e "${BLUE}[INFO]${NC} $1"
}

print_success() {
    echo -e "${GREEN}[SUCCESS]${NC} $1"
}

print_warning() {
    echo -e "${YELLOW}[WARNING]${NC} $1"
}

print_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}

# Check if Docker is running
if ! docker info >/dev/null 2>&1; then
    print_error "Docker is not running. Please start Docker and try again."
    exit 1
fi

print_status "Docker is running ✓"

# Check if docker compose is available
if ! docker compose version >/dev/null 2>&1; then
    print_error "docker compose is not available. Please install Docker Compose and try again."
    exit 1
fi

print_status "docker compose is available ✓"

# Stop existing containers if running
print_status "Stopping existing containers..."
docker compose down 2>/dev/null || true

# Remove old images (optional)
read -p "Do you want to remove old DigiServer images? (y/N): " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
    print_status "Removing old images..."
    docker image prune -f --filter label=app=digiserver 2>/dev/null || true
    docker rmi digiserver:latest 2>/dev/null || true
fi

# Create data directories if they don't exist
print_status "Creating data directories..."
mkdir -p data/instance data/uploads data/resurse

# Build the Docker image
print_status "Building DigiServer Docker image..."
docker compose build

# Check if build was successful
if [ $? -eq 0 ]; then
    print_success "Docker image built successfully!"
else
    print_error "Docker build failed!"
    exit 1
fi

# Start the containers
print_status "Starting DigiServer containers..."
docker compose up -d

# Wait a moment for containers to start
sleep 10

# Check if containers are running
if docker compose ps | grep -q "Up"; then
    print_success "DigiServer is now running!"
    echo ""
    echo "🌐 Access your DigiServer at: http://localhost:8880"
    echo "📊 Admin Panel: http://localhost:8880/admin"
    echo ""
    echo "Default credentials:"
    echo "Username: admin"
    echo "Password: Initial01!"
    echo ""
    print_warning "Please change the default password after first login!"
    echo ""
    echo "📝 To view logs: docker compose logs -f"
    echo "🛑 To stop: docker compose down"
    echo "📊 To check status: docker compose ps"
else
    print_error "Failed to start DigiServer containers!"
    echo ""
    echo "Check logs with: docker compose logs"
    exit 1
fi

print_success "Deployment completed successfully! 🎉"
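Once the script reports success, the Compose health check curls the app root on port 5000 inside the container; the same probe works from the host against the published port (8880 in this script, though the updated docker-compose.yml in this diff publishes port 80 instead):

    curl -fsS http://localhost:8880/ >/dev/null && echo "DigiServer is answering"
    docker compose ps        # the service should report an Up / healthy state
    docker compose logs -f   # follow application logs while testing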
@@ -1,32 +0,0 @@
# Development Docker Compose Configuration
# Use this for development with hot reloading

services:
  digiserver-dev:
    build: .
    image: digiserver:dev
    container_name: digiserver-dev
    ports:
      - "5000:5000"
    environment:
      - FLASK_APP=app.py
      - FLASK_RUN_HOST=0.0.0.0
      - FLASK_ENV=development
      - FLASK_DEBUG=1
      - ADMIN_USER=admin
      - ADMIN_PASSWORD=Initial01!
      - SECRET_KEY=Ma_Duc_Dupa_Merele_Lui_Ana
    volumes:
      # Mount app code for hot reloading
      - ./app:/app
      # Persistent data volumes
      - ./data/instance:/app/instance
      - ./data/uploads:/app/static/uploads
      - ./data/resurse:/app/static/resurse
    restart: unless-stopped
    networks:
      - digiserver-dev-network

networks:
  digiserver-dev-network:
    driver: bridge
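To use the development definition alongside the production one, point Compose at it explicitly; the filename docker-compose.dev.yml is assumed here, since the diff shows only the file's contents:

    docker compose -f docker-compose.dev.yml up --build
    # Edits under ./app are picked up through the bind mount without rebuilding the image.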
@@ -1,35 +1,33 @@
# DigiServer - Digital Signage Management Platform
# Version: 1.1.0
# Build Date: 2025-06-29

version: "3.8"
services:
  digiserver:
  web:
    build: .
    image: digiserver:latest
    container_name: digiserver
    image: digi-server:latest
    ports:
      - "8880:5000"
      - "80:5000"
    environment:
      - FLASK_APP=app.py
      - FLASK_RUN_HOST=0.0.0.0
      - DEFAULT_USER=admin
      - DEFAULT_PASSWORD=Initial01!
      - SECRET_KEY=Ma_Duc_Dupa_Merele_Lui_Ana
      - FLASK_APP
      - FLASK_RUN_HOST
      - ADMIN_USER
      - ADMIN_PASSWORD
      - SECRET_KEY
    env_file:
      - .env
    volumes:
      # Persistent data volumes
      - ./data/instance:/app/instance
      - ./data/uploads:/app/static/uploads
      - ./data/resurse:/app/static/resurse
      - /home/pi/Desktop/digi-server/instance:/app/instance
      - /home/pi/Desktop/digi-server/uploads:/app/static/uploads
    restart: unless-stopped
    deploy:
      resources:
        limits:
          memory: 512M
          cpus: '1.0'
        reservations:
          memory: 128M
          cpus: '0.25'
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:5000/"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
    networks:
      - digiserver-network

networks:
  digiserver-network:
    driver: bridge
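The reworked service block drops the hard-coded credentials in favour of bare variable names plus env_file: .env, so values are resolved at deploy time. Two standard Compose commands make that resolution visible before anything starts (no project-specific assumptions here):

    docker compose config          # prints the fully resolved file, including the environment block
    ADMIN_PASSWORD='change-me-now' docker compose up -d   # shell values take precedence over .env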
25
enviroment.txt
Normal file
@@ -0,0 +1,25 @@
python3 -m venv digiscreen

source digiscreen/bin/activate

pip install flask sqlalchemy flask-sqlalchemy

pip install flask-login flask-bcrypt

python3 setup.py sdist

python3 setup.py bdist_wheel flask


for installing all the requirements
pip install -r requirements.txt



sudo apt-get update
sudo apt-get install -y \
    ffmpeg \
    libpoppler-cpp-dev \
    poppler-utils \
    libreoffice \
    libmagic1
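These apt packages back the media pipeline seen elsewhere in the diff: ffmpeg for video conversion, poppler for pdf2image, LibreOffice for the PPTX converter. A quick sanity check after installation:

    ffmpeg -version | head -n 1
    pdftoppm -v               # ships with poppler-utils, used by pdf2image
    libreoffice --version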
BIN  instance/dashboard.db  (new file, binary file not shown)
BIN  models/__pycache__/__init__.cpython-311.pyc  (new file, binary file not shown)
BIN  models/__pycache__/content.cpython-311.pyc  (new file, binary file not shown)
BIN  models/__pycache__/create_default_user.cpython-311.pyc  (new file, binary file not shown)
BIN  models/__pycache__/group.cpython-311.pyc  (new file, binary file not shown)
BIN  models/__pycache__/player.cpython-311.pyc  (new file, binary file not shown)
BIN  models/__pycache__/server_log.cpython-311.pyc  (new file, binary file not shown)
BIN  models/__pycache__/user.cpython-311.pyc  (new file, binary file not shown)
@@ -21,6 +21,7 @@ greenlet==3.1.1
# File Processing
pdf2image==1.17.0
PyPDF2==3.0.1
python-pptx==0.6.21
Pillow==10.0.1
cairosvg==2.7.0
ffmpeg-python==0.2.0
@@ -38,7 +39,6 @@ gevent==23.9.1
# Monitoring & Performance
prometheus-flask-exporter==0.22.4
sentry-sdk[flask]==1.40.0
psutil==6.1.0

# Utilities
typing_extensions==4.12.2
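The new psutil pin supports the /api/performance endpoint that the reworked admin and upload templates poll further down. The endpoint itself is not shown in this diff, so the following is only a sketch, assuming the route path and the JSON keys the templates read (cpu_percent, memory_percent, disk_percent):

    import psutil
    from flask import Blueprint, jsonify

    performance_api = Blueprint("performance_api", __name__)

    @performance_api.route("/api/performance")
    def api_performance():
        """Live metrics for the dashboard gauges; key names match what the templates expect."""
        memory = psutil.virtual_memory()
        disk = psutil.disk_usage("/")
        return jsonify({
            "cpu_percent": psutil.cpu_percent(interval=0.1),
            "memory_percent": memory.percent,
            "disk_percent": disk.percent,
        })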
(image diff: before and after both 153 KiB; preview not shown)
(image diff: before and after both 52 KiB; preview not shown)
(image added: 361 KiB; preview not shown)
BIN  static/uploads/SampleVideo_1280x720_1mb.mp4  (new file, binary file not shown)
BIN  static/uploads/wp2782770-1846651530.jpg  (new file, binary file not shown)
(image added: 794 KiB; preview not shown)
@@ -207,94 +207,59 @@
|
||||
<p><strong>Date of Build:</strong> {{ build_date }}</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- System Monitoring Card -->
|
||||
{% if system_info %}
|
||||
<div class="card mb-4 {{ 'dark-mode' if theme == 'dark' else '' }}">
|
||||
<div class="card-header">
|
||||
<h2>📊 System Monitoring</h2>
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<div class="row">
|
||||
<!-- CPU Information -->
|
||||
<div class="col-md-3 col-6 text-center mb-3">
|
||||
<div class="h6">CPU Usage</div>
|
||||
<div class="progress mb-2" style="height: 25px;">
|
||||
<div class="progress-bar
|
||||
{% if system_info.cpu_percent < 50 %}bg-success
|
||||
{% elif system_info.cpu_percent < 80 %}bg-warning
|
||||
{% else %}bg-danger{% endif %}"
|
||||
role="progressbar"
|
||||
style="width: {{ system_info.cpu_percent }}%;">
|
||||
{{ system_info.cpu_percent }}%
|
||||
</div>
|
||||
</div>
|
||||
<small class="text-muted">{{ system_info.cpu_count }} cores available</small>
|
||||
</div>
|
||||
|
||||
<!-- Memory Information -->
|
||||
<div class="col-md-3 col-6 text-center mb-3">
|
||||
<div class="h6">Memory Usage</div>
|
||||
<div class="progress mb-2" style="height: 25px;">
|
||||
<div class="progress-bar
|
||||
{% if system_info.memory_percent < 60 %}bg-success
|
||||
{% elif system_info.memory_percent < 85 %}bg-warning
|
||||
{% else %}bg-danger{% endif %}"
|
||||
role="progressbar"
|
||||
style="width: {{ system_info.memory_percent }}%;">
|
||||
{{ system_info.memory_percent }}%
|
||||
</div>
|
||||
</div>
|
||||
<small class="text-muted">{{ system_info.memory_used }}GB / {{ system_info.memory_total }}GB</small>
|
||||
</div>
|
||||
|
||||
<!-- Disk Information -->
|
||||
<div class="col-md-3 col-6 text-center mb-3">
|
||||
<div class="h6">Disk Usage</div>
|
||||
<div class="progress mb-2" style="height: 25px;">
|
||||
<div class="progress-bar
|
||||
{% if system_info.disk_percent < 70 %}bg-success
|
||||
{% elif system_info.disk_percent < 90 %}bg-warning
|
||||
{% else %}bg-danger{% endif %}"
|
||||
role="progressbar"
|
||||
style="width: {{ system_info.disk_percent }}%;">
|
||||
{{ system_info.disk_percent }}%
|
||||
</div>
|
||||
</div>
|
||||
<small class="text-muted">{{ system_info.disk_used }}GB / {{ system_info.disk_total }}GB</small>
|
||||
</div>
|
||||
|
||||
<!-- Upload Folder Size -->
|
||||
<div class="col-md-3 col-6 text-center mb-3">
|
||||
<div class="h6">Media Storage</div>
|
||||
<div class="text-primary display-6">{{ system_info.upload_folder_size }}GB</div>
|
||||
<small class="text-muted">Total media files</small>
|
||||
<!-- Performance Monitoring Dashboard -->
|
||||
<div class="card mb-4 {{ 'dark-mode' if theme == 'dark' else '' }}">
|
||||
<div class="card-header bg-success text-white">
|
||||
<h2>Performance Monitor</h2>
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<div class="row">
|
||||
<div class="col-md-4">
|
||||
<div class="text-center">
|
||||
<h5>CPU Usage</h5>
|
||||
<div id="cpu-gauge" class="progress mb-2">
|
||||
<div class="progress-bar bg-info" role="progressbar" style="width: 0%"></div>
|
||||
</div>
|
||||
<small id="cpu-text">0%</small>
|
||||
</div>
|
||||
|
||||
<!-- System Details -->
|
||||
<div class="row mt-3">
|
||||
<div class="col-12">
|
||||
<hr>
|
||||
<div class="row text-center">
|
||||
<div class="col-md-4 col-12 mb-2">
|
||||
<strong>Available Disk Space:</strong><br>
|
||||
<span class="text-success">{{ system_info.disk_free }}GB free</span>
|
||||
</div>
|
||||
<div class="col-md-4 col-12 mb-2">
|
||||
<strong>Total Disk Space:</strong><br>
|
||||
<span class="text-info">{{ system_info.disk_total }}GB total</span>
|
||||
</div>
|
||||
<div class="col-md-4 col-12 mb-2">
|
||||
<strong>Last Updated:</strong><br>
|
||||
<span class="text-muted" id="last-update-admin">Just now</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-4">
|
||||
<div class="text-center">
|
||||
<h5>Memory Usage</h5>
|
||||
<div id="memory-gauge" class="progress mb-2">
|
||||
<div class="progress-bar bg-warning" role="progressbar" style="width: 0%"></div>
|
||||
</div>
|
||||
<small id="memory-text">0%</small>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-4">
|
||||
<div class="text-center">
|
||||
<h5>Disk Usage</h5>
|
||||
<div id="disk-gauge" class="progress mb-2">
|
||||
<div class="progress-bar bg-danger" role="progressbar" style="width: 0%"></div>
|
||||
</div>
|
||||
<small id="disk-text">0%</small>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row mt-3">
|
||||
<div class="col-12">
|
||||
<button id="toggle-monitor" class="btn btn-primary">Start Monitoring</button>
|
||||
<button id="reset-stats" class="btn btn-secondary">Reset Stats</button>
|
||||
<span id="monitor-status" class="ms-3 text-muted">Monitoring stopped</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row mt-3">
|
||||
<div class="col-12">
|
||||
<h6>Performance Log:</h6>
|
||||
<div id="perf-log" style="height: 100px; overflow-y: scroll; background-color: #f8f9fa; padding: 10px; border-radius: 5px; font-family: monospace; font-size: 12px;">
|
||||
<div class="text-muted">Performance monitoring ready...</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -316,76 +281,115 @@
|
||||
}, 5000);
|
||||
}
|
||||
|
||||
// Auto-refresh system monitoring every 15 seconds
|
||||
{% if system_info %}
|
||||
function updateAdminSystemInfo() {
|
||||
fetch('/api/system_info')
|
||||
// Performance monitoring functionality
|
||||
let monitoringInterval = null;
|
||||
let isMonitoring = false;
|
||||
let maxCpu = 0, maxMemory = 0;
|
||||
|
||||
function updateGauge(elementId, textId, value, color) {
|
||||
const gauge = document.querySelector(`#${elementId} .progress-bar`);
|
||||
const text = document.getElementById(textId);
|
||||
gauge.style.width = `${value}%`;
|
||||
gauge.className = `progress-bar ${color}`;
|
||||
text.textContent = `${value.toFixed(1)}%`;
|
||||
}
|
||||
|
||||
function logPerformance(message) {
|
||||
const log = document.getElementById('perf-log');
|
||||
const timestamp = new Date().toLocaleTimeString();
|
||||
const logEntry = document.createElement('div');
|
||||
logEntry.innerHTML = `<span class="text-muted">[${timestamp}]</span> ${message}`;
|
||||
log.appendChild(logEntry);
|
||||
log.scrollTop = log.scrollHeight;
|
||||
|
||||
// Keep only last 50 entries
|
||||
if (log.children.length > 50) {
|
||||
log.removeChild(log.firstChild);
|
||||
}
|
||||
}
|
||||
|
||||
function updatePerformanceStats() {
|
||||
fetch('/api/performance')
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
if (data.error) {
|
||||
console.warn('Could not fetch system info:', data.error);
|
||||
logPerformance(`Error: ${data.error}`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Update progress bars and their colors
|
||||
const progressBars = document.querySelectorAll('.progress-bar');
|
||||
// Update gauges
|
||||
updateGauge('cpu-gauge', 'cpu-text', data.cpu_percent, 'bg-info');
|
||||
updateGauge('memory-gauge', 'memory-text', data.memory_percent, 'bg-warning');
|
||||
updateGauge('disk-gauge', 'disk-text', data.disk_percent, 'bg-danger');
|
||||
|
||||
if (progressBars.length >= 3) {
|
||||
// CPU Bar
|
||||
progressBars[0].style.width = data.cpu_percent + '%';
|
||||
progressBars[0].textContent = data.cpu_percent + '%';
|
||||
progressBars[0].className = 'progress-bar ' +
|
||||
(data.cpu_percent < 50 ? 'bg-success' :
|
||||
data.cpu_percent < 80 ? 'bg-warning' : 'bg-danger');
|
||||
|
||||
// Memory Bar
|
||||
progressBars[1].style.width = data.memory_percent + '%';
|
||||
progressBars[1].textContent = data.memory_percent + '%';
|
||||
progressBars[1].className = 'progress-bar ' +
|
||||
(data.memory_percent < 60 ? 'bg-success' :
|
||||
data.memory_percent < 85 ? 'bg-warning' : 'bg-danger');
|
||||
|
||||
// Disk Bar
|
||||
progressBars[2].style.width = data.disk_percent + '%';
|
||||
progressBars[2].textContent = data.disk_percent + '%';
|
||||
progressBars[2].className = 'progress-bar ' +
|
||||
(data.disk_percent < 70 ? 'bg-success' :
|
||||
data.disk_percent < 90 ? 'bg-warning' : 'bg-danger');
|
||||
// Track maximum values
|
||||
if (data.cpu_percent > maxCpu) {
|
||||
maxCpu = data.cpu_percent;
|
||||
logPerformance(`New CPU peak: ${maxCpu.toFixed(1)}%`);
|
||||
}
|
||||
if (data.memory_percent > maxMemory) {
|
||||
maxMemory = data.memory_percent;
|
||||
logPerformance(`New Memory peak: ${maxMemory.toFixed(1)}%`);
|
||||
}
|
||||
|
||||
// Update text values
|
||||
const smallTexts = document.querySelectorAll('.text-muted');
|
||||
smallTexts.forEach((text, index) => {
|
||||
if (index === 1) text.textContent = data.memory_used + 'GB / ' + data.memory_total + 'GB';
|
||||
if (index === 2) text.textContent = data.disk_used + 'GB / ' + data.disk_total + 'GB';
|
||||
});
|
||||
|
||||
// Update storage size
|
||||
const storageDisplay = document.querySelector('.display-6');
|
||||
if (storageDisplay) {
|
||||
storageDisplay.textContent = data.upload_folder_size + 'GB';
|
||||
// Log significant changes
|
||||
if (data.cpu_percent > 80) {
|
||||
logPerformance(`<span class="text-danger">High CPU usage: ${data.cpu_percent.toFixed(1)}%</span>`);
|
||||
}
|
||||
|
||||
// Update disk space info
|
||||
const diskFree = document.querySelector('.text-success');
|
||||
const diskTotal = document.querySelector('.text-info');
|
||||
if (diskFree) diskFree.textContent = data.disk_free + 'GB free';
|
||||
if (diskTotal) diskTotal.textContent = data.disk_total + 'GB total';
|
||||
|
||||
// Update timestamp
|
||||
const lastUpdate = document.getElementById('last-update-admin');
|
||||
if (lastUpdate) {
|
||||
lastUpdate.textContent = new Date().toLocaleTimeString();
|
||||
if (data.memory_percent > 80) {
|
||||
logPerformance(`<span class="text-danger">High Memory usage: ${data.memory_percent.toFixed(1)}%</span>`);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
console.warn('Admin system monitoring update failed:', error);
|
||||
logPerformance(`Fetch error: ${error.message}`);
|
||||
});
|
||||
}
|
||||
|
||||
// Update every 15 seconds
|
||||
setInterval(updateAdminSystemInfo, 15000);
|
||||
{% endif %}
|
||||
function toggleMonitoring() {
|
||||
const toggleButton = document.getElementById('toggle-monitor');
|
||||
const statusSpan = document.getElementById('monitor-status');
|
||||
|
||||
if (isMonitoring) {
|
||||
// Stop monitoring
|
||||
clearInterval(monitoringInterval);
|
||||
isMonitoring = false;
|
||||
toggleButton.textContent = 'Start Monitoring';
|
||||
toggleButton.className = 'btn btn-primary';
|
||||
statusSpan.textContent = 'Monitoring stopped';
|
||||
statusSpan.className = 'ms-3 text-muted';
|
||||
logPerformance('Monitoring stopped');
|
||||
} else {
|
||||
// Start monitoring
|
||||
isMonitoring = true;
|
||||
toggleButton.textContent = 'Stop Monitoring';
|
||||
toggleButton.className = 'btn btn-danger';
|
||||
statusSpan.textContent = 'Monitoring active';
|
||||
statusSpan.className = 'ms-3 text-success';
|
||||
logPerformance('Monitoring started');
|
||||
|
||||
// Update immediately and then every 2 seconds
|
||||
updatePerformanceStats();
|
||||
monitoringInterval = setInterval(updatePerformanceStats, 2000);
|
||||
}
|
||||
}
|
||||
|
||||
function resetStats() {
|
||||
maxCpu = 0;
|
||||
maxMemory = 0;
|
||||
const log = document.getElementById('perf-log');
|
||||
log.innerHTML = '<div class="text-muted">Performance log reset...</div>';
|
||||
logPerformance('Stats reset');
|
||||
}
|
||||
|
||||
// Event listeners
|
||||
document.getElementById('toggle-monitor').addEventListener('click', toggleMonitoring);
|
||||
document.getElementById('reset-stats').addEventListener('click', resetStats);
|
||||
|
||||
// Auto-start monitoring when page loads
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
// Initial stats load
|
||||
updatePerformanceStats();
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
@@ -92,20 +92,16 @@
|
||||
<div class="card-body">
|
||||
{% if content %}
|
||||
<!-- Bulk Actions Controls -->
|
||||
<div class="row mb-3">
|
||||
<div class="col-md-6">
|
||||
<div class="form-check">
|
||||
<input class="form-check-input" type="checkbox" id="selectAll">
|
||||
<label class="form-check-label" for="selectAll">
|
||||
Select All
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-6 text-end">
|
||||
<button type="button" class="btn btn-danger" id="bulkDeleteBtn" style="display:none;" onclick="confirmBulkDelete()">
|
||||
<i class="bi bi-trash"></i> Delete Selected
|
||||
</button>
|
||||
<div class="mb-3 d-flex flex-wrap align-items-center gap-2">
|
||||
<div class="form-check">
|
||||
<input class="form-check-input" type="checkbox" id="selectAllGroup">
|
||||
<label class="form-check-label" for="selectAllGroup">
|
||||
Select All
|
||||
</label>
|
||||
</div>
|
||||
<button id="deleteSelectedGroup" class="btn btn-danger" style="display: none;">
|
||||
<i class="bi bi-trash"></i> Delete Selected (<span id="selectedCountGroup">0</span>)
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<ul class="list-group sortable-list" id="groupMediaList">
|
||||
@@ -116,9 +112,7 @@
|
||||
data-position="{{ loop.index0 }}">
|
||||
<!-- Checkbox for bulk selection -->
|
||||
<div class="me-2">
|
||||
<input class="form-check-input media-checkbox"
|
||||
type="checkbox"
|
||||
name="selected_content"
|
||||
<input type="checkbox" class="form-check-input group-media-checkbox"
|
||||
value="{{ media.id }}">
|
||||
</div>
|
||||
|
||||
@@ -245,74 +239,70 @@ document.addEventListener('DOMContentLoaded', function() {
|
||||
});
|
||||
}
|
||||
|
||||
// Bulk selection functionality
|
||||
const selectAllCheckbox = document.getElementById('selectAll');
|
||||
const mediaCheckboxes = document.querySelectorAll('.media-checkbox');
|
||||
const bulkDeleteBtn = document.getElementById('bulkDeleteBtn');
|
||||
// Bulk delete functionality
|
||||
const selectAllGroup = document.getElementById('selectAllGroup');
|
||||
const deleteSelectedGroup = document.getElementById('deleteSelectedGroup');
|
||||
const selectedCountGroup = document.getElementById('selectedCountGroup');
|
||||
const groupMediaCheckboxes = document.querySelectorAll('.group-media-checkbox');
|
||||
|
||||
// Select all functionality
|
||||
if (selectAllCheckbox) {
|
||||
selectAllCheckbox.addEventListener('change', function() {
|
||||
mediaCheckboxes.forEach(checkbox => {
|
||||
checkbox.checked = this.checked;
|
||||
});
|
||||
updateBulkDeleteButton();
|
||||
});
|
||||
// Update selected count and toggle delete button visibility
|
||||
function updateSelectedCount() {
|
||||
const selectedCount = document.querySelectorAll('.group-media-checkbox:checked').length;
|
||||
selectedCountGroup.textContent = selectedCount;
|
||||
deleteSelectedGroup.style.display = selectedCount > 0 ? 'inline-block' : 'none';
|
||||
}
|
||||
|
||||
// Individual checkbox change
|
||||
mediaCheckboxes.forEach(checkbox => {
|
||||
checkbox.addEventListener('change', function() {
|
||||
updateSelectAllState();
|
||||
updateBulkDeleteButton();
|
||||
// Select/Deselect all checkboxes
|
||||
selectAllGroup.addEventListener('change', function() {
|
||||
const isChecked = selectAllGroup.checked;
|
||||
groupMediaCheckboxes.forEach(checkbox => {
|
||||
checkbox.checked = isChecked;
|
||||
});
|
||||
updateSelectedCount();
|
||||
});
|
||||
|
||||
function updateSelectAllState() {
|
||||
const checkedBoxes = Array.from(mediaCheckboxes).filter(cb => cb.checked);
|
||||
|
||||
if (selectAllCheckbox) {
|
||||
selectAllCheckbox.checked = checkedBoxes.length === mediaCheckboxes.length && mediaCheckboxes.length > 0;
|
||||
selectAllCheckbox.indeterminate = checkedBoxes.length > 0 && checkedBoxes.length < mediaCheckboxes.length;
|
||||
}
|
||||
}
|
||||
// Individual checkbox change
|
||||
groupMediaCheckboxes.forEach(checkbox => {
|
||||
checkbox.addEventListener('change', updateSelectedCount);
|
||||
});
|
||||
|
||||
function updateBulkDeleteButton() {
|
||||
const checkedBoxes = Array.from(mediaCheckboxes).filter(cb => cb.checked);
|
||||
if (bulkDeleteBtn) {
|
||||
bulkDeleteBtn.style.display = checkedBoxes.length > 0 ? 'inline-block' : 'none';
|
||||
// Delete selected button click
|
||||
deleteSelectedGroup.addEventListener('click', function() {
|
||||
const selectedIds = Array.from(groupMediaCheckboxes)
|
||||
.filter(checkbox => checkbox.checked)
|
||||
.map(checkbox => checkbox.value);
|
||||
|
||||
if (selectedIds.length === 0) {
|
||||
alert('No media selected for deletion.');
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (confirm(`Are you sure you want to delete ${selectedIds.length} selected media items?`)) {
|
||||
// Send bulk delete request
|
||||
fetch('{{ url_for("bulk_delete_group_content", group_id=group.id) }}', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'X-CSRFToken': '{{ csrf_token() if csrf_token else "" }}'
|
||||
},
|
||||
body: JSON.stringify({content_ids: selectedIds})
|
||||
})
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
if (data.success) {
|
||||
alert(`Successfully deleted ${data.deleted_count} media items.`);
|
||||
location.reload(); // Reload the page to update the media list
|
||||
} else {
|
||||
alert('Error deleting media: ' + (data.error || 'Unknown error'));
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('Error:', error);
|
||||
alert('An error occurred while deleting the media.');
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
function confirmBulkDelete() {
|
||||
const checkedBoxes = Array.from(document.querySelectorAll('.media-checkbox:checked'));
|
||||
if (checkedBoxes.length === 0) {
|
||||
alert('No media files selected.');
|
||||
return;
|
||||
}
|
||||
|
||||
const count = checkedBoxes.length;
|
||||
const message = `Are you sure you want to delete ${count} selected media file${count > 1 ? 's' : ''}? This action cannot be undone.`;
|
||||
|
||||
if (confirm(message)) {
|
||||
// Create a form with selected IDs
|
||||
const form = document.createElement('form');
|
||||
form.method = 'POST';
|
||||
form.action = '{{ url_for("bulk_delete_group_content", group_id=group.id) }}';
|
||||
|
||||
checkedBoxes.forEach(checkbox => {
|
||||
const input = document.createElement('input');
|
||||
input.type = 'hidden';
|
||||
input.name = 'selected_content';
|
||||
input.value = checkbox.value;
|
||||
form.appendChild(input);
|
||||
});
|
||||
|
||||
document.body.appendChild(form);
|
||||
form.submit();
|
||||
}
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
@@ -94,27 +94,18 @@
|
||||
<div class="card-body">
|
||||
{% if content %}
|
||||
<!-- Bulk Actions Controls -->
|
||||
<div class="row mb-3">
|
||||
<div class="col-md-6">
|
||||
<div class="form-check">
|
||||
<input class="form-check-input" type="checkbox" id="selectAll" {% if player.groups %}disabled{% endif %}>
|
||||
<label class="form-check-label" for="selectAll">
|
||||
Select All
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-6 text-end">
|
||||
<button type="button" class="btn btn-danger" id="bulkDeleteBtn" {% if player.groups %}disabled{% endif %} style="display:none;" onclick="confirmBulkDelete()">
|
||||
<i class="bi bi-trash"></i> Delete Selected
|
||||
</button>
|
||||
<div class="mb-3 d-flex flex-wrap align-items-center gap-2">
|
||||
<div class="form-check">
|
||||
<input class="form-check-input" type="checkbox" id="selectAll" {% if player.groups %}disabled{% endif %}>
|
||||
<label class="form-check-label" for="selectAll">
|
||||
Select All
|
||||
</label>
|
||||
</div>
|
||||
<button id="deleteSelected" class="btn btn-danger" {% if player.groups %}disabled{% endif %} style="display: none;">
|
||||
<i class="bi bi-trash"></i> Delete Selected (<span id="selectedCount">0</span>)
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<!-- Bulk Delete Form -->
|
||||
<form id="bulkDeleteForm" action="{{ url_for('bulk_delete_player_content', player_id=player.id) }}" method="post" style="display:none;">
|
||||
<input type="hidden" name="selected_content_ids" id="selectedContentIds">
|
||||
</form>
|
||||
|
||||
<ul class="list-group sortable-list" id="mediaList">
|
||||
{% for media in content %}
|
||||
<li class="list-group-item {% if theme == 'dark' %}dark-mode{% endif %}"
|
||||
@@ -124,10 +115,8 @@
|
||||
<div class="d-flex flex-column flex-md-row align-items-md-center">
|
||||
<!-- Checkbox for bulk selection -->
|
||||
<div class="me-2">
|
||||
<input class="form-check-input media-checkbox"
|
||||
type="checkbox"
|
||||
name="selected_content"
|
||||
value="{{ media.id }}"
|
||||
<input type="checkbox" class="form-check-input media-checkbox"
|
||||
value="{{ media.id }}"
|
||||
{% if player.groups %}disabled{% endif %}>
|
||||
</div>
|
||||
|
||||
@@ -267,77 +256,75 @@ document.addEventListener('DOMContentLoaded', function() {
|
||||
}
|
||||
}
|
||||
|
||||
// Bulk selection functionality
|
||||
// Bulk delete functionality
|
||||
const selectAllCheckbox = document.getElementById('selectAll');
|
||||
const mediaCheckboxes = document.querySelectorAll('.media-checkbox');
|
||||
const bulkDeleteBtn = document.getElementById('bulkDeleteBtn');
|
||||
const deleteSelectedButton = document.getElementById('deleteSelected');
|
||||
const selectedCountSpan = document.getElementById('selectedCount');
|
||||
|
||||
// Select all functionality
|
||||
if (selectAllCheckbox) {
|
||||
selectAllCheckbox.addEventListener('change', function() {
|
||||
mediaCheckboxes.forEach(checkbox => {
|
||||
if (!checkbox.disabled) {
|
||||
checkbox.checked = this.checked;
|
||||
}
|
||||
});
|
||||
updateBulkDeleteButton();
|
||||
});
|
||||
// Update selected count and toggle delete button visibility
|
||||
function updateSelectedCount() {
|
||||
const selectedCount = document.querySelectorAll('.media-checkbox:checked').length;
|
||||
selectedCountSpan.textContent = selectedCount;
|
||||
deleteSelectedButton.style.display = selectedCount > 0 ? 'inline-block' : 'none';
|
||||
}
|
||||
|
||||
// Select/Deselect all checkboxes
|
||||
selectAllCheckbox.addEventListener('change', function() {
|
||||
mediaCheckboxes.forEach(checkbox => {
|
||||
checkbox.checked = selectAllCheckbox.checked;
|
||||
});
|
||||
updateSelectedCount();
|
||||
});
|
||||
|
||||
// Individual checkbox change
|
||||
mediaCheckboxes.forEach(checkbox => {
|
||||
checkbox.addEventListener('change', function() {
|
||||
updateSelectAllState();
|
||||
updateBulkDeleteButton();
|
||||
// Uncheck "Select All" if any checkbox is unchecked
|
||||
if (!this.checked) {
|
||||
selectAllCheckbox.checked = false;
|
||||
}
|
||||
updateSelectedCount();
|
||||
});
|
||||
});
|
||||
|
||||
function updateSelectAllState() {
|
||||
const enabledCheckboxes = Array.from(mediaCheckboxes).filter(cb => !cb.disabled);
|
||||
const checkedBoxes = enabledCheckboxes.filter(cb => cb.checked);
|
||||
|
||||
if (selectAllCheckbox) {
|
||||
selectAllCheckbox.checked = checkedBoxes.length === enabledCheckboxes.length && enabledCheckboxes.length > 0;
|
||||
selectAllCheckbox.indeterminate = checkedBoxes.length > 0 && checkedBoxes.length < enabledCheckboxes.length;
|
||||
}
|
||||
}
|
||||
// Delete selected media
|
||||
deleteSelectedButton.addEventListener('click', function() {
|
||||
const selectedIds = Array.from(mediaCheckboxes)
|
||||
.filter(checkbox => checkbox.checked)
|
||||
.map(checkbox => checkbox.value);
|
||||
|
||||
function updateBulkDeleteButton() {
|
||||
const checkedBoxes = Array.from(mediaCheckboxes).filter(cb => cb.checked);
|
||||
if (bulkDeleteBtn) {
|
||||
bulkDeleteBtn.style.display = checkedBoxes.length > 0 ? 'inline-block' : 'none';
|
||||
if (selectedIds.length === 0) {
|
||||
alert('No media selected for deletion.');
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (confirm(`Are you sure you want to delete ${selectedIds.length} selected media items?`)) {
|
||||
// Send bulk delete request
|
||||
fetch('{{ url_for("bulk_delete_player_content", player_id=player.id) }}', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'X-CSRFToken': '{{ csrf_token() if csrf_token else "" }}'
|
||||
},
|
||||
body: JSON.stringify({content_ids: selectedIds})
|
||||
})
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
if (data.success) {
|
||||
alert('Selected media deleted successfully.');
|
||||
location.reload(); // Reload the page to update the media list
|
||||
} else {
|
||||
alert('Error deleting media: ' + (data.error || 'Unknown error'));
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('Error:', error);
|
||||
alert('An error occurred while deleting the media.');
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
function confirmBulkDelete() {
|
||||
const checkedBoxes = Array.from(document.querySelectorAll('.media-checkbox:checked'));
|
||||
if (checkedBoxes.length === 0) {
|
||||
alert('No media files selected.');
|
||||
return;
|
||||
}
|
||||
|
||||
const count = checkedBoxes.length;
|
||||
const message = `Are you sure you want to delete ${count} selected media file${count > 1 ? 's' : ''}? This action cannot be undone.`;
|
||||
|
||||
if (confirm(message)) {
|
||||
// Create a form with selected IDs
|
||||
const form = document.createElement('form');
|
||||
form.method = 'POST';
|
||||
form.action = '{{ url_for("bulk_delete_player_content", player_id=player.id) }}';
|
||||
|
||||
checkedBoxes.forEach(checkbox => {
|
||||
const input = document.createElement('input');
|
||||
input.type = 'hidden';
|
||||
input.name = 'selected_content';
|
||||
input.value = checkbox.value;
|
||||
form.appendChild(input);
|
||||
});
|
||||
|
||||
document.body.appendChild(form);
|
||||
form.submit();
|
||||
}
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
@@ -136,82 +136,37 @@
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<p id="status-message">Uploading and processing your files. Please wait...</p>
|
||||
<div class="progress mb-3">
|
||||
<div id="progress-bar" class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" style="width: 0%;" aria-valuenow="0" aria-valuemin="0" aria-valuemax="100"></div>
|
||||
</div>
|
||||
|
||||
<!-- File Processing Progress -->
|
||||
<div class="mb-3">
|
||||
<label class="form-label fw-bold">File Processing Progress</label>
|
||||
<div class="progress" style="height: 25px;">
|
||||
<div id="progress-bar" class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" style="width: 0%;" aria-valuenow="0" aria-valuemin="0" aria-valuemax="100"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- System Monitoring in Modal -->
|
||||
{% if system_info %}
|
||||
<div class="mt-4">
|
||||
<h6 class="mb-3">📊 Server Performance During Upload</h6>
|
||||
<!-- Real-time performance monitoring during upload -->
|
||||
<div class="performance-monitor mt-4">
|
||||
<h6>System Load During Upload:</h6>
|
||||
<div class="row">
|
||||
<!-- CPU Usage -->
|
||||
<div class="col-md-4 col-12 mb-3">
|
||||
<label class="form-label">CPU Usage</label>
|
||||
<div class="progress" style="height: 20px;">
|
||||
<div id="cpu-progress" class="progress-bar
|
||||
{% if system_info.cpu_percent < 50 %}bg-success
|
||||
{% elif system_info.cpu_percent < 80 %}bg-warning
|
||||
{% else %}bg-danger{% endif %}"
|
||||
role="progressbar"
|
||||
style="width: {{ system_info.cpu_percent }}%;">
|
||||
{{ system_info.cpu_percent }}%
|
||||
</div>
|
||||
<div class="col-4">
|
||||
<small>CPU Usage</small>
|
||||
<div class="progress mb-1" style="height: 20px;">
|
||||
<div id="modal-cpu-bar" class="progress-bar bg-info" style="width: 0%;">0%</div>
|
||||
</div>
|
||||
<small class="text-muted">{{ system_info.cpu_count }} cores available</small>
|
||||
</div>
|
||||
|
||||
<!-- Memory Usage -->
|
||||
<div class="col-md-4 col-12 mb-3">
|
||||
<label class="form-label">Memory Usage</label>
|
||||
<div class="progress" style="height: 20px;">
|
||||
<div id="memory-progress" class="progress-bar
|
||||
{% if system_info.memory_percent < 60 %}bg-success
|
||||
{% elif system_info.memory_percent < 85 %}bg-warning
|
||||
{% else %}bg-danger{% endif %}"
|
||||
role="progressbar"
|
||||
style="width: {{ system_info.memory_percent }}%;">
|
||||
{{ system_info.memory_percent }}%
|
||||
</div>
|
||||
<div class="col-4">
|
||||
<small>Memory Usage</small>
|
||||
<div class="progress mb-1" style="height: 20px;">
|
||||
<div id="modal-memory-bar" class="progress-bar bg-warning" style="width: 0%;">0%</div>
|
||||
</div>
|
||||
<small class="text-muted" id="memory-text">{{ system_info.memory_used }}GB / {{ system_info.memory_total }}GB</small>
|
||||
</div>
|
||||
|
||||
<!-- Disk Usage -->
|
||||
<div class="col-md-4 col-12 mb-3">
|
||||
<label class="form-label">Disk Space</label>
|
||||
<div class="progress" style="height: 20px;">
|
||||
<div id="disk-progress" class="progress-bar
|
||||
{% if system_info.disk_percent < 70 %}bg-success
|
||||
{% elif system_info.disk_percent < 90 %}bg-warning
|
||||
{% else %}bg-danger{% endif %}"
|
||||
role="progressbar"
|
||||
style="width: {{ system_info.disk_percent }}%;">
|
||||
{{ system_info.disk_percent }}%
|
||||
</div>
|
||||
<div class="col-4">
|
||||
<small>Disk Usage</small>
|
||||
<div class="progress mb-1" style="height: 20px;">
|
||||
<div id="modal-disk-bar" class="progress-bar bg-danger" style="width: 0%;">0%</div>
|
||||
</div>
|
||||
<small class="text-muted" id="disk-text">{{ system_info.disk_free }}GB free</small>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Storage Summary -->
|
||||
<div class="row mt-2">
|
||||
<div class="col-md-6 col-12 text-center">
|
||||
<strong>Current Media Storage:</strong>
|
||||
<span class="text-primary" id="storage-size">{{ system_info.upload_folder_size }}GB</span>
|
||||
</div>
|
||||
<div class="col-md-6 col-12 text-center">
|
||||
<strong>Last Updated:</strong>
|
||||
<span class="text-muted" id="modal-last-update">Just now</span>
|
||||
</div>
|
||||
<div class="mt-2">
|
||||
<small id="perf-stats" class="text-muted">Waiting for performance data...</small>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="modal-footer {{ 'dark-mode' if theme == 'dark' else '' }}">
|
||||
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal" disabled>Close</button>
|
||||
@@ -223,145 +178,6 @@
|
||||
|
||||
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha3/dist/js/bootstrap.bundle.min.js"></script>
|
||||
<script>
|
||||
function showStatusModal() {
|
||||
console.log("Processing popup triggered");
|
||||
const statusModal = new bootstrap.Modal(document.getElementById('statusModal'));
|
||||
statusModal.show();
|
||||
|
||||
// Update status message based on media type
|
||||
const mediaType = document.getElementById('media_type').value;
|
||||
const statusMessage = document.getElementById('status-message');
|
||||
|
||||
switch(mediaType) {
|
||||
case 'image':
|
||||
statusMessage.textContent = 'Uploading images...';
|
||||
break;
|
||||
case 'video':
|
||||
statusMessage.textContent = 'Uploading and processing video. This may take a while...';
|
||||
break;
|
||||
case 'pdf':
|
||||
statusMessage.textContent = 'Converting PDF to 4K images. This may take a while...';
|
||||
break;
|
||||
case 'ppt':
|
||||
statusMessage.textContent = 'Converting PowerPoint to 4K images. This may take a while...';
|
||||
break;
|
||||
default:
|
||||
statusMessage.textContent = 'Uploading and processing your files. Please wait...';
|
||||
}
|
||||
|
||||
// Start system monitoring updates in modal
|
||||
{% if system_info %}
|
||||
startModalSystemMonitoring();
|
||||
{% endif %}
|
||||
|
||||
// Simulate progress updates
|
||||
const progressBar = document.getElementById('progress-bar');
|
||||
let progress = 0;
|
||||
const interval = setInterval(() => {
|
||||
// For slow processes, increment more slowly
|
||||
const increment = (mediaType === 'image') ? 20 : 5;
|
||||
progress += increment;
|
||||
|
||||
if (progress >= 100) {
|
||||
clearInterval(interval);
|
||||
statusMessage.textContent = 'Files uploaded and processed successfully!';
|
||||
|
||||
// Stop system monitoring updates
|
||||
{% if system_info %}
|
||||
stopModalSystemMonitoring();
|
||||
{% endif %}
|
||||
|
||||
// Enable the close button
|
||||
document.querySelector('[data-bs-dismiss="modal"]').disabled = false;
|
||||
} else {
|
||||
progressBar.style.width = `${progress}%`;
|
||||
progressBar.setAttribute('aria-valuenow', progress);
|
||||
}
|
||||
}, 500);
|
||||
}
|
||||
|
||||
{% if system_info %}
|
||||
let modalSystemInterval;
|
||||
|
||||
function updateModalSystemInfo() {
|
||||
fetch('/api/system_info')
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
if (data.error) {
|
||||
console.warn('Could not fetch system info:', data.error);
|
||||
return;
|
||||
}
|
||||
|
||||
// Update CPU
|
||||
const cpuProgress = document.getElementById('cpu-progress');
|
||||
if (cpuProgress) {
|
||||
cpuProgress.style.width = data.cpu_percent + '%';
|
||||
cpuProgress.textContent = data.cpu_percent + '%';
|
||||
cpuProgress.className = 'progress-bar ' +
|
||||
(data.cpu_percent < 50 ? 'bg-success' :
|
||||
data.cpu_percent < 80 ? 'bg-warning' : 'bg-danger');
|
||||
}
|
||||
|
||||
// Update Memory
|
||||
const memoryProgress = document.getElementById('memory-progress');
|
||||
const memoryText = document.getElementById('memory-text');
|
||||
if (memoryProgress) {
|
||||
memoryProgress.style.width = data.memory_percent + '%';
|
||||
memoryProgress.textContent = data.memory_percent + '%';
|
||||
memoryProgress.className = 'progress-bar ' +
|
||||
(data.memory_percent < 60 ? 'bg-success' :
|
||||
data.memory_percent < 85 ? 'bg-warning' : 'bg-danger');
|
||||
}
|
||||
if (memoryText) {
|
||||
memoryText.textContent = data.memory_used + 'GB / ' + data.memory_total + 'GB';
|
||||
}
|
||||
|
||||
// Update Disk
|
||||
const diskProgress = document.getElementById('disk-progress');
|
||||
const diskText = document.getElementById('disk-text');
|
||||
if (diskProgress) {
|
||||
diskProgress.style.width = data.disk_percent + '%';
|
||||
diskProgress.textContent = data.disk_percent + '%';
|
||||
diskProgress.className = 'progress-bar ' +
|
||||
(data.disk_percent < 70 ? 'bg-success' :
|
||||
data.disk_percent < 90 ? 'bg-warning' : 'bg-danger');
|
||||
}
|
||||
if (diskText) {
|
||||
diskText.textContent = data.disk_free + 'GB free';
|
||||
}
|
||||
|
||||
// Update storage size
|
||||
const storageSize = document.getElementById('storage-size');
|
||||
if (storageSize) {
|
||||
storageSize.textContent = data.upload_folder_size + 'GB';
|
||||
}
|
||||
|
||||
// Update timestamp
|
||||
const lastUpdate = document.getElementById('modal-last-update');
|
||||
if (lastUpdate) {
|
||||
lastUpdate.textContent = new Date().toLocaleTimeString();
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
console.warn('Modal system monitoring update failed:', error);
|
||||
});
|
||||
}
|
||||
|
||||
function startModalSystemMonitoring() {
|
||||
// Update immediately
|
||||
updateModalSystemInfo();
|
||||
// Then update every 3 seconds for real-time monitoring during upload
|
||||
modalSystemInterval = setInterval(updateModalSystemInfo, 3000);
|
||||
}
|
||||
|
||||
function stopModalSystemMonitoring() {
|
||||
if (modalSystemInterval) {
|
||||
clearInterval(modalSystemInterval);
|
||||
modalSystemInterval = null;
|
||||
}
|
||||
}
|
||||
{% endif %}
|
||||
|
||||
function updateTargetIdOptions() {
|
||||
const targetType = document.getElementById('target_type').value;
|
||||
const targetIdSelect = document.getElementById('target_id');
|
||||
@@ -438,6 +254,9 @@
|
||||
statusMessage.textContent = 'Uploading and processing your files. Please wait...';
|
||||
}
|
||||
|
||||
// Start performance monitoring during upload
|
||||
startUploadMonitoring();
|
||||
|
||||
// Simulate progress updates
|
||||
const progressBar = document.getElementById('progress-bar');
|
||||
let progress = 0;
|
||||
@@ -448,6 +267,7 @@
|
||||
|
||||
if (progress >= 100) {
|
||||
clearInterval(interval);
|
||||
stopUploadMonitoring();
|
||||
statusMessage.textContent = 'Files uploaded and processed successfully!';
|
||||
|
||||
// Enable the close button
|
||||
@@ -458,6 +278,82 @@
|
||||
}
|
||||
}, 500);
|
||||
}
|
||||
|
||||
// Performance monitoring during upload
|
||||
let uploadMonitoringInterval = null;
|
||||
let startCpu = 0, startMemory = 0;
|
||||
let maxUploadCpu = 0, maxUploadMemory = 0;
|
||||
|
||||
function updateModalPerformance(data) {
|
||||
// Update CPU bar
|
||||
const cpuBar = document.getElementById('modal-cpu-bar');
|
||||
cpuBar.style.width = `${data.cpu_percent}%`;
|
||||
cpuBar.textContent = `${data.cpu_percent.toFixed(1)}%`;
|
||||
if (data.cpu_percent > 75) cpuBar.className = 'progress-bar bg-danger';
|
||||
else if (data.cpu_percent > 50) cpuBar.className = 'progress-bar bg-warning';
|
||||
else cpuBar.className = 'progress-bar bg-info';
|
||||
|
||||
// Update Memory bar
|
||||
const memoryBar = document.getElementById('modal-memory-bar');
|
||||
memoryBar.style.width = `${data.memory_percent}%`;
|
||||
memoryBar.textContent = `${data.memory_percent.toFixed(1)}%`;
|
||||
if (data.memory_percent > 75) memoryBar.className = 'progress-bar bg-danger';
|
||||
else if (data.memory_percent > 50) memoryBar.className = 'progress-bar bg-warning';
|
||||
else memoryBar.className = 'progress-bar bg-warning';
|
||||
|
||||
// Update Disk bar
|
||||
const diskBar = document.getElementById('modal-disk-bar');
|
||||
diskBar.style.width = `${data.disk_percent}%`;
|
||||
diskBar.textContent = `${data.disk_percent.toFixed(1)}%`;
|
||||
if (data.disk_percent > 85) diskBar.className = 'progress-bar bg-danger';
|
||||
else diskBar.className = 'progress-bar bg-danger';
|
||||
|
||||
// Track peaks
|
||||
if (data.cpu_percent > maxUploadCpu) maxUploadCpu = data.cpu_percent;
|
||||
if (data.memory_percent > maxUploadMemory) maxUploadMemory = data.memory_percent;
|
||||
|
||||
// Update stats text
|
||||
const perfStats = document.getElementById('perf-stats');
|
||||
const cpuChange = startCpu ? (data.cpu_percent - startCpu).toFixed(1) : '0.0';
|
||||
const memChange = startMemory ? (data.memory_percent - startMemory).toFixed(1) : '0.0';
|
||||
perfStats.innerHTML = `CPU: ${cpuChange > 0 ? '+' : ''}${cpuChange}% | Memory: ${memChange > 0 ? '+' : ''}${memChange}% | Peak CPU: ${maxUploadCpu.toFixed(1)}%`;
|
||||
}
|
||||
|
||||
function startUploadMonitoring() {
|
||||
// Get baseline performance
|
||||
fetch('/api/performance')
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
if (!data.error) {
|
||||
startCpu = data.cpu_percent;
|
||||
startMemory = data.memory_percent;
|
||||
maxUploadCpu = data.cpu_percent;
|
||||
maxUploadMemory = data.memory_percent;
|
||||
updateModalPerformance(data);
|
||||
}
|
||||
});
|
||||
|
||||
// Start monitoring every 1 second during upload
|
||||
uploadMonitoringInterval = setInterval(() => {
|
||||
fetch('/api/performance')
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
if (!data.error) {
|
||||
updateModalPerformance(data);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
console.log('Performance monitoring error:', error);
|
||||
});
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
function stopUploadMonitoring() {
|
||||
if (uploadMonitoringInterval) {
|
||||
clearInterval(uploadMonitoringInterval);
|
||||
uploadMonitoringInterval = null;
|
||||
}
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
BIN
utils/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
utils/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
utils/__pycache__/group_player_management.cpython-311.pyc
Normal file
BIN
utils/__pycache__/group_player_management.cpython-311.pyc
Normal file
Binary file not shown.
BIN
utils/__pycache__/logger.cpython-311.pyc
Normal file
BIN
utils/__pycache__/logger.cpython-311.pyc
Normal file
Binary file not shown.
BIN
utils/__pycache__/uploads.cpython-311.pyc
Normal file
BIN
utils/__pycache__/uploads.cpython-311.pyc
Normal file
Binary file not shown.
@@ -333,7 +333,7 @@ def edit_group_media(group_id, content_id, new_duration):
|
||||
|
||||
def delete_group_media(group_id, content_id):
|
||||
"""
|
||||
Delete a media item from all players in a group and remove the physical file.
|
||||
Delete a media item from all players in a group.
|
||||
|
||||
Args:
|
||||
group_id (int): ID of the group
|
||||
@@ -344,8 +344,6 @@ def delete_group_media(group_id, content_id):
|
||||
"""
|
||||
from models import Group, Content
|
||||
from extensions import db
|
||||
from flask import current_app
|
||||
import os
|
||||
|
||||
group = Group.query.get_or_404(group_id)
|
||||
content = Content.query.get(content_id)
|
||||
@@ -360,19 +358,6 @@ def delete_group_media(group_id, content_id):
|
||||
db.session.delete(content)
|
||||
count += 1
|
||||
|
||||
# Delete the physical file using absolute path
|
||||
upload_folder = current_app.config['UPLOAD_FOLDER']
|
||||
if not os.path.isabs(upload_folder):
|
||||
upload_folder = os.path.abspath(upload_folder)
|
||||
file_path = os.path.join(upload_folder, file_name)
|
||||
|
||||
if os.path.exists(file_path):
|
||||
try:
|
||||
os.remove(file_path)
|
||||
print(f"Deleted physical file: {file_path}")
|
||||
except OSError as e:
|
||||
print(f"Error deleting file {file_path}: {e}")
|
||||
|
||||
db.session.commit()
|
||||
|
||||
# Log the content deletion
|
||||
@@ -381,5 +366,4 @@ def delete_group_media(group_id, content_id):
|
||||
return True
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
print(f"Error in delete_group_media: {e}")
|
||||
return False
|
||||
@@ -1,36 +1,34 @@
|
||||
import os
|
||||
import subprocess
|
||||
import signal
|
||||
import psutil
|
||||
import time
|
||||
from flask import Flask
|
||||
from werkzeug.utils import secure_filename
|
||||
from pdf2image import convert_from_path
|
||||
from pptx import Presentation
|
||||
from PIL import Image, ImageDraw, ImageFont
|
||||
import io
|
||||
from extensions import db
|
||||
from models import Content, Player, Group
|
||||
from utils.logger import log_content_added, log_upload, log_process
|
||||
|
||||
# Add timeout handling class
|
||||
class TimeoutError(Exception):
|
||||
pass
|
||||
|
||||
def timeout_handler(signum, frame):
|
||||
raise TimeoutError("Operation timed out")
|
||||
|
||||
# Function to add image to playlist
|
||||
def add_image_to_playlist(app, file, filename, duration, target_type, target_id):
|
||||
"""
|
||||
Save the image file and add it to the playlist database.
|
||||
"""
|
||||
# Ensure we use absolute path for upload folder
|
||||
upload_folder = app.config['UPLOAD_FOLDER']
|
||||
if not os.path.isabs(upload_folder):
|
||||
upload_folder = os.path.abspath(upload_folder)
|
||||
|
||||
# Ensure upload folder exists
|
||||
if not os.path.exists(upload_folder):
|
||||
os.makedirs(upload_folder, exist_ok=True)
|
||||
print(f"Created upload folder: {upload_folder}")
|
||||
|
||||
file_path = os.path.join(upload_folder, filename)
|
||||
print(f"Saving image to: {file_path}")
|
||||
|
||||
file_path = os.path.join(app.config['UPLOAD_FOLDER'], filename)
|
||||
# Only save if file does not already exist
|
||||
if not os.path.exists(file_path):
|
||||
file.save(file_path)
|
||||
print(f"Image saved successfully: {file_path}")
|
||||
else:
|
||||
print(f"File already exists: {file_path}")
|
||||
|
||||
print(f"Adding image to playlist: {filename}, Target Type: {target_type}, Target ID: {target_id}")
|
||||
|
||||
@@ -55,19 +53,12 @@ def convert_video(input_file, output_folder):
|
||||
"""
|
||||
Converts a video file to MP4 format with H.264 codec.
|
||||
"""
|
||||
# Ensure we use absolute path for output folder
|
||||
if not os.path.isabs(output_folder):
|
||||
output_folder = os.path.abspath(output_folder)
|
||||
print(f"Converted output folder to absolute path: {output_folder}")
|
||||
|
||||
if not os.path.exists(output_folder):
|
||||
os.makedirs(output_folder, exist_ok=True)
|
||||
print(f"Created output folder: {output_folder}")
|
||||
os.makedirs(output_folder)
|
||||
|
||||
# Generate the output file path
|
||||
base_name = os.path.splitext(os.path.basename(input_file))[0]
|
||||
output_file = os.path.join(output_folder, f"{base_name}.mp4")
|
||||
print(f"Converting video: {input_file} -> {output_file}")
|
||||
|
||||
# FFmpeg command to convert the video
|
||||
command = [
|
||||
@@ -97,14 +88,7 @@ def convert_video_and_update_playlist(app, file_path, original_filename, target_
|
||||
Converts a video and updates the playlist database.
|
||||
"""
|
||||
print(f"Starting video conversion for: {file_path}")
|
||||
|
||||
# Ensure we use absolute path for upload folder
|
||||
upload_folder = app.config['UPLOAD_FOLDER']
|
||||
if not os.path.isabs(upload_folder):
|
||||
upload_folder = os.path.abspath(upload_folder)
|
||||
print(f"Converted upload folder to absolute path: {upload_folder}")
|
||||
|
||||
converted_file = convert_video(file_path, upload_folder)
|
||||
converted_file = convert_video(file_path, app.config['UPLOAD_FOLDER'])
|
||||
if converted_file:
|
||||
converted_filename = os.path.basename(converted_file)
|
||||
print(f"Video converted successfully: {converted_filename}")
|
||||
@@ -134,79 +118,39 @@ def convert_video_and_update_playlist(app, file_path, original_filename, target_
        print(f"Video conversion failed for: {file_path}")

# PDF conversion functions
def convert_pdf_to_images(pdf_file, output_folder, delete_pdf=True, dpi=300):
def convert_pdf_to_images(pdf_file, output_folder, delete_pdf=True, dpi=600):
    """
    Convert a PDF file to high-quality JPG images in sequential order.
    Uses standard 300 DPI for reliable conversion.
    Convert a PDF file to images in sequential order at high resolution (4K).
    """
    print(f"Converting PDF to JPG images: {pdf_file} at {dpi} DPI")
    print(f"Original output folder: {output_folder}")

    # Force absolute path resolution to ensure we use the app directory
    if not os.path.isabs(output_folder):
        # If relative path, resolve from the current working directory
        output_folder = os.path.abspath(output_folder)
        print(f"Converted relative path to absolute: {output_folder}")
    else:
        print(f"Using provided absolute path: {output_folder}")

    # Ensure we're using the app static folder, not workspace root
    if output_folder.endswith('static/uploads'):
        # Check if we're accidentally using workspace root instead of app folder
        expected_app_path = '/opt/digiserver/app/static/uploads'
        if output_folder != expected_app_path:
            print(f"WARNING: Correcting path from {output_folder} to {expected_app_path}")
            output_folder = expected_app_path

    print(f"Final output folder: {output_folder}")

    print(f"Converting PDF to images: {pdf_file} at {dpi} DPI")
    try:
        # Ensure output folder exists
        if not os.path.exists(output_folder):
            os.makedirs(output_folder, exist_ok=True)
            print(f"Created output folder: {output_folder}")

        # Convert PDF to images using pdf2image
        print("Starting PDF conversion...")
        # Convert PDF to images
        images = convert_from_path(pdf_file, dpi=dpi)
        print(f"PDF converted to {len(images)} page(s)")

        if not images:
            print("ERROR: No images generated from PDF")
            return []

        base_name = os.path.splitext(os.path.basename(pdf_file))[0]
        image_filenames = []

        # Save each page as JPG image
        # Save each page as an image with zero-padded page numbers for proper sorting
        for i, image in enumerate(images):
            # Convert to RGB if necessary
            if image.mode != 'RGB':
                image = image.convert('RGB')

            # Simple naming with page numbers
            # Use consistent naming with zero-padded page numbers (e.g., page_001.jpg)
            page_num = str(i + 1).zfill(3)  # e.g., 001, 002, etc.
            image_filename = f"{base_name}_page_{page_num}.jpg"
            image_path = os.path.join(output_folder, image_filename)

            # Save as JPG
            image.save(image_path, 'JPEG', quality=85, optimize=True)
            image.save(image_path, 'JPEG')
            image_filenames.append(image_filename)
            print(f"Saved page {i + 1} to: {image_path}")
            print(f"Saved page {i + 1} as image: {image_path}")

        print(f"PDF conversion complete. {len(image_filenames)} JPG images saved to {output_folder}")

        # Delete the PDF file if requested and conversion was successful
        if delete_pdf and os.path.exists(pdf_file) and image_filenames:
        # Verify all pages were saved
        print(f"PDF conversion complete. {len(image_filenames)} pages saved.")
        print(f"Images in order: {image_filenames}")

        # Delete the PDF file if requested
        if delete_pdf and os.path.exists(pdf_file):
            os.remove(pdf_file)
            print(f"PDF file deleted: {pdf_file}")


        return image_filenames

    except Exception as e:
        print(f"Error converting PDF to JPG images: {e}")
        import traceback
        traceback.print_exc()
        print(f"Error converting PDF to images: {e}")
        return []

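The zero-padded page numbers used here (`page_001.jpg`, `page_002.jpg`, ...) are what keep pages in order when filenames are later sorted as strings; without padding, `page_10` would sort before `page_2`. A small standalone illustration, not DigiServer code:

```python
# Demonstrates why zero-padded page numbers keep pages in order under a plain string sort.
unpadded = [f"doc_page_{i}.jpg" for i in range(1, 12)]
padded = [f"doc_page_{str(i).zfill(3)}.jpg" for i in range(1, 12)]

print(sorted(unpadded)[:3])  # ['doc_page_1.jpg', 'doc_page_10.jpg', 'doc_page_11.jpg'] -- wrong order
print(sorted(padded)[:3])    # ['doc_page_001.jpg', 'doc_page_002.jpg', 'doc_page_003.jpg'] -- correct
```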
def update_playlist_with_files(image_filenames, duration, target_type, target_id):
@@ -263,35 +207,126 @@ def process_pdf(input_file, output_folder, duration, target_type, target_id):
    Returns:
        bool: True if successful, False otherwise
    """
    print(f"Processing PDF file: {input_file}")
    print(f"Output folder: {output_folder}")

    # Ensure we have absolute path for output folder
    if not os.path.isabs(output_folder):
        output_folder = os.path.abspath(output_folder)
        print(f"Converted output folder to absolute path: {output_folder}")

    # Ensure output folder exists
    if not os.path.exists(output_folder):
        os.makedirs(output_folder, exist_ok=True)
        print(f"Created output folder: {output_folder}")
        os.makedirs(output_folder)

    # Convert PDF to images using standard quality (delete PDF after successful conversion)
    image_filenames = convert_pdf_to_images(input_file, output_folder, delete_pdf=True, dpi=300)
    # Convert PDF to images
    image_filenames = convert_pdf_to_images(input_file, output_folder)

    # Update playlist with generated images
    if image_filenames:
        success = update_playlist_with_files(image_filenames, duration, target_type, target_id)
        if success:
            print(f"Successfully processed PDF: {len(image_filenames)} images added to playlist")
        return success
    else:
        print("Failed to convert PDF to images")
        return False
        return update_playlist_with_files(image_filenames, duration, target_type, target_id)
    return False

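Both versions of `process_pdf` shown here convert the PDF and then hand the generated filenames to `update_playlist_with_files`; one passes `delete_pdf=True` and `dpi=300` explicitly and reports success, the other relies on the function defaults. A hedged sketch of a caller; the paths, duration, and target values below are assumptions for illustration, and `process_pdf` is assumed to be importable from this module:

```python
# Hypothetical caller -- file names, duration and target values are made up for illustration.
pdf_path = "/opt/digiserver/app/static/uploads/brochure.pdf"
output_folder = "/opt/digiserver/app/static/uploads"

ok = process_pdf(pdf_path, output_folder, duration=10, target_type="player", target_id=1)
if not ok:
    print("PDF processing failed; nothing was added to the playlist")
```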
def process_pptx(input_file, output_folder, duration, target_type, target_id):
def convert_pptx_to_images_direct(pptx_file, output_folder, delete_pptx=True, dpi=300):
    """
    Process a PPTX file: convert to PDF first, then to JPG images (same workflow as PDF).
    Convert a PPTX file directly to images using python-pptx library.
    This eliminates the need for LibreOffice and provides more reliable conversion.

    Args:
        pptx_file (str): Path to the PPTX file
        output_folder (str): Path to save the images
        delete_pptx (bool): Whether to delete the original PPTX file
        dpi (int): DPI for image conversion

    Returns:
        list: List of generated image filenames in order
    """
    print(f"Converting PPTX directly to images: {pptx_file} at {dpi} DPI")

    try:
        # Open the presentation
        presentation = Presentation(pptx_file)
        base_name = os.path.splitext(os.path.basename(pptx_file))[0]
        image_filenames = []

        print(f"PPTX has {len(presentation.slides)} slides")

        # Calculate image dimensions based on DPI
        # Standard slide size is 10" x 7.5" (25.4cm x 19.05cm)
        width_px = int(10 * dpi)  # 10 inches * DPI
        height_px = int(7.5 * dpi)  # 7.5 inches * DPI

        for i, slide in enumerate(presentation.slides):
            try:
                # Use zero-padded page numbers for proper sorting
                page_num = str(i + 1).zfill(3)
                image_filename = f"{base_name}_page_{page_num}.jpg"
                image_path = os.path.join(output_folder, image_filename)

                # Create a temporary image for the slide
                # Note: python-pptx doesn't directly export to images, so we'll use a workaround
                # Save slide as individual PPTX, then convert via LibreOffice for this slide only
                temp_slide_pptx = os.path.join(output_folder, f"temp_slide_{i+1}.pptx")
                temp_slide_pdf = os.path.join(output_folder, f"temp_slide_{i+1}.pdf")

                # Create a new presentation with just this slide
                temp_presentation = Presentation()
                # Copy slide layout and content
                slide_layout = temp_presentation.slide_layouts[0]  # Use blank layout
                temp_slide = temp_presentation.slides.add_slide(slide_layout)

                # Copy all shapes from original slide to temp slide
                for shape in slide.shapes:
                    # This is a simplified copy - for production, you'd need more comprehensive shape copying
                    pass

                # Save temporary presentation
                temp_presentation.save(temp_slide_pptx)

                # Convert single slide to PDF using LibreOffice (smaller, faster)
                libreoffice_cmd = [
                    'libreoffice',
                    '--headless',
                    '--convert-to', 'pdf',
                    '--outdir', output_folder,
                    temp_slide_pptx
                ]

                result = subprocess.run(libreoffice_cmd, capture_output=True, text=True, timeout=60)

                if result.returncode == 0 and os.path.exists(temp_slide_pdf):
                    # Convert PDF to image
                    images = convert_from_path(temp_slide_pdf, dpi=dpi)
                    if images:
                        images[0].save(image_path, 'JPEG', quality=85, optimize=True)
                        image_filenames.append(image_filename)
                        print(f"Saved slide {i + 1}/{len(presentation.slides)} as: {image_filename}")

                    # Clean up temporary files
                    if os.path.exists(temp_slide_pdf):
                        os.remove(temp_slide_pdf)
                else:
                    print(f"Failed to convert slide {i + 1}")

                # Clean up temporary PPTX
                if os.path.exists(temp_slide_pptx):
                    os.remove(temp_slide_pptx)

            except Exception as e:
                print(f"Error processing slide {i + 1}: {e}")
                continue

        print(f"PPTX conversion complete. Generated {len(image_filenames)} images")

        # Delete the original PPTX file if requested
        if delete_pptx and os.path.exists(pptx_file):
            pptx_size = os.path.getsize(pptx_file) / (1024*1024)
            os.remove(pptx_file)
            print(f"Original PPTX file deleted: {pptx_file} ({pptx_size:.2f} MB freed)")

        return image_filenames

    except Exception as e:
        print(f"Error converting PPTX to images: {e}")
        import traceback
        traceback.print_exc()
        return []

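`convert_pptx_to_images_direct` shells out to LibreOffice once per slide, and, as its own comment notes, the shape-copying loop is only a stub, so each temporary one-slide deck is effectively empty. A sketch of the simpler alternative that converts the whole deck once and then splits the resulting PDF with pdf2image; the function name and details are assumptions, not this repository's code:

```python
# Sketch under assumptions: convert the whole deck once instead of one slide at a time.
import os
import subprocess
from pdf2image import convert_from_path

def pptx_to_page_images(pptx_file: str, output_folder: str, dpi: int = 300) -> list:
    """Convert a PPTX to one JPG per slide via a single LibreOffice PDF export."""
    os.makedirs(output_folder, exist_ok=True)
    subprocess.run(
        ["libreoffice", "--headless", "--convert-to", "pdf", "--outdir", output_folder, pptx_file],
        check=True, capture_output=True, timeout=300,
    )
    base = os.path.splitext(os.path.basename(pptx_file))[0]
    pdf_file = os.path.join(output_folder, f"{base}.pdf")

    filenames = []
    for i, page in enumerate(convert_from_path(pdf_file, dpi=dpi), start=1):
        name = f"{base}_page_{str(i).zfill(3)}.jpg"  # zero-padded, same naming scheme as the diff
        page.convert("RGB").save(os.path.join(output_folder, name), "JPEG", quality=85)
        filenames.append(name)

    os.remove(pdf_file)  # keep only the page images
    return filenames
```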
def process_pptx_improved(input_file, output_folder, duration, target_type, target_id):
    """
    Improved PPTX processing function that's more reliable and faster.

    Args:
        input_file (str): Path to the PPTX file
@@ -303,49 +338,43 @@ def process_pptx(input_file, output_folder, duration, target_type, target_id):
    Returns:
        bool: True if successful, False otherwise
    """
    print(f"Processing PPTX file using PDF workflow: {input_file}")
    print(f"Output folder: {output_folder}")

    # Ensure we have absolute path for output folder
    if not os.path.isabs(output_folder):
        output_folder = os.path.abspath(output_folder)
        print(f"Converted output folder to absolute path: {output_folder}")

    # Ensure output folder exists
    if not os.path.exists(output_folder):
        os.makedirs(output_folder, exist_ok=True)
        print(f"Created output folder: {output_folder}")
    print(f"=== Starting Improved PPTX Processing ===")
    print(f"Input file: {input_file}")

    try:
        # Step 1: Convert PPTX to PDF using LibreOffice for vector quality
        from utils.pptx_converter import pptx_to_pdf_libreoffice
        pdf_file = pptx_to_pdf_libreoffice(input_file, output_folder)
        file_size = os.path.getsize(input_file) / (1024*1024)
        print(f"File size: {file_size:.2f} MB")

        if not pdf_file:
            print("Error: Failed to convert PPTX to PDF")
        # Ensure output folder exists
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)

        # Check if LibreOffice is available
        try:
            result = subprocess.run(['libreoffice', '--version'], capture_output=True, text=True, timeout=10)
            if result.returncode != 0:
                print("LibreOffice not available, falling back to basic conversion")
                return False
        except:
            print("LibreOffice not available, falling back to basic conversion")
            return False

        print(f"PPTX successfully converted to PDF: {pdf_file}")

        # Step 2: Use the same PDF to images workflow as direct PDF uploads
        # Convert PDF to JPG images (300 DPI, same as PDF workflow)
        image_filenames = convert_pdf_to_images(pdf_file, output_folder, delete_pdf=True, dpi=300)
        # Convert PPTX directly to images
        image_filenames = convert_pptx_to_images_direct(input_file, output_folder, True, dpi=300)

        # Verify we got images
        if not image_filenames:
            print("Error: Failed to convert PDF to images")
            print("Error: No images were generated from the PPTX")
            return False

        print(f"Generated {len(image_filenames)} JPG images from PPTX → PDF")

        # Step 3: Delete the original PPTX file after successful conversion
        if os.path.exists(input_file):
            os.remove(input_file)
            print(f"Original PPTX file deleted: {input_file}")

        # Step 4: Update playlist with generated images in sequential order
        print(f"Generated {len(image_filenames)} images for PPTX")

        # Update playlist with generated images in sequential order
        success = update_playlist_with_files(image_filenames, duration, target_type, target_id)
        if success:
            print(f"Successfully processed PPTX: {len(image_filenames)} images added to playlist")

        print(f"=== PPTX Processing Complete ===")
        print(f"Successfully processed {len(image_filenames)} slides")

        return success

    except Exception as e:
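One path in this hunk probes for LibreOffice by running `libreoffice --version` inside a bare `except:`, which also swallows unrelated errors. A narrower probe might look like the following; this is a sketch, not the project's helper:

```python
# Alternative availability probe -- an illustrative sketch, not DigiServer code.
import shutil
import subprocess

def libreoffice_available(timeout: int = 10) -> bool:
    """Return True if a LibreOffice binary is on PATH and answers --version."""
    binary = shutil.which("libreoffice") or shutil.which("soffice")
    if binary is None:
        return False
    try:
        result = subprocess.run([binary, "--version"],
                                capture_output=True, text=True, timeout=timeout)
        return result.returncode == 0
    except (subprocess.TimeoutExpired, OSError):
        return False
```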
@@ -376,20 +405,8 @@ def process_uploaded_files(app, files, media_type, duration, target_type, target
        try:
            # Generate a secure filename and save the file
            filename = secure_filename(file.filename)

            # Ensure we use absolute path for upload folder
            upload_folder = app.config['UPLOAD_FOLDER']
            if not os.path.isabs(upload_folder):
                upload_folder = os.path.abspath(upload_folder)

            # Ensure upload folder exists
            if not os.path.exists(upload_folder):
                os.makedirs(upload_folder, exist_ok=True)
                print(f"Created upload folder: {upload_folder}")

            file_path = os.path.join(upload_folder, filename)
            file_path = os.path.join(app.config['UPLOAD_FOLDER'], filename)
            file.save(file_path)
            print(f"File saved to: {file_path}")

            print(f"Processing file: {filename}, Media Type: {media_type}")
            result = {'filename': filename, 'success': True, 'message': ''}
@@ -415,7 +432,7 @@ def process_uploaded_files(app, files, media_type, duration, target_type, target
                    player.playlist_version += 1

                db.session.commit()
                # Start background conversion using absolute path
                # Start background conversion
                import threading
                threading.Thread(target=convert_video_and_update_playlist,
                                 args=(app, file_path, filename, target_type, target_id, duration)).start()
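The conversion is started on a plain `threading.Thread` that receives the Flask `app` object, so the worker can push its own application context before it touches the database. A minimal sketch of that pattern; the Flask app and the worker body below are illustrative, not DigiServer code:

```python
# Minimal pattern sketch -- the worker re-enters an app context before using the DB.
import threading
from flask import Flask

app = Flask(__name__)

def convert_and_update(app, file_path, filename, target_type, target_id, duration):
    with app.app_context():  # required for SQLAlchemy work outside a request
        print(f"converting {file_path} for {target_type} {target_id} ({duration}s)")
        # ... run ffmpeg here, then update the playlist rows and commit ...

threading.Thread(
    target=convert_and_update,
    args=(app, "/tmp/upload.mp4", "upload.mp4", "player", 1, 10),  # illustrative values
    daemon=True,  # don't block interpreter shutdown on a long conversion
).start()
```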
@@ -423,8 +440,8 @@ def process_uploaded_files(app, files, media_type, duration, target_type, target
                log_upload('video', filename, target_type, target_id)

            elif media_type == 'pdf':
                # For PDFs, convert to images and update playlist using absolute path
                success = process_pdf(file_path, upload_folder,
                # For PDFs, convert to images and update playlist
                success = process_pdf(file_path, app.config['UPLOAD_FOLDER'],
                                      duration, target_type, target_id)
                if success:
                    result['message'] = f"PDF {filename} processed successfully"
@@ -434,8 +451,8 @@ def process_uploaded_files(app, files, media_type, duration, target_type, target
                    result['message'] = f"Error processing PDF file: {filename}"

            elif media_type == 'ppt':
                # For PPT/PPTX, convert to PDF, then to images, and update playlist using absolute path
                success = process_pptx(file_path, upload_folder,
                # For PPT/PPTX, convert to PDF, then to images, and update playlist
                success = process_pptx_improved(file_path, app.config['UPLOAD_FOLDER'],
                                                duration, target_type, target_id)
                if success:
                    result['message'] = f"PowerPoint {filename} processed successfully"
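Taken together, these hunks route each upload by `media_type`: videos are converted on a background thread, PDFs go through `process_pdf`, and PPT/PPTX files go through `process_pptx` or `process_pptx_improved`, depending on the side of the diff. A condensed sketch of that dispatch, assuming the functions from this module are in scope; error handling, logging, and the result dict are omitted:

```python
# Condensed illustration of the dispatch shown above; not a drop-in replacement.
def dispatch_upload(app, file_path, filename, media_type, duration, target_type, target_id):
    upload_folder = app.config['UPLOAD_FOLDER']
    if media_type == 'video':
        import threading
        threading.Thread(target=convert_video_and_update_playlist,
                         args=(app, file_path, filename, target_type, target_id, duration)).start()
        return True  # conversion continues in the background
    if media_type == 'pdf':
        return process_pdf(file_path, upload_folder, duration, target_type, target_id)
    if media_type == 'ppt':
        return process_pptx_improved(file_path, upload_folder, duration, target_type, target_id)
    return False  # unknown media type
```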