Add Containerfile

This commit is contained in:
Jonny Ervine 2025-09-29 20:06:07 +08:00
parent fa37916440
commit e354caa03f
12 changed files with 1617 additions and 112 deletions

133
.dockerignore Normal file
View File

@ -0,0 +1,133 @@
# Git
.git
.gitignore
.gitattributes
# Documentation
README.md
*.md
docs/
# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# Virtual environments
venv/
env/
ENV/
env.bak/
venv.bak/
.venv/
# IDE
.vscode/
.idea/
*.swp
*.swo
*~
# OS
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db
# Logs
*.log
logs/
# Database files (will be created in container)
*.db
*.sqlite
*.sqlite3
# Data directories
data/
temp/
tmp/
# Docker
Dockerfile*
docker-compose*.yml
.dockerignore
# Environment files
.env
.env.local
.env.production
.env.development
# Node modules (if any)
node_modules/
# Coverage reports
htmlcov/
.coverage
.coverage.*
coverage.xml
# Testing
.pytest_cache/
.tox/
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# pipenv
Pipfile.lock
# PEP 582
__pypackages__/
# Celery
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json

76
Containerfile Normal file
View File

@ -0,0 +1,76 @@
# syntax=docker/dockerfile:1
# Multi-stage Containerfile for Hockey Results Application
# Supports multiple database backends (PostgreSQL, MariaDB, SQLite)

# ---- Stage 1: build ----
FROM python:3.11-slim AS builder

# Build-time Python/pip behaviour (no .pyc files, unbuffered output, lean pip)
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PIP_NO_CACHE_DIR=1 \
    PIP_DISABLE_PIP_VERSION_CHECK=1

# Toolchain and client headers needed only to compile psycopg2/mysqlclient wheels
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    default-libmysqlclient-dev \
    libpq-dev \
    pkg-config \
    && rm -rf /var/lib/apt/lists/*

# Install everything into a virtualenv so the runtime stage can copy it wholesale
RUN python -m venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"

# BUG FIX: the original single instruction
#   COPY requirements.txt motm_app/requirements.txt ./
# flattens BOTH sources into ./requirements.txt (the second overwrites the
# first), so the later `pip install -r motm_app/requirements.txt` could not
# find its file. Copy each file preserving the directory layout instead.
COPY requirements.txt ./
COPY motm_app/requirements.txt ./motm_app/
RUN pip install --upgrade pip && \
    pip install -r requirements.txt && \
    pip install -r motm_app/requirements.txt

# ---- Stage 2: runtime ----
FROM python:3.11-slim

# Runtime environment; SQLite is the default backend (see motm_app/database.py)
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PATH="/opt/venv/bin:$PATH" \
    DATABASE_TYPE=sqlite \
    FLASK_ENV=production \
    FLASK_APP=motm_app/main.py

# Runtime-only dependencies: libpq5 for psycopg2, MySQL client libs,
# curl for the HEALTHCHECK below
RUN apt-get update && apt-get install -y --no-install-recommends \
    curl \
    default-mysql-client \
    libpq5 \
    && rm -rf /var/lib/apt/lists/*

# Copy the pre-built virtual environment from the builder stage
COPY --from=builder /opt/venv /opt/venv

# Create a dedicated non-root user for the application
RUN groupadd -r appuser && useradd -r -g appuser appuser

WORKDIR /app

# Copy application code (build context is pruned by .dockerignore)
COPY --chown=appuser:appuser . .

# Writable locations for the SQLite database file and logs
RUN mkdir -p /app/data /app/logs && \
    chown -R appuser:appuser /app/data /app/logs

USER appuser

# Documentation only — publish with -p 5000:5000 (unprivileged port, ok as non-root)
EXPOSE 5000

HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:5000/ || exit 1

# NOTE(review): docker/start.sh (added in the same commit) waits for the
# database before launching the app but is never invoked here — confirm
# whether CMD should be ["bash", "docker/start.sh"] instead.
CMD ["python", "motm_app/main.py"]

362
DOCKER.md Normal file
View File

@ -0,0 +1,362 @@
# Docker Containerization Guide
This guide explains how to run the Hockey Results Application using Docker containers with support for multiple database backends.
## 🏒 Quick Start
### Using Docker Compose (Recommended)
```bash
# Start with PostgreSQL (default)
docker-compose up -d
# Start with MariaDB
docker-compose --profile mariadb up -d
# Start with SQLite (no database container needed)
DATABASE_TYPE=sqlite docker-compose up -d
```
### Using Docker Commands
```bash
# Build the image
./docker/build.sh
# Run with SQLite
./docker/run.sh
# Run with PostgreSQL
./docker/run.sh -d postgresql
# Run with MariaDB
./docker/run.sh -d mysql
```
## 🗄️ Database Options
### 1. SQLite (Default)
- **Pros**: No external dependencies, perfect for development
- **Cons**: Not suitable for production with multiple users
- **Use case**: Development, testing, single-user deployments
```bash
# Using docker-compose
DATABASE_TYPE=sqlite docker-compose up -d
# Using docker run
./docker/run.sh -d sqlite
```
### 2. PostgreSQL (Recommended for Production)
- **Pros**: Robust, ACID compliant, excellent performance
- **Cons**: Requires external database container
- **Use case**: Production deployments, multi-user applications
```bash
# Using docker-compose (default)
docker-compose up -d
# Using docker run (requires external PostgreSQL)
./docker/run.sh -d postgresql
```
### 3. MariaDB/MySQL
- **Pros**: Widely supported, good performance
- **Cons**: Requires external database container
- **Use case**: Legacy systems, specific MySQL requirements
```bash
# Using docker-compose
docker-compose --profile mariadb up -d
# Using docker run (requires external MariaDB)
./docker/run.sh -d mysql
```
## 🚀 Deployment Options
### Development Environment
```bash
# Clone the repository
git clone <repository-url>
cd gcp-hockey-results
# Start with SQLite for development
./docker/run.sh -d sqlite -p 5000:5000
# Access the application
open http://localhost:5000
```
### Production Environment
#### Option 1: Docker Compose (Single Server)
```bash
# Set production environment variables
export SECRET_KEY="your-production-secret-key"
export BASIC_AUTH_PASSWORD="strong-production-password"
export POSTGRES_PASSWORD="strong-database-password"
# Start production stack
docker-compose up -d
# Scale the application (optional)
docker-compose up -d --scale hockey-app=3
```
#### Option 2: Kubernetes (Multi-Server)
```yaml
# kubernetes/deployment.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: hockey-app
spec:
replicas: 3
selector:
matchLabels:
app: hockey-app
template:
metadata:
labels:
app: hockey-app
spec:
containers:
- name: hockey-app
image: hockey-results:latest
ports:
- containerPort: 5000
env:
- name: DATABASE_TYPE
value: "postgresql"
- name: POSTGRES_HOST
value: "postgres-service"
```
#### Option 3: Cloud Platforms
**Google Cloud Run:**
```bash
# Build and push to Google Container Registry
./docker/build.sh --registry gcr.io/your-project-id --push
# Deploy to Cloud Run
gcloud run deploy hockey-app \
--image gcr.io/your-project-id/hockey-results:latest \
--platform managed \
--region us-central1 \
--set-env-vars DATABASE_TYPE=postgresql
```
**AWS ECS:**
```bash
# Build and push to ECR
./docker/build.sh --registry your-account.dkr.ecr.region.amazonaws.com --push
# Create ECS task definition with the image
```
## 🔧 Configuration
### Environment Variables
| Variable | Default | Description |
|----------|---------|-------------|
| `DATABASE_TYPE` | `sqlite` | Database type: sqlite, postgresql, mysql |
| `SECRET_KEY` | `your-secret-key-change-in-production` | Flask secret key |
| `FLASK_ENV` | `production` | Flask environment |
| `BASIC_AUTH_USERNAME` | `admin` | Basic auth username |
| `BASIC_AUTH_PASSWORD` | `letmein` | Basic auth password |
### PostgreSQL Configuration
| Variable | Default | Description |
|----------|---------|-------------|
| `POSTGRES_HOST` | `postgres` | PostgreSQL host |
| `POSTGRES_PORT` | `5432` | PostgreSQL port |
| `POSTGRES_DATABASE` | `hockey_results` | Database name |
| `POSTGRES_USER` | `hockey_user` | Database user |
| `POSTGRES_PASSWORD` | `hockey_password` | Database password |
### MySQL/MariaDB Configuration
| Variable | Default | Description |
|----------|---------|-------------|
| `MYSQL_HOST` | `mariadb` | MySQL host |
| `MYSQL_PORT` | `3306` | MySQL port |
| `MYSQL_DATABASE` | `hockey_results` | Database name |
| `MYSQL_USER` | `hockey_user` | Database user |
| `MYSQL_PASSWORD` | `hockey_password` | Database password |
## 📊 Monitoring and Logs
### View Logs
```bash
# Docker Compose
docker-compose logs -f hockey-app
# Docker Run
docker logs -f hockey-results-app
```
### Health Checks
```bash
# Check container health
docker ps
# Manual health check
curl -f http://localhost:5000/ || echo "Application is not healthy"
```
### Database Connection Test
```bash
# Test PostgreSQL connection
docker exec hockey-results-app python -c "
from motm_app.database import db_config
print('Database URL:', db_config.database_url)
"
# Test application database initialization
docker exec hockey-results-app python -c "
from motm_app.database import init_database
init_database()
print('Database initialized successfully')
"
```
## 🛠️ Development
### Local Development with Docker
```bash
# Build development image
./docker/build.sh -t dev
# Run with volume mount for live code changes
docker run -it --rm \
-p 5000:5000 \
-v $(pwd):/app \
-e FLASK_ENV=development \
-e FLASK_DEBUG=true \
hockey-results:dev
```
### Database Migrations
```bash
# Run migrations inside container
docker exec hockey-results-app python -c "
from motm_app.database import init_database
init_database()
"
```
### Backup and Restore
```bash
# Backup PostgreSQL data
docker exec hockey-postgres pg_dump -U hockey_user hockey_results > backup.sql
# Restore PostgreSQL data
docker exec -i hockey-postgres psql -U hockey_user hockey_results < backup.sql
# Backup SQLite data
docker cp hockey-results-app:/app/data/hockey_results.db ./backup.db
```
## 🔒 Security Considerations
### Production Security
1. **Change default passwords**:
```bash
export SECRET_KEY="$(openssl rand -hex 32)"
export BASIC_AUTH_PASSWORD="$(openssl rand -base64 32)"
export POSTGRES_PASSWORD="$(openssl rand -base64 32)"
```
2. **Use secrets management**:
```yaml
# docker-compose.prod.yml
services:
hockey-app:
environment:
- SECRET_KEY_FILE=/run/secrets/secret_key
secrets:
- secret_key
secrets:
secret_key:
file: ./secrets/secret_key.txt
```
3. **Network security**:
```yaml
# Use custom networks
networks:
hockey-network:
driver: bridge
ipam:
config:
- subnet: 172.20.0.0/16
```
## 🚨 Troubleshooting
### Common Issues
**Container won't start:**
```bash
# Check logs
docker logs hockey-results-app
# Check if port is already in use
netstat -tlnp | grep :5000
```
**Database connection issues:**
```bash
# Check database container status
docker-compose ps
# Test database connectivity
docker exec hockey-results-app python -c "
import psycopg2
conn = psycopg2.connect(host='postgres', port=5432, database='hockey_results', user='hockey_user', password='hockey_password')
print('Database connected successfully')
"
```
**Permission issues:**
```bash
# Fix file permissions
sudo chown -R $USER:$USER ./data
chmod -R 755 ./data
```
### Performance Tuning
**Database Optimization:**
```bash
# PostgreSQL tuning
docker exec hockey-postgres psql -U hockey_user hockey_results -c "
ALTER SYSTEM SET shared_buffers = '256MB';
ALTER SYSTEM SET effective_cache_size = '1GB';
SELECT pg_reload_conf();
"
```
**Application Scaling:**
```bash
# Scale application containers
docker-compose up -d --scale hockey-app=3
# Use load balancer
docker-compose --profile nginx up -d
```
## 📚 Additional Resources
- [Docker Documentation](https://docs.docker.com/)
- [Docker Compose Documentation](https://docs.docker.com/compose/)
- [PostgreSQL Docker Image](https://hub.docker.com/_/postgres)
- [MariaDB Docker Image](https://hub.docker.com/_/mariadb)
- [SQLAlchemy Documentation](https://docs.sqlalchemy.org/)

125
docker-compose.yml Normal file
View File

@ -0,0 +1,125 @@
version: '3.8'  # NOTE(review): top-level `version` is obsolete under Compose v2; harmless, kept for older docker-compose

services:

  # Hockey Results Application (built from the Containerfile in this repo)
  hockey-app:
    build:
      context: .
      dockerfile: Containerfile
    container_name: hockey-results-app
    ports:
      - "5000:5000"
    # WARNING: the credentials and SECRET_KEY below are development defaults —
    # override them in production (see DOCKER.md, "Security Considerations").
    environment:
      - DATABASE_TYPE=postgresql
      - POSTGRES_HOST=postgres
      - POSTGRES_PORT=5432
      - POSTGRES_DATABASE=hockey_results
      - POSTGRES_USER=hockey_user
      - POSTGRES_PASSWORD=hockey_password
      - FLASK_ENV=production
      - SECRET_KEY=your-secret-key-change-in-production
      - BASIC_AUTH_USERNAME=admin
      - BASIC_AUTH_PASSWORD=letmein
    volumes:
      - ./data:/app/data
      - ./logs:/app/logs
    depends_on:
      postgres:
        condition: service_healthy
    restart: unless-stopped
    networks:
      - hockey-network

  # PostgreSQL database (default backend)
  postgres:
    image: postgres:15-alpine
    container_name: hockey-postgres
    environment:
      - POSTGRES_DB=hockey_results
      - POSTGRES_USER=hockey_user
      - POSTGRES_PASSWORD=hockey_password
      - POSTGRES_INITDB_ARGS=--encoding=UTF-8 --lc-collate=C --lc-ctype=C
    volumes:
      - postgres_data:/var/lib/postgresql/data
      - ./init-scripts:/docker-entrypoint-initdb.d
    ports:
      - "5432:5432"
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U hockey_user -d hockey_results"]
      interval: 10s
      timeout: 5s
      retries: 5
    restart: unless-stopped
    networks:
      - hockey-network

  # MariaDB database (alternative backend; enable with --profile mariadb)
  mariadb:
    image: mariadb:10.11
    container_name: hockey-mariadb
    environment:
      - MYSQL_ROOT_PASSWORD=root_password
      - MYSQL_DATABASE=hockey_results
      - MYSQL_USER=hockey_user
      - MYSQL_PASSWORD=hockey_password
      - MYSQL_CHARSET=utf8mb4
      - MYSQL_COLLATION=utf8mb4_unicode_ci
    volumes:
      - mariadb_data:/var/lib/mysql
      - ./init-scripts:/docker-entrypoint-initdb.d
    ports:
      - "3306:3306"
    healthcheck:
      # $$ escapes to a literal $ so MYSQL_PASSWORD is resolved inside the container
      test: ["CMD", "mysqladmin", "ping", "-h", "localhost", "-u", "hockey_user", "-p$$MYSQL_PASSWORD"]
      interval: 10s
      timeout: 5s
      retries: 5
    restart: unless-stopped
    networks:
      - hockey-network
    profiles:
      - mariadb

  # Redis for caching (optional; enable with --profile redis)
  redis:
    image: redis:7-alpine
    container_name: hockey-redis
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    restart: unless-stopped
    networks:
      - hockey-network
    profiles:
      - redis

  # Nginx reverse proxy (optional; enable with --profile nginx)
  nginx:
    image: nginx:alpine
    container_name: hockey-nginx
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - ./nginx.conf:/etc/nginx/nginx.conf:ro
      - ./ssl:/etc/nginx/ssl:ro
    depends_on:
      - hockey-app
    restart: unless-stopped
    networks:
      - hockey-network
    profiles:
      - nginx

volumes:
  postgres_data:
    driver: local
  mariadb_data:
    driver: local
  redis_data:
    driver: local

networks:
  hockey-network:
    driver: bridge

127
docker/build.sh Normal file
View File

@ -0,0 +1,127 @@
#!/bin/bash
# Docker build script for Hockey Results Application.
# Builds the image from the repo-root Containerfile; optionally tags and
# pushes it to a registry. Must be run from the repository root.
set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Configuration
IMAGE_NAME="hockey-results"
IMAGE_TAG="latest"
FULL_IMAGE_NAME="${IMAGE_NAME}:${IMAGE_TAG}"

echo -e "${BLUE}🏒 Building Hockey Results Application Docker Image${NC}"
echo -e "${BLUE}================================================${NC}"

# Function to display usage
usage() {
    echo "Usage: $0 [OPTIONS]"
    echo ""
    echo "Options:"
    echo "  -t, --tag TAG        Set image tag (default: latest)"
    echo "  -n, --name NAME      Set image name (default: hockey-results)"
    echo "  --registry REG       Registry prefix used when pushing (with --push)"
    echo "  --no-cache           Build without cache"
    echo "  --push               Push image to registry after build"
    echo "  -h, --help           Show this help message"
    echo ""
    echo "Examples:"
    echo "  $0                   # Build with default settings"
    echo "  $0 -t v1.0.0         # Build with specific tag"
    echo "  $0 --no-cache        # Build without cache"
    echo "  $0 -t v1.0.0 --push  # Build and push to registry"
}

# Parse command line arguments
PUSH_IMAGE=false
NO_CACHE=""
REGISTRY=""

while [[ $# -gt 0 ]]; do
    case $1 in
        -t|--tag)
            IMAGE_TAG="$2"
            FULL_IMAGE_NAME="${IMAGE_NAME}:${IMAGE_TAG}"
            shift 2
            ;;
        -n|--name)
            IMAGE_NAME="$2"
            FULL_IMAGE_NAME="${IMAGE_NAME}:${IMAGE_TAG}"
            shift 2
            ;;
        --no-cache)
            NO_CACHE="--no-cache"
            shift
            ;;
        --push)
            PUSH_IMAGE=true
            shift
            ;;
        --registry)
            REGISTRY="$2"
            shift 2
            ;;
        -h|--help)
            usage
            exit 0
            ;;
        *)
            echo -e "${RED}Unknown option: $1${NC}"
            usage
            exit 1
            ;;
    esac
done

echo -e "${YELLOW}📋 Build Configuration:${NC}"
echo -e "  Image Name: ${GREEN}${FULL_IMAGE_NAME}${NC}"
echo -e "  No Cache:   ${GREEN}${NO_CACHE:-false}${NC}"
echo -e "  Push Image: ${GREEN}${PUSH_IMAGE}${NC}"
echo ""

# Check if Docker is running
if ! docker info > /dev/null 2>&1; then
    echo -e "${RED}❌ Docker is not running. Please start Docker and try again.${NC}"
    exit 1
fi

# Build the image.
# BUG FIX: the recipe file in this repo is named Containerfile, not Dockerfile,
# so `docker build` needs an explicit -f (plain `docker build .` would fail;
# .dockerignore even excludes Dockerfile*).
echo -e "${BLUE}🔨 Building Docker image...${NC}"
# shellcheck disable=SC2086  # $NO_CACHE must expand to nothing when unset
if docker build $NO_CACHE -f Containerfile -t "$FULL_IMAGE_NAME" .; then
    echo -e "${GREEN}✅ Docker image built successfully: ${FULL_IMAGE_NAME}${NC}"
else
    echo -e "${RED}❌ Docker build failed${NC}"
    exit 1
fi

# Show image information
echo -e "${BLUE}📊 Image Information:${NC}"
docker images "$FULL_IMAGE_NAME"

# Push image if requested
if [ "$PUSH_IMAGE" = true ]; then
    echo -e "${BLUE}📤 Pushing image to registry...${NC}"
    if [ -n "$REGISTRY" ]; then
        FULL_REGISTRY_NAME="${REGISTRY}/${FULL_IMAGE_NAME}"
        docker tag "$FULL_IMAGE_NAME" "$FULL_REGISTRY_NAME"
        docker push "$FULL_REGISTRY_NAME"
        echo -e "${GREEN}✅ Image pushed to registry: ${FULL_REGISTRY_NAME}${NC}"
    else
        docker push "$FULL_IMAGE_NAME"
        echo -e "${GREEN}✅ Image pushed to registry: ${FULL_IMAGE_NAME}${NC}"
    fi
fi

echo -e "${GREEN}🎉 Build process completed successfully!${NC}"

# Show next steps.
# BUG FIX: the original suggested `--profile postgres`, but docker-compose.yml
# defines no such profile — PostgreSQL is the default stack.
echo -e "${YELLOW}📝 Next Steps:${NC}"
echo -e "  To run the container:        ${BLUE}docker run -p 5000:5000 ${FULL_IMAGE_NAME}${NC}"
echo -e "  To run with docker-compose:  ${BLUE}docker-compose up${NC} (PostgreSQL is the default)"
echo -e "  To run with MariaDB:         ${BLUE}docker-compose --profile mariadb up${NC}"

192
docker/run.sh Normal file
View File

@ -0,0 +1,192 @@
#!/bin/bash
# Docker run script for Hockey Results Application
set -e
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Default configuration
IMAGE_NAME="hockey-results:latest"
CONTAINER_NAME="hockey-results-app"
PORT="5000:5000"
DATABASE_TYPE="sqlite"
VOLUME_MOUNT=""
# Function to display usage
usage() {
echo "Usage: $0 [OPTIONS]"
echo ""
echo "Options:"
echo " -i, --image IMAGE Docker image name (default: hockey-results:latest)"
echo " -c, --container NAME Container name (default: hockey-results-app)"
echo " -p, --port PORT Port mapping (default: 5000:5000)"
echo " -d, --database TYPE Database type: sqlite, postgresql, mysql (default: sqlite)"
echo " -v, --volume PATH Mount volume for data persistence"
echo " --detach Run container in background"
echo " --rm Remove container when it exits"
echo " --env KEY=VALUE Set environment variable"
echo " -h, --help Show this help message"
echo ""
echo "Database Types:"
echo " sqlite Use SQLite database (default, no external dependencies)"
echo " postgresql Use PostgreSQL database"
echo " mysql Use MySQL/MariaDB database"
echo ""
echo "Examples:"
echo " $0 # Run with SQLite"
echo " $0 -d postgresql # Run with PostgreSQL"
echo " $0 -v ./data:/app/data # Run with data persistence"
echo " $0 -p 8080:5000 # Run on different port"
echo " $0 --detach # Run in background"
}
# Parse command line arguments
DETACH=""
REMOVE=""
ENV_VARS=""
while [[ $# -gt 0 ]]; do
case $1 in
-i|--image)
IMAGE_NAME="$2"
shift 2
;;
-c|--container)
CONTAINER_NAME="$2"
shift 2
;;
-p|--port)
PORT="$2"
shift 2
;;
-d|--database)
DATABASE_TYPE="$2"
shift 2
;;
-v|--volume)
VOLUME_MOUNT="-v $2"
shift 2
;;
--detach)
DETACH="-d"
shift
;;
--rm)
REMOVE="--rm"
shift
;;
--env)
ENV_VARS="$ENV_VARS -e $2"
shift 2
;;
-h|--help)
usage
exit 0
;;
*)
echo -e "${RED}Unknown option: $1${NC}"
usage
exit 1
;;
esac
done
echo -e "${BLUE}🏒 Running Hockey Results Application${NC}"
echo -e "${BLUE}====================================${NC}"
# Check if Docker is running
if ! docker info > /dev/null 2>&1; then
echo -e "${RED}❌ Docker is not running. Please start Docker and try again.${NC}"
exit 1
fi
# Check if image exists
if ! docker images -q "$IMAGE_NAME" | grep -q .; then
echo -e "${YELLOW}⚠️ Image $IMAGE_NAME not found. Building it first...${NC}"
./docker/build.sh -n "$(echo $IMAGE_NAME | cut -d':' -f1)" -t "$(echo $IMAGE_NAME | cut -d':' -f2)"
fi
# Stop existing container if running
if docker ps -q -f name="$CONTAINER_NAME" | grep -q .; then
echo -e "${YELLOW}🛑 Stopping existing container...${NC}"
docker stop "$CONTAINER_NAME"
fi
# Remove existing container if it exists
if docker ps -aq -f name="$CONTAINER_NAME" | grep -q .; then
echo -e "${YELLOW}🗑️ Removing existing container...${NC}"
docker rm "$CONTAINER_NAME"
fi
# Set environment variables based on database type
case $DATABASE_TYPE in
"postgresql")
ENV_VARS="$ENV_VARS -e DATABASE_TYPE=postgresql"
ENV_VARS="$ENV_VARS -e POSTGRES_HOST=host.docker.internal"
ENV_VARS="$ENV_VARS -e POSTGRES_PORT=5432"
ENV_VARS="$ENV_VARS -e POSTGRES_DATABASE=hockey_results"
ENV_VARS="$ENV_VARS -e POSTGRES_USER=hockey_user"
ENV_VARS="$ENV_VARS -e POSTGRES_PASSWORD=hockey_password"
echo -e "${YELLOW}📊 Using PostgreSQL database${NC}"
;;
"mysql")
ENV_VARS="$ENV_VARS -e DATABASE_TYPE=mysql"
ENV_VARS="$ENV_VARS -e MYSQL_HOST=host.docker.internal"
ENV_VARS="$ENV_VARS -e MYSQL_PORT=3306"
ENV_VARS="$ENV_VARS -e MYSQL_DATABASE=hockey_results"
ENV_VARS="$ENV_VARS -e MYSQL_USER=hockey_user"
ENV_VARS="$ENV_VARS -e MYSQL_PASSWORD=hockey_password"
echo -e "${YELLOW}📊 Using MySQL/MariaDB database${NC}"
;;
"sqlite")
ENV_VARS="$ENV_VARS -e DATABASE_TYPE=sqlite"
ENV_VARS="$ENV_VARS -e SQLITE_DATABASE_PATH=/app/data/hockey_results.db"
VOLUME_MOUNT="$VOLUME_MOUNT -v $(pwd)/data:/app/data"
echo -e "${YELLOW}📊 Using SQLite database${NC}"
;;
*)
echo -e "${RED}❌ Unsupported database type: $DATABASE_TYPE${NC}"
echo -e "Supported types: sqlite, postgresql, mysql"
exit 1
;;
esac
echo -e "${BLUE}🚀 Starting container...${NC}"
echo -e " Image: ${GREEN}$IMAGE_NAME${NC}"
echo -e " Container: ${GREEN}$CONTAINER_NAME${NC}"
echo -e " Port: ${GREEN}$PORT${NC}"
echo -e " Database: ${GREEN}$DATABASE_TYPE${NC}"
# Run the container
docker run \
$DETACH \
$REMOVE \
--name "$CONTAINER_NAME" \
-p "$PORT" \
$VOLUME_MOUNT \
$ENV_VARS \
"$IMAGE_NAME"
if [ $? -eq 0 ]; then
if [ -n "$DETACH" ]; then
echo -e "${GREEN}✅ Container started successfully in background${NC}"
echo -e "${BLUE}📝 Container Information:${NC}"
docker ps -f name="$CONTAINER_NAME" --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}"
echo ""
echo -e "${YELLOW}📋 Useful Commands:${NC}"
echo -e " View logs: ${BLUE}docker logs $CONTAINER_NAME${NC}"
echo -e " Stop container: ${BLUE}docker stop $CONTAINER_NAME${NC}"
echo -e " Remove container: ${BLUE}docker rm $CONTAINER_NAME${NC}"
echo -e " Access shell: ${BLUE}docker exec -it $CONTAINER_NAME /bin/bash${NC}"
else
echo -e "${GREEN}✅ Container finished${NC}"
fi
else
echo -e "${RED}❌ Failed to start container${NC}"
exit 1
fi

68
docker/start.sh Normal file
View File

@ -0,0 +1,68 @@
#!/bin/bash
# Docker startup script for Hockey Results Application.
# Waits for the configured database (unless SQLite), initializes the schema,
# then execs the Flask app so it becomes PID 1 and receives signals.
set -e

echo "🏒 Starting Hockey Results Application..."

# Wait until the configured database accepts connections, or exit non-zero
# after max_attempts tries (2s apart).
wait_for_database() {
    local db_type=$1
    local max_attempts=30
    local attempt=1

    echo "⏳ Waiting for $db_type database to be ready..."

    case $db_type in
        "postgresql")
            while [ $attempt -le $max_attempts ]; do
                if python -c "import psycopg2; psycopg2.connect(host='$POSTGRES_HOST', port='$POSTGRES_PORT', database='$POSTGRES_DATABASE', user='$POSTGRES_USER', password='$POSTGRES_PASSWORD')" 2>/dev/null; then
                    echo "✅ PostgreSQL database is ready!"
                    return 0
                fi
                echo "Attempt $attempt/$max_attempts: Database not ready, waiting..."
                sleep 2
                attempt=$((attempt + 1))
            done
            ;;
        "mysql"|"mariadb")
            while [ $attempt -le $max_attempts ]; do
                # BUG FIX: pymysql requires `port` as an int; the original passed
                # the string '$MYSQL_PORT', so connect() raised on every attempt
                # and this loop always hit the timeout.
                # NOTE(review): credentials are interpolated into the Python
                # snippet — values containing single quotes will break it.
                if python -c "import pymysql; pymysql.connect(host='$MYSQL_HOST', port=int('$MYSQL_PORT'), database='$MYSQL_DATABASE', user='$MYSQL_USER', password='$MYSQL_PASSWORD')" 2>/dev/null; then
                    echo "✅ MySQL/MariaDB database is ready!"
                    return 0
                fi
                echo "Attempt $attempt/$max_attempts: Database not ready, waiting..."
                sleep 2
                attempt=$((attempt + 1))
            done
            ;;
    esac

    echo "❌ Database connection timeout after $max_attempts attempts"
    exit 1
}

# Create tables if needed; a failure here is logged but not fatal.
init_database() {
    echo "🔧 Initializing database..."
    python -c "
from motm_app.database import init_database
try:
    init_database()
    print('✅ Database initialized successfully')
except Exception as e:
    print(f'⚠️ Database initialization warning: {e}')
"
}

# Wait for database if not using SQLite (SQLite needs no server)
if [ "$DATABASE_TYPE" != "sqlite" ]; then
    wait_for_database "$DATABASE_TYPE"
fi

# Initialize database
init_database

# Start the application; exec replaces the shell so the app is PID 1
echo "🚀 Starting Flask application..."
exec python motm_app/main.py

112
motm_app/alembic.ini Normal file
View File

@ -0,0 +1,112 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = migrations
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version number format
version_num_format = %04d
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses
# os.pathsep. If this key is omitted entirely, it falls back to the legacy
# behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = sqlite:///hockey_results.db
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

292
motm_app/database.py Normal file
View File

@ -0,0 +1,292 @@
# encoding=utf-8
"""
Database configuration and models for multi-database support using SQLAlchemy.
Supports PostgreSQL, MariaDB/MySQL, and SQLite.
"""
import os
from sqlalchemy import create_engine, Column, Integer, String, Date, DateTime, Text, SmallInteger, ForeignKey, Boolean, Float
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy.dialects.mysql import LONGTEXT
from sqlalchemy.dialects.postgresql import JSON
from datetime import datetime
# Base class for all models (registers every subclass on Base.metadata).
# NOTE(review): `declarative_base` is imported above from
# `sqlalchemy.ext.declarative`, the legacy pre-1.4 location;
# `sqlalchemy.orm.declarative_base` is the modern equivalent — confirm the
# installed SQLAlchemy version before moving it.
Base = declarative_base()
class DatabaseConfig:
    """Database configuration for multiple backend support.

    Builds a SQLAlchemy URL from the ``DATABASE_TYPE`` environment variable
    (plus backend-specific ``POSTGRES_*`` / ``MYSQL_*`` / ``SQLITE_*`` vars),
    then creates the engine and a session factory. Supported backends:
    PostgreSQL, MySQL/MariaDB, and SQLite (the default).
    """

    def __init__(self):
        # Engine and session factory are created eagerly, so constructing
        # this object validates DATABASE_TYPE immediately (ValueError if bad).
        self.database_url = self._get_database_url()
        self.engine = create_engine(self.database_url, echo=False)
        self.SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=self.engine)

    def _get_database_url(self):
        """Return the SQLAlchemy URL for the configured DATABASE_TYPE.

        Raises:
            ValueError: if DATABASE_TYPE is not one of
                sqlite / postgresql / mysql / mariadb.
        """
        db_type = os.getenv('DATABASE_TYPE', 'sqlite').lower()
        if db_type == 'postgresql':
            return self._get_postgresql_url()
        elif db_type in ['mysql', 'mariadb']:
            return self._get_mysql_url()
        elif db_type == 'sqlite':
            return self._get_sqlite_url()
        else:
            raise ValueError(f"Unsupported database type: {db_type}")

    def _get_postgresql_url(self):
        """Get PostgreSQL connection URL from POSTGRES_* env vars."""
        from urllib.parse import quote_plus  # local import keeps module import block unchanged
        host = os.getenv('POSTGRES_HOST', 'localhost')
        port = os.getenv('POSTGRES_PORT', '5432')
        database = os.getenv('POSTGRES_DATABASE', 'hockey_results')
        # BUG FIX: user/password are URL-escaped so credentials containing
        # '@', ':' or '/' no longer yield a malformed connection URL.
        username = quote_plus(os.getenv('POSTGRES_USER', 'postgres'))
        password = quote_plus(os.getenv('POSTGRES_PASSWORD', ''))
        return f"postgresql://{username}:{password}@{host}:{port}/{database}"

    def _get_mysql_url(self):
        """Get MySQL/MariaDB connection URL (pymysql driver) from MYSQL_* env vars."""
        from urllib.parse import quote_plus
        host = os.getenv('MYSQL_HOST', 'localhost')
        port = os.getenv('MYSQL_PORT', '3306')
        database = os.getenv('MYSQL_DATABASE', 'hockey_results')
        # Same escaping fix as the PostgreSQL URL above.
        username = quote_plus(os.getenv('MYSQL_USER', 'root'))
        password = quote_plus(os.getenv('MYSQL_PASSWORD', ''))
        charset = os.getenv('MYSQL_CHARSET', 'utf8mb4')
        return f"mysql+pymysql://{username}:{password}@{host}:{port}/{database}?charset={charset}"

    def _get_sqlite_url(self):
        """Get SQLite connection URL; path from SQLITE_DATABASE_PATH (relative by default)."""
        database_path = os.getenv('SQLITE_DATABASE_PATH', 'hockey_results.db')
        return f"sqlite:///{database_path}"

    def get_session(self):
        """Return a new Session from the session factory (caller must close it)."""
        return self.SessionLocal()

    def create_tables(self):
        """Create all tables registered on Base.metadata (no-op for existing tables)."""
        Base.metadata.create_all(bind=self.engine)
# Global database configuration instance.
# NOTE(review): constructed at import time — importing this module builds the
# engine immediately, and an unsupported DATABASE_TYPE raises ValueError on
# import rather than at first use.
db_config = DatabaseConfig()
# Database Models
class Player(Base):
    """Player master record.

    The primary key is the player's number (not a surrogate id), so
    renumbering a player creates a new row.
    """
    __tablename__ = 'players'

    player_number = Column(Integer, primary_key=True)  # squad number doubles as PK
    player_forenames = Column(String(50))
    player_surname = Column(String(30))
    player_nickname = Column(String(30))
    player_chinese_name = Column(String(10))
    player_email = Column(String(255))
    player_dob = Column(Date)  # date of birth
    # NOTE(review): presumably a Hong Kong ID number — confirm, and consider
    # whether it should be stored in plain text at all.
    player_hkid = Column(String(20))
    player_tel_number = Column(String(30))
    player_team = Column(String(6))  # short team code; 6 chars — confirm sufficient
    player_picture_url = Column(String(255))
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
class Club(Base):
    """Hockey club; ``hockey_club`` (the name) is unique and is what
    Team.club foreign-keys against."""
    __tablename__ = 'clubs'

    id = Column(Integer, primary_key=True)
    hockey_club = Column(String(100), unique=True, nullable=False)  # club name, FK target
    logo_url = Column(String(255))
    created_at = Column(DateTime, default=datetime.utcnow)
class Team(Base):
    """A team within a club (e.g. one side per league)."""
    __tablename__ = 'teams'

    id = Column(Integer, primary_key=True)
    # FK references clubs.hockey_club, a unique name column rather than the PK.
    club = Column(String(100), ForeignKey('clubs.hockey_club'))
    team = Column(String(10))  # short team identifier within the club
    display_name = Column(String(100))
    league = Column(String(50))
    created_at = Column(DateTime, default=datetime.utcnow)
class MatchSquad(Base):
    """A player's selection for a given match date.

    Name fields are denormalized copies of the Player row — they can drift
    from the players table if a player record is later edited.
    """
    __tablename__ = 'match_squad'

    id = Column(Integer, primary_key=True)
    player_number = Column(Integer, ForeignKey('players.player_number'))
    player_forenames = Column(String(50))  # denormalized from Player
    player_surname = Column(String(30))    # denormalized from Player
    player_nickname = Column(String(30))   # denormalized from Player
    match_date = Column(Date)
    created_at = Column(DateTime, default=datetime.utcnow)
class HockeyFixture(Base):
    """League fixture (scheduled or played match).

    ``fixture_number`` is an externally assigned number used as the primary
    key; scores are nullable until the match is played.
    """
    __tablename__ = 'hockey_fixtures'

    fixture_number = Column(Integer, primary_key=True)
    date = Column(Date)
    time = Column(String(10))  # kick-off kept as text, e.g. as published by the league
    home_team = Column(String(100))
    away_team = Column(String(100))
    venue = Column(String(255))
    home_score = Column(Integer)  # null until played
    away_score = Column(Integer)  # null until played
    umpire1 = Column(String(100))
    umpire2 = Column(String(100))
    match_official = Column(String(100))
    division = Column(String(50))
    created_at = Column(DateTime, default=datetime.utcnow)
class AdminSettings(Base):
    """Admin settings model.

    Per-admin-user state for the site: the next fixture/opposition to show,
    current man-of-the-match (MOTM) / dick-of-the-day (DotD) selections, and
    logo URLs. Appears to be a single-row (or per-userid) settings table.
    """
    __tablename__ = 'admin_settings'
    id = Column(Integer, primary_key=True)
    userid = Column(String(50), default='admin')
    next_fixture = Column(Integer)  # presumably a hockey_fixtures.fixture_number — TODO confirm (no FK declared)
    next_club = Column(String(100))
    next_team = Column(String(100))
    next_date = Column(Date)
    curr_motm = Column(Integer, ForeignKey('players.player_number'))  # current MOTM holder
    curr_dotd = Column(Integer, ForeignKey('players.player_number'))  # current DotD holder
    oppo_logo = Column(String(255))  # opposition logo URL
    hkfc_logo = Column(String(255))  # home club logo URL
    motm_url_suffix = Column(String(50))
    prev_fixture = Column(Integer)
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
class MotmVote(Base):
    """MOTM/DotD voting model.

    Tracks per-player vote tallies and goal/assist counts. The `*_total`
    columns appear to be season aggregates while the unsuffixed columns
    are per-fixture values — TODO confirm against the writers.
    """
    __tablename__ = 'motm_votes'
    id = Column(Integer, primary_key=True)
    player_number = Column(Integer, ForeignKey('players.player_number'))
    player_name = Column(String(100))  # denormalised display name
    motm_total = Column(Integer, default=0)
    dotd_total = Column(Integer, default=0)
    goals_total = Column(Integer, default=0)
    assists_total = Column(Integer, default=0)
    fixture_number = Column(Integer)  # fixture these per-match counts refer to
    motm_votes = Column(Integer, default=0)
    dotd_votes = Column(Integer, default=0)
    goals = Column(Integer, default=0)
    assists = Column(Integer, default=0)
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
class MatchComment(Base):
    """Match comments model.

    Free-text commentary attached to a match, keyed informally by
    match date and opposition name (no FK to hockey_fixtures).
    """
    __tablename__ = 'match_comments'
    id = Column(Integer, primary_key=True)
    match_date = Column(Date)
    opposition = Column(String(100))
    comment = Column(Text)
    created_at = Column(DateTime, default=datetime.utcnow)
class HockeyUser(Base):
    """User authentication model.

    Stores login credentials. `password` should hold a password hash,
    not plaintext — NOTE(review): cannot confirm hashing from this file;
    verify at the write site.
    """
    __tablename__ = 'hockey_users'
    id = Column(Integer, primary_key=True)
    username = Column(String(50), unique=True, nullable=False)
    email = Column(String(255), unique=True, nullable=False)
    password = Column(String(255), nullable=False)
    created_at = Column(DateTime, default=datetime.utcnow)
    last_login = Column(DateTime)  # NULL until first successful login
# Database utility functions
def get_db_session():
    """Open and return a new session from the shared db_config factory."""
    session = db_config.get_session()
    return session
def execute_sql(sql_command, params=None):
    """Execute a SQL command inside a session and commit.

    Args:
        sql_command: Raw SQL string or a SQLAlchemy executable object.
        params: Optional dict of bind parameters.

    Returns:
        The result of ``session.execute``.

    Raises:
        Re-raises any database error after rolling the session back.
    """
    # Local import keeps this fix self-contained; sqlalchemy is already a
    # dependency of this module (Base/Column come from it).
    from sqlalchemy import text

    session = get_db_session()
    try:
        # SQLAlchemy 2.x (pinned in requirements) rejects raw SQL strings in
        # Session.execute(); wrap plain strings in text() so legacy callers
        # that pass string commands keep working.
        statement = text(sql_command) if isinstance(sql_command, str) else sql_command
        if params:
            result = session.execute(statement, params)
        else:
            result = session.execute(statement)
        session.commit()
        return result
    except Exception as e:
        session.rollback()
        print(f"SQL Error: {e}")
        raise
    finally:
        session.close()
def fetch_all(sql_command, params=None):
    """Fetch all rows for a SQL query.

    Args:
        sql_command: Raw SQL string or a SQLAlchemy executable object.
        params: Optional dict of bind parameters.

    Returns:
        list: All result rows, or [] on error (errors are printed, not raised).
    """
    from sqlalchemy import text  # see execute_sql: 2.x needs text() for strings

    session = get_db_session()
    try:
        # Wrap raw strings so legacy string-SQL callers work on SQLAlchemy 2.x.
        statement = text(sql_command) if isinstance(sql_command, str) else sql_command
        if params:
            result = session.execute(statement, params)
        else:
            result = session.execute(statement)
        return result.fetchall()
    except Exception as e:
        print(f"SQL Error: {e}")
        return []
    finally:
        session.close()
def fetch_one(sql_command, params=None):
    """Fetch a single row for a SQL query.

    Args:
        sql_command: Raw SQL string or a SQLAlchemy executable object.
        params: Optional dict of bind parameters.

    Returns:
        The first result row, or None if there are no rows or an error
        occurred (errors are printed, not raised).
    """
    from sqlalchemy import text  # see execute_sql: 2.x needs text() for strings

    session = get_db_session()
    try:
        # Wrap raw strings so legacy string-SQL callers work on SQLAlchemy 2.x.
        statement = text(sql_command) if isinstance(sql_command, str) else sql_command
        if params:
            result = session.execute(statement, params)
        else:
            result = session.execute(statement)
        return result.fetchone()
    except Exception as e:
        print(f"SQL Error: {e}")
        return None
    finally:
        session.close()
# Legacy compatibility functions
def sql_write(sql_cmd):
    """Legacy compatibility wrapper: execute a write statement.

    Returns:
        bool: True if the statement executed and committed, False on error
        (the error is printed, never raised to the caller).
    """
    try:
        execute_sql(sql_cmd)
    except Exception as e:
        print(f"Write Error: {e}")
        return False
    return True
def sql_write_static(sql_cmd):
    """Legacy compatibility function for sql_write_static.

    Static and non-static writes now share one SQLAlchemy-backed path,
    so this simply delegates to sql_write().
    """
    return sql_write(sql_cmd)
def sql_read(sql_cmd):
    """Legacy compatibility function: run a read query.

    Args:
        sql_cmd: Raw SQL SELECT string.

    Returns:
        list[dict]: One dict per result row (legacy PyMySQL DictCursor
        shape), or [] on error or empty result.
    """
    try:
        result = fetch_all(sql_cmd)
        # SQLAlchemy 2.x Row objects are tuples and are not directly
        # dict()-convertible; the column-name mapping lives on Row._mapping.
        return [dict(row._mapping) for row in result] if result else []
    except Exception as e:
        print(f"Read Error: {e}")
        return []
def sql_read_static(sql_cmd):
    """Legacy compatibility function for sql_read_static.

    Static and non-static reads now share one SQLAlchemy-backed path,
    so this simply delegates to sql_read().
    """
    return sql_read(sql_cmd)
# Initialize database tables
def init_database():
    """Create all database tables via db_config, re-raising on failure."""
    try:
        db_config.create_tables()
    except Exception as e:
        # Surface the failure to the caller after logging it.
        print(f"Database initialization error: {e}")
        raise
    else:
        print("Database tables created successfully")

View File

@ -1,121 +1,116 @@
# encoding=utf-8 # encoding=utf-8
import pymysql """
Database configuration module with SQLAlchemy support.
This module provides backward compatibility with the old PyMySQL-based functions
while using SQLAlchemy for database operations.
"""
import os import os
import json import warnings
from database import (
# These environment variables are configured in app.yaml. db_config,
CLOUDSQL_CONNECTION_NAME = "hk-hockey:asia-east2:hk-hockey-sql" sql_write,
LOCAL_DB_SERVER = "mariadb.db.svc.cluster.local" sql_write_static,
CLOUDSQL_USER = "root" sql_read,
CLOUDSQL_WRITE_USER = "hockeyWrite" sql_read_static,
CLOUDSQL_READ_USER = "hockeyRead" get_db_session,
CLOUDSQL_PASSWORD = "P8P1YopMlwg8TxhE" execute_sql,
CLOUDSQL_WRITE_PASSWORD = "1URYcxXXlQ6xOWgj" fetch_all,
CLOUDSQL_READ_PASSWORD = "o4GWrbbkBKy3oR6u" fetch_one,
CLOUDSQL_DATABASE = "20209_hockeyResults" init_database
LOCAL_DATABASE = "hockeyResults2021" )
CLOUDSQL_DATABASE_STATIC = "hockeyResults"
CLOUDSQL_CHARSET = "utf8"
# Legacy constants for backward compatibility
CLOUDSQL_CONNECTION_NAME = os.getenv('CLOUDSQL_CONNECTION_NAME', "hk-hockey:asia-east2:hk-hockey-sql")
LOCAL_DB_SERVER = os.getenv('LOCAL_DB_SERVER', "mariadb.db.svc.cluster.local")
CLOUDSQL_USER = os.getenv('CLOUDSQL_USER', "root")
CLOUDSQL_WRITE_USER = os.getenv('CLOUDSQL_WRITE_USER', "hockeyWrite")
CLOUDSQL_READ_USER = os.getenv('CLOUDSQL_READ_USER', "hockeyRead")
CLOUDSQL_PASSWORD = os.getenv('CLOUDSQL_PASSWORD', "P8P1YopMlwg8TxhE")
CLOUDSQL_WRITE_PASSWORD = os.getenv('CLOUDSQL_WRITE_PASSWORD', "1URYcxXXlQ6xOWgj")
CLOUDSQL_READ_PASSWORD = os.getenv('CLOUDSQL_READ_PASSWORD', "o4GWrbbkBKy3oR6u")
CLOUDSQL_DATABASE = os.getenv('CLOUDSQL_DATABASE', "20209_hockeyResults")
LOCAL_DATABASE = os.getenv('LOCAL_DATABASE', "hockeyResults2021")
CLOUDSQL_DATABASE_STATIC = os.getenv('CLOUDSQL_DATABASE_STATIC', "hockeyResults")
CLOUDSQL_CHARSET = os.getenv('CLOUDSQL_CHARSET', "utf8")
# Legacy functions for backward compatibility
def write_cloudsql(): def write_cloudsql():
# When deployed to App Engine, the `SERVER_SOFTWARE` environment variable """
# will be set to 'Google App Engine/version'. Legacy function - now uses SQLAlchemy.
if os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine/'): Returns a session object for compatibility.
# Connect using the unix socket located at """
# /cloudsql/cloudsql-connection-name. warnings.warn(
cloudsql_unix_socket = os.path.join('/cloudsql', CLOUDSQL_CONNECTION_NAME) "write_cloudsql() is deprecated. Use get_db_session() instead.",
db = pymysql.connect(unix_socket=cloudsql_unix_socket, user=CLOUDSQL_WRITE_USER, passwd=CLOUDSQL_WRITE_PASSWORD, db=CLOUDSQL_DATABASE, charset=CLOUDSQL_CHARSET) DeprecationWarning,
else: stacklevel=2
db = pymysql.connect(host=LOCAL_DB_SERVER, user=CLOUDSQL_WRITE_USER, passwd=CLOUDSQL_WRITE_PASSWORD, db=LOCAL_DATABASE, charset=CLOUDSQL_CHARSET) )
return db return get_db_session()
def write_cloudsql_static(): def write_cloudsql_static():
# When deployed to App Engine, the `SERVER_SOFTWARE` environment variable """
# will be set to 'Google App Engine/version'. Legacy function - now uses SQLAlchemy.
if os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine/'): Returns a session object for compatibility.
# Connect using the unix socket located at """
# /cloudsql/cloudsql-connection-name. warnings.warn(
cloudsql_unix_socket = os.path.join('/cloudsql', CLOUDSQL_CONNECTION_NAME) "write_cloudsql_static() is deprecated. Use get_db_session() instead.",
db = pymysql.connect(unix_socket=cloudsql_unix_socket, user=CLOUDSQL_WRITE_USER, passwd=CLOUDSQL_WRITE_PASSWORD, db=CLOUDSQL_DATABASE_STATIC, charset=CLOUDSQL_CHARSET) DeprecationWarning,
else: stacklevel=2
db = pymysql.connect(host=LOCAL_DB_SERVER, user=CLOUDSQL_WRITE_USER, passwd=CLOUDSQL_WRITE_PASSWORD, db=CLOUDSQL_DATABASE_STATIC, charset=CLOUDSQL_CHARSET) )
return db return get_db_session()
def read_cloudsql(): def read_cloudsql():
# When deployed to App Engine, the `SERVER_SOFTWARE` environment variable """
# will be set to 'Google App Engine/version'. Legacy function - now uses SQLAlchemy.
if os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine/'): Returns a session object for compatibility.
# Connect using the unix socket located at """
# /cloudsql/cloudsql-connection-name. warnings.warn(
cloudsql_unix_socket = os.path.join('/cloudsql', CLOUDSQL_CONNECTION_NAME) "read_cloudsql() is deprecated. Use get_db_session() instead.",
db = pymysql.connect(unix_socket=cloudsql_unix_socket, user=CLOUDSQL_READ_USER, passwd=CLOUDSQL_READ_PASSWORD, db=CLOUDSQL_DATABASE, charset=CLOUDSQL_CHARSET) DeprecationWarning,
else: stacklevel=2
db = pymysql.connect(host=LOCAL_DB_SERVER, user=CLOUDSQL_READ_USER, passwd=CLOUDSQL_READ_PASSWORD, db=LOCAL_DATABASE, charset=CLOUDSQL_CHARSET) )
return db return get_db_session()
def read_cloudsql_static(): def read_cloudsql_static():
# When deployed to App Engine, the `SERVER_SOFTWARE` environment variable """
# will be set to 'Google App Engine/version'. Legacy function - now uses SQLAlchemy.
if os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine/'): Returns a session object for compatibility.
# Connect using the unix socket located at """
# /cloudsql/cloudsql-connection-name. warnings.warn(
cloudsql_unix_socket = os.path.join('/cloudsql', CLOUDSQL_CONNECTION_NAME) "read_cloudsql_static() is deprecated. Use get_db_session() instead.",
db = pymysql.connect(unix_socket=cloudsql_unix_socket, user=CLOUDSQL_READ_USER, passwd=CLOUDSQL_READ_PASSWORD, db=CLOUDSQL_DATABASE_STATIC, charset=CLOUDSQL_CHARSET) DeprecationWarning,
else: stacklevel=2
db = pymysql.connect(host=LOCAL_DB_SERVER, user=CLOUDSQL_READ_USER, passwd=CLOUDSQL_READ_PASSWORD, db=CLOUDSQL_DATABASE_STATIC, charset=CLOUDSQL_CHARSET) )
return db return get_db_session()
def sql_write(sql_cmd): # These functions now use SQLAlchemy but maintain the same interface
try: __all__ = [
db = write_cloudsql() 'sql_write',
cursor = db.cursor(pymysql.cursors.DictCursor) 'sql_write_static',
cursor.execute(sql_cmd) 'sql_read',
db.commit() 'sql_read_static',
except Exception as e: 'get_db_session',
print(e) 'execute_sql',
finally: 'fetch_all',
cursor.close() 'fetch_one',
db.close() 'init_database',
return db 'db_config',
# Legacy constants
def sql_write_static(sql_cmd): 'CLOUDSQL_CONNECTION_NAME',
try: 'LOCAL_DB_SERVER',
db = write_cloudsql_static() 'CLOUDSQL_USER',
cursor = db.cursor(pymysql.cursors.DictCursor) 'CLOUDSQL_WRITE_USER',
cursor.execute(sql_cmd) 'CLOUDSQL_READ_USER',
db.commit() 'CLOUDSQL_PASSWORD',
except Exception as e: 'CLOUDSQL_WRITE_PASSWORD',
print(e) 'CLOUDSQL_READ_PASSWORD',
finally: 'CLOUDSQL_DATABASE',
cursor.close() 'LOCAL_DATABASE',
db.close() 'CLOUDSQL_DATABASE_STATIC',
return db 'CLOUDSQL_CHARSET',
# Legacy functions
def sql_read(sql_cmd): 'write_cloudsql',
try: 'write_cloudsql_static',
db = read_cloudsql() 'read_cloudsql',
cursor = db.cursor(pymysql.cursors.DictCursor) 'read_cloudsql_static'
cursor.execute(sql_cmd) ]
rows = cursor.fetchall()
except Exception as e:
print(e)
rows = ''
finally:
cursor.close()
db.close()
return rows
def sql_read_static(sql_cmd):
try:
db = read_cloudsql_static()
cursor = db.cursor(pymysql.cursors.DictCursor)
cursor.execute(sql_cmd)
rows = cursor.fetchall()
except Exception as e:
print(e)
rows = ''
finally:
cursor.close()
db.close()
return rows

View File

@ -1,7 +1,6 @@
Flask>=2.0.0,<3.0.0 Flask>=2.0.0,<3.0.0
Werkzeug>=2.0.0 Werkzeug>=2.0.0
email-validator email-validator
flask-mysql
flask_login flask_login
Flask-BasicAuth Flask-BasicAuth
Flask-Bootstrap Flask-Bootstrap
@ -9,4 +8,16 @@ flask_wtf
wtforms>=3.0.0 wtforms>=3.0.0
wtforms_components wtforms_components
MarkupSafe>=2.0.0 MarkupSafe>=2.0.0
pymysql
# SQLAlchemy and database drivers
SQLAlchemy>=2.0.0
Flask-SQLAlchemy>=3.0.0
alembic>=1.12.0
# Database drivers
pymysql>=1.1.0
psycopg2-binary>=2.9.0
PyMySQL>=1.1.0
# Legacy support (can be removed after migration)
flask-mysql

View File

@ -1,7 +1,6 @@
Flask>=2.0.0,<3.0.0 Flask>=2.0.0,<3.0.0
Werkzeug>=2.0.0 Werkzeug>=2.0.0
email-validator email-validator
flask-mysql
flask_login flask_login
Flask-BasicAuth Flask-BasicAuth
Flask-Bootstrap Flask-Bootstrap
@ -10,3 +9,16 @@ wtforms>=3.0.0
wtforms_components wtforms_components
MarkupSafe>=2.0.0 MarkupSafe>=2.0.0
# SQLAlchemy and database drivers
SQLAlchemy>=2.0.0
Flask-SQLAlchemy>=3.0.0
alembic>=1.12.0
# Database drivers
pymysql>=1.1.0
psycopg2-binary>=2.9.0
PyMySQL>=1.1.0
# Legacy support (can be removed after migration)
flask-mysql