From ead332847eff6d6ea1c23d68254d67da8d9df9a1 Mon Sep 17 00:00:00 2001
From: wlinator
Date: Fri, 19 Jul 2024 16:14:28 -0400
Subject: [PATCH] chore: Add database migrations and docker-compose.dev.yml

---
 .gitignore             |  1 +
 Luminara.py            |  8 +++---
 config/parser.py       | 19 --------------
 db/database.py         | 57 ++++++++++++++++++++++++++++++++++++++++++++++
 docker-compose.dev.yml | 32 ++++++++++++++++++++++++
 docker-compose.yml     |  5 +++-
 6 files changed, 99 insertions(+), 23 deletions(-)
 create mode 100644 docker-compose.dev.yml

diff --git a/.gitignore b/.gitignore
index 7a6ed10..4747dcf 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,6 +3,7 @@ venv/
 __pycache__/
 .run/
 .vscode/
+data/
 *.db
 .env
 
diff --git a/Luminara.py b/Luminara.py
index 364514e..88e1309 100644
--- a/Luminara.py
+++ b/Luminara.py
@@ -10,6 +10,7 @@ import services.config_service
 import services.help_service
 from lib.constants import CONST
 from services.blacklist_service import BlacklistUserService
+from db.database import run_migrations
 
 # Remove the default logger configuration
 logger.remove()
@@ -42,9 +43,7 @@ client = Client.LumiBot(
 
 @client.check
 async def blacklist_check(ctx):
-    if BlacklistUserService.is_user_blacklisted(ctx.author.id):
-        return False
-    return True
+    return not BlacklistUserService.is_user_blacklisted(ctx.author.id)
 
 
 def load_modules():
@@ -87,6 +86,9 @@ if __name__ == "__main__":
 
     logger.info("LUMI IS BOOTING")
 
+    # Run database migrations
+    run_migrations()
+
     # cache all JSON
     [
         config.parser.JsonCache.read_json(file[:-5])
diff --git a/config/parser.py b/config/parser.py
index 324a13a..5e4b35c 100644
--- a/config/parser.py
+++ b/config/parser.py
@@ -1,8 +1,5 @@
 import json
 
-import yaml
-from loguru import logger
-
 
 class JsonCache:
     _cache = {}
@@ -13,21 +10,5 @@ class JsonCache:
         if path not in JsonCache._cache:
             with open(f"config/JSON/{path}.json") as file:
                 JsonCache._cache[path] = json.load(file)
-            logger.debug(f"{path}.json was loaded and cached.")
 
         return JsonCache._cache[path]
-
-
-class YamlCache:
-    _cache = {}
-
-    @staticmethod
-    def read_credentialsl():
-        """Read and cache the creds.yaml data if not already cached."""
-        path = "creds"
-        if path not in YamlCache._cache:
-            with open(f"{path}.yaml") as file:
-                YamlCache._cache[path] = yaml.safe_load(file)
-            logger.debug(f"{path}.yaml was loaded and cached.")
-
-        return YamlCache._cache[path]
diff --git a/db/database.py b/db/database.py
index 42f2b49..a2d7e41 100644
--- a/db/database.py
+++ b/db/database.py
@@ -1,6 +1,8 @@
 import mysql.connector
 from loguru import logger
 from mysql.connector import pooling
+import os
+import re
 
 from lib.constants import CONST
 
@@ -47,3 +49,58 @@ def select_query_one(query, values=None):
             cursor.execute(query, values)
             output = cursor.fetchone()
             return output[0] if output else None
+
+
+def run_migrations():
+    migrations_dir = "db/migrations"
+    migration_files = sorted(
+        [f for f in os.listdir(migrations_dir) if f.endswith(".sql")],
+    )
+
+    with _cnxpool.get_connection() as conn:
+        with conn.cursor() as cursor:
+            # Create migrations table if it doesn't exist
+            cursor.execute("""
+                CREATE TABLE IF NOT EXISTS migrations (
+                    id INT AUTO_INCREMENT PRIMARY KEY,
+                    filename VARCHAR(255) NOT NULL,
+                    applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                )
+            """)
+
+            for migration_file in migration_files:
+                # Check if migration has already been applied
+                cursor.execute(
+                    "SELECT COUNT(*) FROM migrations WHERE filename = %s",
+                    (migration_file,),
+                )
+                if cursor.fetchone()[0] > 0:
+                    logger.debug(
+                        f"Migration {migration_file} already applied, skipping.",
+                    )
+                    continue
+
+                # Read and execute migration file
+                with open(os.path.join(migrations_dir, migration_file)) as f:
+                    migration_sql = f.read()
+
+                try:
+                    # Split the migration file into individual statements
+                    statements = re.split(r";\s*$", migration_sql, flags=re.MULTILINE)
+                    for statement in statements:
+                        if statement.strip():
+                            cursor.execute(statement)
+
+                    # Record successful migration
+                    cursor.execute(
+                        "INSERT INTO migrations (filename) VALUES (%s)",
+                        (migration_file,),
+                    )
+                    conn.commit()
+                    logger.debug(f"Successfully applied migration: {migration_file}")
+                except mysql.connector.Error as e:
+                    conn.rollback()
+                    logger.error(f"Error applying migration {migration_file}: {e}")
+                    raise
+
+    logger.debug("All migrations completed.")
diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml
new file mode 100644
index 0000000..cec742f
--- /dev/null
+++ b/docker-compose.dev.yml
@@ -0,0 +1,32 @@
+services:
+  core:
+    build: .
+    container_name: lumi-core
+    restart: always
+    depends_on:
+      db:
+        condition: service_healthy
+
+  db:
+    image: mariadb
+    container_name: lumi-db
+    restart: always
+    environment:
+      MARIADB_ROOT_PASSWORD: ${MARIADB_ROOT_PASSWORD}
+      MARIADB_USER: ${MARIADB_USER}
+      MARIADB_PASSWORD: ${MARIADB_PASSWORD}
+      MARIADB_DATABASE: ${MARIADB_DATABASE}
+    volumes:
+      - ./data:/var/lib/mysql/
+    healthcheck:
+      test: [ "CMD", "mariadb", "-h", "localhost", "-u", "${MARIADB_USER}", "-p${MARIADB_PASSWORD}", "-e", "SELECT 1" ]
+      interval: 5s
+      timeout: 10s
+      retries: 5
+
+  adminer:
+    image: adminer
+    container_name: lumi-adminer
+    restart: always
+    ports:
+      - 8080:8080
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
index f5c609c..497a8e8 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,8 +1,11 @@
 services:
   core:
-    build: .
+    image: ghcr.io/wlinator/luminara:main
     container_name: lumi-core
     restart: always
+    env_file:
+      - path: ./.env
+        required: true
     depends_on:
       db:
         condition: service_healthy
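
Note: run_migrations() applies every *.sql file under db/migrations/ in sorted filename order, but no such files are included in this patch. For illustration only, a first migration might look like the sketch below; the filename 001_create_example_table.sql and the table it creates are assumptions, not part of this commit. The runner splits each file on ";" at end of line, executes every non-empty statement, then records the filename in the migrations table so the file is skipped on later boots.

    -- db/migrations/001_create_example_table.sql (hypothetical example, not shipped in this patch)
    -- Each statement must end with ";" at the end of a line, because run_migrations()
    -- splits the file with re.split(r";\s*$", ..., flags=re.MULTILINE).
    CREATE TABLE IF NOT EXISTS example_table (
        id INT AUTO_INCREMENT PRIMARY KEY,
        guild_id BIGINT NOT NULL,
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
    );

    CREATE INDEX idx_example_table_guild_id ON example_table (guild_id);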
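
Note: docker-compose.dev.yml builds the bot image locally and adds an Adminer container on port 8080 for inspecting the MariaDB database, while the main docker-compose.yml now pulls ghcr.io/wlinator/luminara:main and loads ./.env as a required env_file. Assuming the MARIADB_* variables are set in that .env file (or exported in the shell), the development stack would typically be started with "docker compose -f docker-compose.dev.yml up --build"; the healthcheck on the db service holds back lumi-core until MariaDB answers SELECT 1.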