mirror of https://github.com/wlinator/luminara.git synced 2024-10-02 18:23:12 +00:00

chore: Add database migrations and docker-compose.dev.yml

This commit is contained in:
wlinator 2024-07-19 16:14:28 -04:00
parent 8da2b15b82
commit ead332847e
6 changed files with 99 additions and 23 deletions

1
.gitignore vendored
View file

@@ -3,6 +3,7 @@ venv/
 __pycache__/
 .run/
 .vscode/
+data/
 *.db
 .env

View file

@@ -10,6 +10,7 @@ import services.config_service
 import services.help_service
 from lib.constants import CONST
 from services.blacklist_service import BlacklistUserService
+from db.database import run_migrations

 # Remove the default logger configuration
 logger.remove()
@@ -42,9 +43,7 @@ client = Client.LumiBot(
 @client.check
 async def blacklist_check(ctx):
-    if BlacklistUserService.is_user_blacklisted(ctx.author.id):
-        return False
-    return True
+    return not BlacklistUserService.is_user_blacklisted(ctx.author.id)


 def load_modules():
@@ -87,6 +86,9 @@ if __name__ == "__main__":
     logger.info("LUMI IS BOOTING")

+    # Run database migrations
+    run_migrations()
+
     # cache all JSON
     [
         config.parser.JsonCache.read_json(file[:-5])
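
Note: the blacklist_check rewrite in the hunk above is behavior-preserving. A quick standalone sanity check (a plain boolean stands in for BlacklistUserService.is_user_blacklisted here, purely for illustration):

# Sanity check: the one-line rewrite returns the same result as the old three-line version.
def old_check(blacklisted: bool) -> bool:
    if blacklisted:
        return False
    return True

def new_check(blacklisted: bool) -> bool:
    return not blacklisted

assert all(old_check(b) == new_check(b) for b in (True, False))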

View file

@@ -1,8 +1,5 @@
 import json
-
-import yaml
-from loguru import logger


 class JsonCache:
     _cache = {}
@@ -13,21 +10,5 @@ class JsonCache:
         if path not in JsonCache._cache:
             with open(f"config/JSON/{path}.json") as file:
                 JsonCache._cache[path] = json.load(file)
-                logger.debug(f"{path}.json was loaded and cached.")
         return JsonCache._cache[path]
-
-
-class YamlCache:
-    _cache = {}
-
-    @staticmethod
-    def read_credentialsl():
-        """Read and cache the creds.yaml data if not already cached."""
-        path = "creds"
-        if path not in YamlCache._cache:
-            with open(f"{path}.yaml") as file:
-                YamlCache._cache[path] = yaml.safe_load(file)
-                logger.debug(f"{path}.yaml was loaded and cached.")
-        return YamlCache._cache[path]
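
With YamlCache and its debug logging removed, config JSON loading still goes through JsonCache.read_json (called from the bot entrypoint above as config.parser.JsonCache.read_json). A minimal usage sketch; the "example" file name is purely illustrative and assumes the repo's config/JSON/ layout:

from config.parser import JsonCache

# First call reads config/JSON/example.json from disk and caches the parsed data.
data = JsonCache.read_json("example")

# Later calls return the same cached object without re-reading the file.
assert JsonCache.read_json("example") is data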

View file

@@ -1,6 +1,8 @@
 import mysql.connector
 from loguru import logger
 from mysql.connector import pooling
+import os
+import re

 from lib.constants import CONST
@@ -47,3 +49,58 @@ def select_query_one(query, values=None):
             cursor.execute(query, values)
             output = cursor.fetchone()
             return output[0] if output else None
+
+
+def run_migrations():
+    migrations_dir = "db/migrations"
+    migration_files = sorted(
+        [f for f in os.listdir(migrations_dir) if f.endswith(".sql")],
+    )
+
+    with _cnxpool.get_connection() as conn:
+        with conn.cursor() as cursor:
+            # Create migrations table if it doesn't exist
+            cursor.execute("""
+                CREATE TABLE IF NOT EXISTS migrations (
+                    id INT AUTO_INCREMENT PRIMARY KEY,
+                    filename VARCHAR(255) NOT NULL,
+                    applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                )
+            """)
+
+            for migration_file in migration_files:
+                # Check if migration has already been applied
+                cursor.execute(
+                    "SELECT COUNT(*) FROM migrations WHERE filename = %s",
+                    (migration_file,),
+                )
+                if cursor.fetchone()[0] > 0:
+                    logger.debug(
+                        f"Migration {migration_file} already applied, skipping.",
+                    )
+                    continue
+
+                # Read and execute migration file
+                with open(os.path.join(migrations_dir, migration_file)) as f:
+                    migration_sql = f.read()
+
+                try:
+                    # Split the migration file into individual statements
+                    statements = re.split(r";\s*$", migration_sql, flags=re.MULTILINE)
+                    for statement in statements:
+                        if statement.strip():
+                            cursor.execute(statement)
+
+                    # Record successful migration
+                    cursor.execute(
+                        "INSERT INTO migrations (filename) VALUES (%s)",
+                        (migration_file,),
+                    )
+                    conn.commit()
+                    logger.debug(f"Successfully applied migration: {migration_file}")
+                except mysql.connector.Error as e:
+                    conn.rollback()
+                    logger.error(f"Error applying migration {migration_file}: {e}")
+                    raise
+
+    logger.debug("All migrations completed.")

32
docker-compose.dev.yml Normal file
View file

@@ -0,0 +1,32 @@
+services:
+  core:
+    build: .
+    container_name: lumi-core
+    restart: always
+    depends_on:
+      db:
+        condition: service_healthy
+
+  db:
+    image: mariadb
+    container_name: lumi-db
+    restart: always
+    environment:
+      MARIADB_ROOT_PASSWORD: ${MARIADB_ROOT_PASSWORD}
+      MARIADB_USER: ${MARIADB_USER}
+      MARIADB_PASSWORD: ${MARIADB_PASSWORD}
+      MARIADB_DATABASE: ${MARIADB_DATABASE}
+    volumes:
+      - ./data:/var/lib/mysql/
+    healthcheck:
+      test: [ "CMD", "mariadb", "-h", "localhost", "-u", "${MARIADB_USER}", "-p${MARIADB_PASSWORD}", "-e", "SELECT 1" ]
+      interval: 5s
+      timeout: 10s
+      retries: 5
+
+  adminer:
+    image: adminer
+    container_name: lumi-adminer
+    restart: always
+    ports:
+      - 8080:8080

View file

@@ -1,8 +1,11 @@
 services:
   core:
-    build: .
+    image: ghcr.io/wlinator/luminara:main
     container_name: lumi-core
     restart: always
+    env_file:
+      - path: ./.env
+        required: true
     depends_on:
       db:
         condition: service_healthy