Browse Source

Setup Alembic migrations and FastAPI app

Alembic:
- Created alembic.ini configuration
- Setup async env.py for SQLAlchemy
- Generated initial schema migration with all tables
- Added sequence for device.simple_id (auto-increment)
- Applied migrations to database

FastAPI:
- Created main.py with basic app setup
- Added CORS middleware
- Root and health check endpoints
- Tested server startup

Database tables created:
- users (with 5 roles)
- organizations (with product flags)
- devices (with simple_id sequence)
- refresh_tokens
- audit_logs

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
root 1 month ago
parent
commit
df94aa71a0

+ 72 - 0
backend/alembic.ini

@@ -0,0 +1,72 @@
# Alembic configuration file

[alembic]
# Path to migration scripts
script_location = alembic

# Template used to generate migration files.
# Note: '%%' is a literal '%' — configparser treats single '%' as interpolation.
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s

# Timezone for generating revision timestamps
timezone = UTC

# Max length of characters to apply to the "slug" field
truncate_slug_length = 40

# Set to 'true' to run the environment during the 'revision' command
# revision_environment = false

# Set to 'true' to allow .pyc and .pyo files without a source .py file
# sourceless = false

# Version location specification
version_locations = %(here)s/alembic/versions

# Version path separator
version_path_separator = os

# The output encoding used when revision files are written
output_encoding = utf-8

# Database URL - will be overridden by env.py
# NOTE(review): placeholder credentials only — env.py replaces this value from
# application settings at runtime; never commit real credentials here.
sqlalchemy.url = postgresql+asyncpg://mybeacon:mybeacon@localhost/mybeacon


[post_write_hooks]
# Post-write hooks
# hook_name = <module_path>:<function_name>

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

+ 86 - 0
backend/alembic/env.py

@@ -0,0 +1,86 @@
+"""
+Alembic environment configuration for async SQLAlchemy.
+"""
+
+import asyncio
+from logging.config import fileConfig
+
+from sqlalchemy import pool
+from sqlalchemy.engine import Connection
+from sqlalchemy.ext.asyncio import async_engine_from_config
+
+from alembic import context
+
+# Import Base and all models so Alembic can detect them
+from app.core.database import Base
+from app.models import *  # noqa: F403, F401
+from app.config import settings
+
# Alembic Config object, providing access to values in alembic.ini.
config = context.config

# Override sqlalchemy.url with the application's configured database URL.
# configparser treats '%' as interpolation syntax, so a raw URL whose
# password/query string contains '%' would raise an InterpolationSyntaxError;
# escape it as '%%' before handing it to the ini-backed config.
config.set_main_option("sqlalchemy.url", settings.DATABASE_URL.replace("%", "%%"))

# Configure Python logging from the [loggers]/[handlers]/[formatters]
# sections of alembic.ini (skipped when Alembic is driven programmatically
# without a config file).
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Metadata that 'alembic revision --autogenerate' diffs against the database.
target_metadata = Base.metadata
+
+
def run_migrations_offline() -> None:
    """
    Run migrations in 'offline' mode.

    The context is configured with only the database URL — no Engine is
    created — so context.execute() emits the generated SQL to the script
    output instead of executing it against a live database.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
+
+
def do_run_migrations(connection: Connection) -> None:
    """Bind the Alembic context to *connection* and execute all migrations."""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
    )

    with context.begin_transaction():
        context.run_migrations()
+
+
async def run_async_migrations() -> None:
    """
    Run migrations in 'online' mode using an async engine.

    Builds an AsyncEngine from the [alembic] config section (whose
    sqlalchemy.url was overridden from application settings at import
    time) and runs the migrations on a sync-adapted connection.
    """
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        # Migrations are one-shot; no connection pooling needed.
        poolclass=pool.NullPool,
    )

    try:
        async with connectable.connect() as connection:
            # Alembic's migration API is synchronous; run it inside the
            # async connection's sync adapter.
            await connection.run_sync(do_run_migrations)
    finally:
        # Dispose even when connect() or a migration raises, so the
        # engine's resources are never leaked.
        await connectable.dispose()
+
+
def run_migrations_online() -> None:
    """Synchronous entry point for 'online' mode: drive the async coroutine."""
    asyncio.run(run_async_migrations())
+
+
# Dispatch on the mode Alembic was invoked in: --sql produces offline mode,
# everything else connects to the database.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

+ 26 - 0
backend/alembic/script.py.mako

@@ -0,0 +1,26 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+    ${downgrades if downgrades else "pass"}

+ 126 - 0
backend/alembic/versions/20251227_0841_2affe85d6033_initial_schema.py

@@ -0,0 +1,126 @@
+"""Initial schema
+
+Revision ID: 2affe85d6033
+Revises: 
+Create Date: 2025-12-27 08:41:03.191770+00:00
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
# Revision identifiers, used by Alembic to order and link migrations.
revision: str = '2affe85d6033'
# First migration in the chain, so there is no parent revision.
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
+
+
def upgrade() -> None:
    """Create the initial schema: the simple_id sequence, then tables in
    FK-dependency order (organizations -> devices/users -> audit_logs,
    refresh_tokens)."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Create sequence for device simple_id (auto-increment, never reused).
    # Must exist before the devices table, whose simple_id default calls it.
    op.execute('CREATE SEQUENCE device_simple_id_seq START 1')

    op.create_table('organizations',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('contact_email', sa.String(length=255), nullable=False),
    sa.Column('contact_phone', sa.String(length=50), nullable=True),
    sa.Column('wifi_enabled', sa.Boolean(), nullable=False),
    sa.Column('ble_enabled', sa.Boolean(), nullable=False),
    sa.Column('status', sa.String(length=20), nullable=False),
    sa.Column('notes', sa.String(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_organizations'))
    )
    # Devices: simple_id is filled server-side from device_simple_id_seq;
    # deleting an organization detaches its devices (ondelete='SET NULL').
    op.create_table('devices',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('simple_id', sa.Integer(), server_default=sa.text("nextval('device_simple_id_seq')"), nullable=False),
    sa.Column('mac_address', sa.String(length=17), nullable=False),
    sa.Column('serial_number', sa.String(length=100), nullable=True),
    sa.Column('device_type', sa.String(length=50), nullable=False),
    sa.Column('model', sa.String(length=50), nullable=True),
    sa.Column('firmware_version', sa.String(length=50), nullable=True),
    sa.Column('organization_id', sa.Integer(), nullable=True),
    sa.Column('status', sa.String(length=20), nullable=False),
    sa.Column('last_seen_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('last_ip', postgresql.INET(), nullable=True),
    sa.Column('config', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
    sa.Column('notes', sa.String(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.ForeignKeyConstraint(['organization_id'], ['organizations.id'], name=op.f('fk_devices_organization_id_organizations'), ondelete='SET NULL'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_devices')),
    sa.UniqueConstraint('mac_address', name=op.f('uq_devices_mac_address')),
    sa.UniqueConstraint('simple_id', name=op.f('uq_devices_simple_id'))
    )
    # Users: deleting an organization also deletes its users
    # (ondelete='CASCADE'), unlike devices above.
    op.create_table('users',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('email', sa.String(length=255), nullable=False),
    sa.Column('hashed_password', sa.String(length=255), nullable=False),
    sa.Column('full_name', sa.String(length=255), nullable=True),
    sa.Column('phone', sa.String(length=50), nullable=True),
    sa.Column('role', sa.String(length=20), nullable=False),
    sa.Column('status', sa.String(length=20), nullable=False),
    sa.Column('organization_id', sa.Integer(), nullable=True),
    sa.Column('email_verified', sa.Boolean(), nullable=False),
    sa.Column('email_verification_token', sa.String(length=255), nullable=True),
    sa.Column('email_verified_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('password_reset_token', sa.String(length=255), nullable=True),
    sa.Column('password_reset_expires', sa.DateTime(timezone=True), nullable=True),
    sa.Column('last_login_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('last_login_ip', postgresql.INET(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.ForeignKeyConstraint(['organization_id'], ['organizations.id'], name=op.f('fk_users_organization_id_organizations'), ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_users')),
    sa.UniqueConstraint('email', name=op.f('uq_users_email'))
    )
    # Audit logs: rows outlive their user/organization — both FKs are
    # 'SET NULL' and user_email is denormalized to survive user deletion.
    op.create_table('audit_logs',
    sa.Column('id', sa.BigInteger(), nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.Column('user_email', sa.String(length=255), nullable=True),
    sa.Column('organization_id', sa.Integer(), nullable=True),
    sa.Column('action', sa.String(length=50), nullable=False),
    sa.Column('resource_type', sa.String(length=50), nullable=True),
    sa.Column('resource_id', sa.Integer(), nullable=True),
    sa.Column('description', sa.String(), nullable=True),
    sa.Column('changes', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', postgresql.INET(), nullable=True),
    sa.Column('user_agent', sa.String(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.ForeignKeyConstraint(['organization_id'], ['organizations.id'], name=op.f('fk_audit_logs_organization_id_organizations'), ondelete='SET NULL'),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_audit_logs_user_id_users'), ondelete='SET NULL'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_audit_logs'))
    )
    # Refresh tokens: tokens die with their user (ondelete='CASCADE').
    op.create_table('refresh_tokens',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('token', sa.String(length=512), nullable=False),
    sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('device_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('revoked_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_refresh_tokens_user_id_users'), ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_refresh_tokens')),
    sa.UniqueConstraint('token', name=op.f('uq_refresh_tokens_token'))
    )
    # ### end Alembic commands ###
+
+
def downgrade() -> None:
    """Tear down the initial schema: tables first, then the sequence."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop children before parents so no foreign key is left dangling.
    for table_name in ('refresh_tokens', 'audit_logs', 'users', 'devices', 'organizations'):
        op.drop_table(table_name)

    # The sequence backing devices.simple_id lives outside the table; drop it last.
    op.execute('DROP SEQUENCE IF EXISTS device_simple_id_seq')
    # ### end Alembic commands ###

+ 47 - 0
backend/app/main.py

@@ -0,0 +1,47 @@
+"""
+FastAPI application entry point.
+"""
+
+from fastapi import FastAPI
+from fastapi.middleware.cors import CORSMiddleware
+
+from app.config import settings
+
+# Create FastAPI app
# Create the FastAPI application; /docs and /redoc expose the OpenAPI UIs.
app = FastAPI(
    title=settings.PROJECT_NAME,
    version="0.1.0",
    description="MyBeacon Backend API - Modular BLE/WiFi monitoring platform",
    docs_url="/docs",
    redoc_url="/redoc",
)

# Configure CORS from the settings-provided origin list.
# NOTE(review): with allow_credentials=True the origin list must contain
# explicit origins — a wildcard '*' entry would not work with credentialed
# requests; confirm settings.cors_origins_list never contains '*'.
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.cors_origins_list,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
+
+
@app.get("/")
async def root():
    """Root endpoint - API info.

    Returns the project name, the running API version, and a static
    status flag. The version is read from the FastAPI app object so it
    is declared in exactly one place (the FastAPI(...) constructor)
    instead of being duplicated as a literal here.
    """
    return {
        "name": settings.PROJECT_NAME,
        "version": app.version,
        "status": "running",
    }
+
+
@app.get("/health")
async def health_check():
    """Liveness probe: unconditionally reports the service as healthy."""
    payload = {"status": "healthy"}
    return payload
+
+
+# Include routers (will add later)
+# from app.api.v1 import router as api_v1_router
+# app.include_router(api_v1_router, prefix=settings.API_V1_PREFIX)