diff --git a/.air.toml b/.air.toml index ad9f38b..8d71cf6 100644 --- a/.air.toml +++ b/.air.toml @@ -3,7 +3,7 @@ testdata_dir = "testdata" tmp_dir = "tmp" [build] - args_bin = [] + args_bin = ["--dev"] bin = "./tmp/main" cmd = "go build -o ./tmp/main ./cmd/oslstats" delay = 1000 @@ -14,7 +14,7 @@ tmp_dir = "tmp" follow_symlink = false full_bin = "" include_dir = [] - include_ext = ["go", "templ"] + include_ext = ["go", "templ", "js"] include_file = [] kill_delay = "0s" log = "build-errors.log" diff --git a/.gitignore b/.gitignore index 9d37f15..7a77d4d 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,10 @@ tmp/ static/css/output.css internal/view/**/*_templ.go internal/view/**/*_templ.txt +cmd/test/* +.opencode + +# Database backups (compressed) +backups/*.sql.gz +backups/*.sql +!backups/.gitkeep diff --git a/.test.env b/.test.env new file mode 100644 index 0000000..c5388de --- /dev/null +++ b/.test.env @@ -0,0 +1,124 @@ +# Environment Configuration +# Generated by ezconf +# +# Variables marked as (required) must be set +# Variables with defaults can be left commented out to use the default value + +# HLog Configuration +################### +# Log level for the logger - trace, debug, info, warn, error, fatal, panic (default: info) +LOG_LEVEL=trace + +# Output destination for logs - console, file, or both (default: console) +# LOG_OUTPUT=console + +# Directory path for log files (required) +LOG_DIR= + +# Name of the log file (required) +LOG_FILE_NAME= + +# Append to existing log file or overwrite (default: true) +# LOG_APPEND=true + +# HWS Configuration +################## +# Host to listen on (default: 127.0.0.1) +# HWS_HOST=127.0.0.1 + +# Port to listen on (default: 3000) +HWS_PORT=3333 + +# Flag for GZIP compression on requests (default: false) +# HWS_GZIP=false + +# Timeout for reading request headers in seconds (default: 2) +# HWS_READ_HEADER_TIMEOUT=2 + +# Timeout for writing requests in seconds (default: 10) +# HWS_WRITE_TIMEOUT=10 + +# Timeout for idle 
connections in seconds (default: 120) +# HWS_IDLE_TIMEOUT=120 + +# Delay in seconds before server shuts down when Shutdown is called (default: 5) +# HWS_SHUTDOWN_DELAY=5 + +# HWSAuth Configuration +###################### +# Enable SSL secure cookies (default: false) +# HWSAUTH_SSL=false + +# Full server address for SSL (required) +HWSAUTH_TRUSTED_HOST=http://127.0.0.1:3000 + +# Secret key for signing JWT tokens (required) +HWSAUTH_SECRET_KEY=/2epovpAmHFwdmlCxHRnihT50ZQtrGF/wK7+wiJdFLI= + +# Access token expiry in minutes (default: 5) +# HWSAUTH_ACCESS_TOKEN_EXPIRY=5 + +# Refresh token expiry in minutes (default: 1440) +# HWSAUTH_REFRESH_TOKEN_EXPIRY=1440 + +# Token fresh time in minutes (default: 5) +# HWSAUTH_TOKEN_FRESH_TIME=5 + +# Redirect destination for authenticated users (default: "/profile") +# HWSAUTH_LANDING_PAGE="/profile" + +# Database type (postgres, mysql, sqlite, mariadb) (default: "postgres") +# HWSAUTH_DATABASE_TYPE="postgres" + +# Database version string (default: "15") +HWSAUTH_DATABASE_VERSION=18 + +# Custom JWT blacklist table name (default: "jwtblacklist") +# HWSAUTH_JWT_TABLE_NAME="jwtblacklist" + +# DB Configuration +################# +# Database user for authentication (required) +DB_USER=pgdev + +# Database password for authentication (required) +DB_PASSWORD=pgdevuser + +# Database host address (required) +DB_HOST=10.3.0.60 + +# Database port (default: 5432) +# DB_PORT=5432 + +# Database name to connect to (required) +DB_NAME=oslstats_test + +# SSL mode for connection (default: disable) +# DB_SSL=disable + +# Number of backups to keep (default: 10) +# DB_BACKUP_RETENTION=10 + +# Discord Configuration +###################### +# Discord application client ID (required) +DISCORD_CLIENT_ID=1463459682235580499 + +# Discord application client secret (required) +DISCORD_CLIENT_SECRET=pinbGa9IkgYQfeBIfBuosor6ODK-JTON + +# Path for the OAuth redirect handler (required) +DISCORD_REDIRECT_PATH=auth/callback + +# Token for the discord bot (required) 
+DISCORD_BOT_TOKEN=MTQ2MzQ1OTY4MjIzNTU4MDQ5OQ.GK-9Q6.Z876_JG7oUIKFwKp5snxUjAzloxVjy7KP37TX4 + +# OAuth Configuration +#################### +# Private key for signing OAuth state tokens (required) +OAUTH_PRIVATE_KEY=b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZWQyNTUxOQAAACDtDHHkeGp1POc0z6/vDj8SK48lVeuGswu/8UO4oBcYSAAAAJj7edqp+3naqQAAAAtzc2gtZWQyNTUxOQAAACDtDHHkeGp1POc0z6/vDj8SK48lVeuGswu/8UO4oBcYSAAAAEAuqALdQqnaDFb5PvuUN4ng1d191hsirOhnahsT0aJFV+0MceR4anU85zTPr+8OPxIrjyVV64azC7/xQ7igFxhIAAAAEWhhZWxub3JyQGZsYWdzaGlwAQIDBA== + +# RBAC Configuration +################### +# Discord ID to grant admin role on first login (required) +ADMIN_DISCORD_ID=202990104170463241 diff --git a/AGENTS.md b/AGENTS.md index dcb4055..1394bce 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -9,123 +9,231 @@ This document provides guidelines for AI coding agents and developers working on **Architecture**: Web application with Discord OAuth, PostgreSQL database, templ templates **Key Technologies**: Bun ORM, templ, TailwindCSS, custom golib libraries -## Build, Test, and Development Commands +## Build and Development Commands ### Building +NEVER BUILD MANUALLY ```bash # Full production build (tailwind → templ → go generate → go build) -make build +just build # Build and run -make run - -# Clean build artifacts -make clean +just run ``` ### Development Mode ```bash # Watch mode with hot reload (templ, air, tailwindcss in parallel) -make dev +just dev # Development server runs on: # - Proxy: http://localhost:3000 (use this) # - App: http://localhost:3333 (internal) ``` -### Testing +### Database Migrations + +**oslstats uses Bun's migration framework for safe, incremental schema changes.** + +#### Quick Reference + +**New Migration System**: Migrations now accept a count parameter. Default is 1 migration at a time. + ```bash -# Run all tests -go test ./... 
+# Show migration status +just migrate status -# Run tests for a specific package -go test ./pkg/oauth +# Run 1 migration (default, with automatic backup) +just migrate up 1 +# OR just +just migrate up -# Run a single test function -go test ./pkg/oauth -run TestGenerateState_Success +# Run 3 migrations +just migrate up 3 -# Run tests with verbose output -go test -v ./pkg/oauth +# Run all pending migrations +just migrate up all -# Run tests with coverage -go test -cover ./... -go test -coverprofile=coverage.out ./... -go tool cover -html=coverage.out +# Run with a specific environment file +just migrate up 3 .test.env + +# Rollback works the same for all arguments +just migrate down 2 .test.env + +# Create new migration +just migrate new add_email_to_users + +# Dev: Reset database (DESTRUCTIVE - deletes all data) +just reset-db ``` -### Database +#### Creating a New Migration + +**Example: Adding an email field to users table** + +1. **Generate migration file:** + ```bash + just migrate new add_leagues_and_slap_version + ``` + Creates: `cmd/oslstats/migrations/20250124150030_add_leagues_and_slap_version.go` + +2. **Edit the migration file:** + ```go + package migrations + + import ( + "context" + "github.com/uptrace/bun" + ) + + func init() { + Migrations.MustRegister( + // UP: Add email column + func(ctx context.Context, db *bun.DB) error { + _, err := dbConn.NewAddColumn(). + Model((*db.Season)(nil)). + ColumnExpr("slap_version VARCHAR NOT NULL"). + IfNotExists(). + Exec(ctx) + if err != nil { + return err + } + + // Create leagues table + _, err = dbConn.NewCreateTable(). + Model((*db.League)(nil)). + Exec(ctx) + if err != nil { + return err + } + + // Create season_leagues join table + _, err = dbConn.NewCreateTable(). + Model((*db.SeasonLeague)(nil)). + Exec(ctx) + return err + }, + // DOWN: Remove email column (for rollback) + func(ctx context.Context, db *bun.DB) error { + // Drop season_leagues join table first + _, err := dbConn.NewDropTable(). 
+ Model((*db.SeasonLeague)(nil)). + IfExists(). + Exec(ctx) + if err != nil { + return err + } + + // Drop leagues table + _, err = dbConn.NewDropTable(). + Model((*db.League)(nil)). + IfExists(). + Exec(ctx) + if err != nil { + return err + } + + // Remove slap_version column from seasons table + _, err = dbConn.NewDropColumn(). + Model((*db.Season)(nil)). + ColumnExpr("slap_version"). + Exec(ctx) + return err + }, + ) + } + ``` + +3. **Update the model** (`internal/db/user.go`): + ```go + type Season struct { + bun.BaseModel `bun:"table:seasons,alias:s"` + + ID int `bun:"id,pk,autoincrement"` + Name string `bun:"name,unique"` + SlapVersion string `bun:"slap_version"` // NEW FIELD + } + ``` + +4. **Apply the migration:** + ```bash + just migrate up 1 + ``` + Output: + ``` + [INFO] Step 1/5: Validating migrations... + [INFO] Migration validation passed ✓ + [INFO] Step 2/5: Checking for pending migrations... + [INFO] Running 1 migration(s): + 📋 20250124150030_add_email_to_users + [INFO] Step 3/5: Creating backup... + [INFO] Backup created: backups/20250124_150145_pre_migration.sql.gz (2.3 MB) + [INFO] Step 4/5: Acquiring migration lock... + [INFO] Migration lock acquired + [INFO] Step 5/5: Applying migrations... 
+ [INFO] Migrated to group 2 + ✅ 20250124150030_add_email_to_users + [INFO] Migration lock released + ``` + +#### Environment Variables + ```bash -# Run migrations -make migrate -# OR -./bin/oslstats --migrate +# Backup directory (default: backups) +DB_BACKUP_DIR=backups + +# Number of backups to keep (default: 10) +DB_BACKUP_RETENTION=10 ``` +#### Troubleshooting + +**"pg_dump not found"** +- Migrations will still run, but backups will be skipped +- Install PostgreSQL client tools for backups: + ```bash + # Ubuntu/Debian + sudo apt-get install postgresql-client + + # macOS + brew install postgresql + + # Arch + sudo pacman -S postgresql-libs + ``` + +**"migration already in progress"** +- Another instance is running migrations +- Wait for it to complete (max 5 minutes) +- If stuck, check for hung database connections + +**"migration build failed"** +- Migration file has syntax errors +- Fix the errors and try again +- Use `go build ./cmd/oslstats/migrations` to debug + ### Configuration Management ```bash # Generate .env template file -make genenv -# OR with custom output: make genenv OUT=.env.example +just genenv +# OR with custom output: just genenv .env.example # Show environment variable documentation -make envdoc +just envdoc # Show current environment values -make showenv +just showenv ``` ## Code Style Guidelines -### Import Organization -Organize imports in **3 groups** separated by blank lines: - -```go -import ( - // 1. Standard library - "context" - "net/http" - "fmt" - - // 2. External dependencies - "git.haelnorr.com/h/golib/hws" - "github.com/pkg/errors" - "github.com/uptrace/bun" - - // 3. 
Internal packages - "git.haelnorr.com/h/oslstats/internal/config" - "git.haelnorr.com/h/oslstats/pkg/oauth" -) -``` - -### Naming Conventions - -**Variables**: -- Local: `camelCase` (userAgentKey, httpServer, dbConn) -- Exported: `PascalCase` (Config, User, Token) -- Common abbreviations: `cfg`, `ctx`, `tx`, `db`, `err`, `w`, `r` - -**Functions**: -- Exported: `PascalCase` (GetConfig, NewStore, GenerateState) -- Private: `camelCase` (throwError, shouldShowDetails, loadModels) -- HTTP handlers: Return `http.Handler`, use dependency injection pattern -- Database functions: Use `bun.Tx` as parameter for transactions - -**Types**: -- Structs/Interfaces: `PascalCase` (Config, User, OAuthSession) -- Use `-er` suffix for interfaces (implied from usage) - -**Files**: -- Prefer single word: `config.go`, `oauth.go`, `errors.go` -- Don't use snake_case except for tests: `state_test.go` -- Test files: `*_test.go` alongside source files - ### Error Handling **Always wrap errors** with context using `github.com/pkg/errors`: ```go if err != nil { - return errors.Wrap(err, "operation_name") + return errors.Wrap(err, "package.FunctionName") } ``` @@ -234,94 +342,14 @@ func ConfigFromEnv() (any, error) { - Use inline comments for ENV var documentation in Config structs - Explain security-critical code flows -### Testing - -**Test File Location**: Place `*_test.go` files alongside source files - -**Test Naming**: -```go -func TestFunctionName_Scenario(t *testing.T) -func TestGenerateState_Success(t *testing.T) -func TestVerifyState_WrongUserAgentKey(t *testing.T) -``` - -**Test Structure**: -- Use subtests with `t.Run()` for related scenarios -- Use table-driven tests for multiple similar cases -- Create helper functions for common setup (e.g., `testConfig()`) -- Test happy paths, error cases, edge cases, and security properties - -**Test Categories** (from pkg/oauth/state_test.go example): -1. Happy path tests -2. Error handling (nil params, empty fields, malformed input) -3. 
Security tests (MITM, CSRF, replay attacks, tampering) -4. Edge cases (concurrency, constant-time comparison) -5. Integration tests (round-trip verification) - -### Security - -**Critical Practices**: -- Use `crypto/subtle.ConstantTimeCompare` for cryptographic comparisons -- Implement CSRF protection via state tokens -- Store sensitive cookies as HttpOnly -- Use separate logging levels for security violations (WARN) -- Validate all inputs at function boundaries -- Use parameterized queries (Bun ORM handles this) -- Never commit secrets (.env, keys/ are gitignored) - -## Project Structure - -``` -oslstats/ -├── cmd/oslstats/ # Application entry point -│ ├── main.go # Entry point with flag parsing -│ ├── run.go # Server initialization & graceful shutdown -│ ├── httpserver.go # HTTP server setup -│ ├── routes.go # Route registration -│ ├── middleware.go # Middleware registration -│ ├── auth.go # Authentication setup -│ └── db.go # Database connection & migrations -├── internal/ # Private application code -│ ├── config/ # Configuration aggregation -│ ├── db/ # Database models & queries (Bun ORM) -│ ├── discord/ # Discord OAuth integration -│ ├── handlers/ # HTTP request handlers -│ ├── session/ # Session store (in-memory) -│ └── view/ # Templ templates -│ ├── component/ # Reusable UI components -│ ├── layout/ # Page layouts -│ └── page/ # Full pages -├── pkg/ # Reusable packages -│ ├── contexts/ # Context key definitions -│ ├── embedfs/ # Embedded static files -│ └── oauth/ # OAuth state management -├── bin/ # Compiled binaries (gitignored) -├── keys/ # Private keys (gitignored) -├── tmp/ # Air hot reload temp files (gitignored) -├── Makefile # Build automation -├── .air.toml # Hot reload configuration -└── go.mod # Go module definition -``` - -## Key Dependencies - -- **git.haelnorr.com/h/golib/*** - Custom libraries (env, ezconf, hlog, hws, hwsauth, cookies, jwt) -- **github.com/a-h/templ** - Type-safe HTML templating -- **github.com/uptrace/bun** - PostgreSQL ORM 
-- **github.com/bwmarrin/discordgo** - Discord API client -- **github.com/pkg/errors** - Error wrapping (use this, not fmt.Errorf) -- **github.com/joho/godotenv** - .env file loading - ## Notes for AI Agents 1. **Never commit** .env files, keys/, or generated files (*_templ.go, output.css) 2. **Database operations** should use `bun.Tx` for transaction safety -3. **Templates** are written in templ, not Go html/template - run `templ generate` after changes -4. **Static files** are embedded via `//go:embed` - check pkg/embedfs/ +3. **Templates** are written in templ, not Go html/template - run `just templ` after changes +4. **Static files** are embedded via `//go:embed` - check internal/embedfs/ 5. **Error messages** should be descriptive and use errors.Wrap for context 6. **Security is critical** - especially in OAuth flows (see pkg/oauth/state_test.go for examples) 7. **Air proxy** runs on port 3000 during development; app runs on 3333 -8. **Test coverage** is currently limited - prioritize testing security-critical code -9. **Configuration** uses ezconf pattern - see internal/*/ezconf.go files for examples -10. **Graceful shutdown** is implemented in cmd/oslstats/run.go - follow this pattern -11. When in plan mode, always use the interactive question tool if available +8. **Configuration** uses ezconf pattern - see internal/*/ezconf.go files for examples +9. 
When in plan mode, always use the interactive question tool if available diff --git a/Makefile b/Makefile deleted file mode 100644 index 8f59266..0000000 --- a/Makefile +++ /dev/null @@ -1,39 +0,0 @@ -# Makefile -.PHONY: build - -BINARY_NAME=oslstats - -build: - tailwindcss -i ./pkg/embedfs/files/css/input.css -o ./pkg/embedfs/files/css/output.css && \ - go mod tidy && \ - templ generate && \ - go generate ./cmd/${BINARY_NAME} && \ - go build -ldflags="-w -s" -o ./bin/${BINARY_NAME}${SUFFIX} ./cmd/${BINARY_NAME} - -run: - make build - ./bin/${BINARY_NAME}${SUFFIX} - -dev: - templ generate --watch &\ - air &\ - tailwindcss -i ./pkg/embedfs/files/css/input.css -o ./pkg/embedfs/files/css/output.css --watch - -clean: - go clean - -genenv: - make build - ./bin/${BINARY_NAME} --genenv ${OUT} - -envdoc: - make build - ./bin/${BINARY_NAME} --envdoc - -showenv: - make build - ./bin/${BINARY_NAME} --showenv - -migrate: - make build - ./bin/${BINARY_NAME}${SUFFIX} --migrate diff --git a/pkg/embedfs/files/js/popups.js b/backups/.gitkeep similarity index 100% rename from pkg/embedfs/files/js/popups.js rename to backups/.gitkeep diff --git a/cmd/oslstats/db.go b/cmd/oslstats/db.go deleted file mode 100644 index 922b928..0000000 --- a/cmd/oslstats/db.go +++ /dev/null @@ -1,54 +0,0 @@ -package main - -import ( - "context" - "database/sql" - "fmt" - - "git.haelnorr.com/h/oslstats/internal/config" - "git.haelnorr.com/h/oslstats/internal/db" - "github.com/pkg/errors" - "github.com/uptrace/bun" - "github.com/uptrace/bun/dialect/pgdialect" - "github.com/uptrace/bun/driver/pgdriver" -) - -func setupBun(ctx context.Context, cfg *config.Config) (conn *bun.DB, close func() error, err error) { - dsn := fmt.Sprintf("postgres://%s:%s@%s:%v/%s?sslmode=%s", - cfg.DB.User, cfg.DB.Password, cfg.DB.Host, cfg.DB.Port, cfg.DB.DB, cfg.DB.SSL) - sqldb := sql.OpenDB(pgdriver.NewConnector(pgdriver.WithDSN(dsn))) - conn = bun.NewDB(sqldb, pgdialect.New()) - close = sqldb.Close - - err = loadModels(ctx, 
conn, cfg.Flags.MigrateDB) - if err != nil { - return nil, nil, errors.Wrap(err, "loadModels") - } - - return conn, close, nil -} - -func loadModels(ctx context.Context, conn *bun.DB, resetDB bool) error { - models := []any{ - (*db.User)(nil), - (*db.DiscordToken)(nil), - } - - for _, model := range models { - _, err := conn.NewCreateTable(). - Model(model). - IfNotExists(). - Exec(ctx) - if err != nil { - return errors.Wrap(err, "db.NewCreateTable") - } - if resetDB { - err = conn.ResetModel(ctx, model) - if err != nil { - return errors.Wrap(err, "db.ResetModel") - } - } - } - - return nil -} diff --git a/cmd/oslstats/main.go b/cmd/oslstats/main.go index 38e0eea..4bdeec8 100644 --- a/cmd/oslstats/main.go +++ b/cmd/oslstats/main.go @@ -5,12 +5,19 @@ import ( "fmt" "os" + "git.haelnorr.com/h/golib/hlog" "git.haelnorr.com/h/oslstats/internal/config" + "git.haelnorr.com/h/oslstats/internal/db/migrate" "github.com/pkg/errors" ) func main() { - flags := config.SetupFlags() + flags, err := config.SetupFlags() + if err != nil { + fmt.Fprintf(os.Stderr, "Error parsing flags: %v\n", err) + os.Exit(1) + } + ctx := context.Background() cfg, loader, err := config.GetConfig(flags) @@ -18,29 +25,66 @@ func main() { fmt.Fprintf(os.Stderr, "%s\n", errors.Wrap(err, "Failed to load config")) os.Exit(1) } - + // Handle utility flags if flags.EnvDoc || flags.ShowEnv { - loader.PrintEnvVarsStdout(flags.ShowEnv) + if err = loader.PrintEnvVarsStdout(flags.ShowEnv); err != nil { + fmt.Fprintf(os.Stderr, "%s\n", errors.Wrap(err, "Failed to print env doc")) + } return } if flags.GenEnv != "" { - loader.GenerateEnvFile(flags.GenEnv, true) - return - } - - if flags.MigrateDB { - _, closedb, err := setupBun(ctx, cfg) - if err != nil { - fmt.Fprintf(os.Stderr, "%s\n", err) - os.Exit(1) + if err = loader.GenerateEnvFile(flags.GenEnv, true); err != nil { + fmt.Fprintf(os.Stderr, "%s\n", errors.Wrap(err, "Failed to generate env file")) } - closedb() return } - if err := run(ctx, os.Stdout, cfg); 
err != nil { - fmt.Fprintf(os.Stderr, "%s\n", err) + // Setup the logger + logger, err := hlog.NewLogger(cfg.HLOG, os.Stdout) + if err != nil { + fmt.Fprintf(os.Stderr, "%s\n", errors.Wrap(err, "Failed to init logger")) os.Exit(1) } + + // Handle migration file creation (doesn't need DB connection) + if flags.MigrateCreate != "" { + if err := migrate.CreateMigration(flags.MigrateCreate); err != nil { + logger.Fatal().Err(err).Str("stacktrace", fmt.Sprintf("%+v", errors.Wrap(err, "createMigration"))).Msg("Error creating migration") + } + return + } + + // Handle commands that need database connection + if flags.MigrateUp != "" || flags.MigrateRollback != "" || + flags.MigrateStatus || flags.MigrateDryRun || + flags.ResetDB { + + var command, countStr string + // Route to appropriate command + if flags.MigrateUp != "" { + command = "up" + countStr = flags.MigrateUp + } else if flags.MigrateRollback != "" { + command = "rollback" + countStr = flags.MigrateRollback + } else if flags.MigrateStatus { + command = "status" + } + if flags.ResetDB { + err = migrate.ResetDatabase(ctx, cfg) + } else { + err = migrate.RunMigrations(ctx, cfg, command, countStr) + } + + if err != nil { + logger.Fatal().Err(err).Str("stacktrace", fmt.Sprintf("%+v", errors.Wrap(err, "dbFlags"))).Msg("Error migrating database") + } + return + } + + // Normal server startup + if err := run(ctx, logger, cfg); err != nil { + logger.Fatal().Err(err).Str("stacktrace", fmt.Sprintf("%+v", errors.Wrap(err, "run"))).Msg("Error starting server") + } } diff --git a/cmd/oslstats/middleware.go b/cmd/oslstats/middleware.go deleted file mode 100644 index cd4ea78..0000000 --- a/cmd/oslstats/middleware.go +++ /dev/null @@ -1,24 +0,0 @@ -package main - -import ( - "git.haelnorr.com/h/golib/hws" - "git.haelnorr.com/h/golib/hwsauth" - "git.haelnorr.com/h/oslstats/internal/db" - - "github.com/pkg/errors" - "github.com/uptrace/bun" -) - -func addMiddleware( - server *hws.Server, - auth *hwsauth.Authenticator[*db.User, 
bun.Tx], -) error { - - err := server.AddMiddleware( - auth.Authenticate(), - ) - if err != nil { - return errors.Wrap(err, "server.AddMiddleware") - } - return nil -} diff --git a/cmd/oslstats/routes.go b/cmd/oslstats/routes.go deleted file mode 100644 index c19f6d0..0000000 --- a/cmd/oslstats/routes.go +++ /dev/null @@ -1,75 +0,0 @@ -package main - -import ( - "net/http" - - "git.haelnorr.com/h/golib/hws" - "git.haelnorr.com/h/golib/hwsauth" - "github.com/pkg/errors" - "github.com/uptrace/bun" - - "git.haelnorr.com/h/oslstats/internal/config" - "git.haelnorr.com/h/oslstats/internal/db" - "git.haelnorr.com/h/oslstats/internal/discord" - "git.haelnorr.com/h/oslstats/internal/handlers" - "git.haelnorr.com/h/oslstats/internal/store" -) - -func addRoutes( - server *hws.Server, - staticFS *http.FileSystem, - cfg *config.Config, - conn *bun.DB, - auth *hwsauth.Authenticator[*db.User, bun.Tx], - store *store.Store, - discordAPI *discord.APIClient, -) error { - // Create the routes - routes := []hws.Route{ - { - Path: "/static/", - Method: hws.MethodGET, - Handler: http.StripPrefix("/static/", handlers.StaticFS(staticFS, server)), - }, - { - Path: "/", - Method: hws.MethodGET, - Handler: handlers.Index(server), - }, - { - Path: "/login", - Method: hws.MethodGET, - Handler: auth.LogoutReq(handlers.Login(server, cfg, store, discordAPI)), - }, - { - Path: "/auth/callback", - Method: hws.MethodGET, - Handler: auth.LogoutReq(handlers.Callback(server, auth, conn, cfg, store, discordAPI)), - }, - { - Path: "/register", - Methods: []hws.Method{hws.MethodGET, hws.MethodPOST}, - Handler: auth.LogoutReq(handlers.Register(server, auth, conn, cfg, store)), - }, - { - Path: "/logout", - Methods: []hws.Method{hws.MethodGET, hws.MethodPOST}, - Handler: auth.LoginReq(handlers.Logout(server, auth, conn, discordAPI)), - }, - } - - htmxRoutes := []hws.Route{ - { - Path: "/htmx/isusernameunique", - Method: hws.MethodPOST, - Handler: handlers.IsUsernameUnique(server, conn, cfg, store), - }, - 
} - - // Register the routes with the server - err := server.AddRoutes(append(routes, htmxRoutes...)...) - if err != nil { - return errors.Wrap(err, "server.AddRoutes") - } - return nil -} diff --git a/cmd/oslstats/run.go b/cmd/oslstats/run.go index 2d6f34f..a400a80 100644 --- a/cmd/oslstats/run.go +++ b/cmd/oslstats/run.go @@ -2,7 +2,7 @@ package main import ( "context" - "io" + "fmt" "os" "os/signal" "sync" @@ -12,30 +12,22 @@ import ( "github.com/pkg/errors" "git.haelnorr.com/h/oslstats/internal/config" + "git.haelnorr.com/h/oslstats/internal/db" "git.haelnorr.com/h/oslstats/internal/discord" + "git.haelnorr.com/h/oslstats/internal/embedfs" + "git.haelnorr.com/h/oslstats/internal/server" "git.haelnorr.com/h/oslstats/internal/store" - "git.haelnorr.com/h/oslstats/pkg/embedfs" ) // Initializes and runs the server -func run(ctx context.Context, w io.Writer, cfg *config.Config) error { +func run(ctx context.Context, logger *hlog.Logger, cfg *config.Config) error { ctx, cancel := signal.NotifyContext(ctx, os.Interrupt) defer cancel() - // Setup the logger - logger, err := hlog.NewLogger(cfg.HLOG, w) - if err != nil { - return errors.Wrap(err, "hlog.NewLogger") - } - // Setup the database connection logger.Debug().Msg("Config loaded and logger started") logger.Debug().Msg("Connecting to database") - bun, closedb, err := setupBun(ctx, cfg) - if err != nil { - return errors.Wrap(err, "setupDBConn") - } - defer closedb() + conn := db.NewDB(cfg.DB) // Setup embedded files logger.Debug().Msg("Getting embedded files") @@ -56,7 +48,7 @@ func run(ctx context.Context, w io.Writer, cfg *config.Config) error { } logger.Debug().Msg("Setting up HTTP server") - httpServer, err := setupHttpServer(&staticFS, cfg, logger, bun, store, discordAPI) + httpServer, err := server.Setup(staticFS, cfg, logger, conn, store, discordAPI) if err != nil { return errors.Wrap(err, "setupHttpServer") } @@ -73,11 +65,16 @@ func run(ctx context.Context, w io.Writer, cfg *config.Config) error { 
wg.Go(func() { <-ctx.Done() shutdownCtx := context.Background() - shutdownCtx, cancel := context.WithTimeout(shutdownCtx, 10*time.Second) + shutdownCtx, cancel := context.WithTimeout(shutdownCtx, 60*time.Second) defer cancel() + logger.Info().Msg("Shut down requested, waiting 60 seconds...") err := httpServer.Shutdown(shutdownCtx) if err != nil { - logger.Error().Err(err).Msg("Graceful shutdown failed") + logger.Error().Err(err).Str("stacktrace", fmt.Sprintf("%+v", errors.Wrap(err, "httpServer.Shutdown"))).Msg("Error during HTTP server shutdown") + } + err = conn.Close() + if err != nil { + logger.Error().Err(err).Str("stacktrace", fmt.Sprintf("%+v", errors.Wrap(err, "closedb"))).Msg("Error during database close") } }) wg.Wait() diff --git a/go.mod b/go.mod index e909fb8..7db4d2a 100644 --- a/go.mod +++ b/go.mod @@ -1,14 +1,16 @@ module git.haelnorr.com/h/oslstats -go 1.25.5 +go 1.25.6 require ( git.haelnorr.com/h/golib/env v0.9.1 git.haelnorr.com/h/golib/ezconf v0.1.1 git.haelnorr.com/h/golib/hlog v0.10.4 - git.haelnorr.com/h/golib/hws v0.3.1 - git.haelnorr.com/h/golib/hwsauth v0.5.2 + git.haelnorr.com/h/golib/hws v0.5.0 + git.haelnorr.com/h/golib/hwsauth v0.6.1 + git.haelnorr.com/h/golib/notify v0.1.0 github.com/a-h/templ v0.3.977 + github.com/coder/websocket v1.8.14 github.com/joho/godotenv v1.5.1 github.com/pkg/errors v0.9.1 github.com/uptrace/bun v1.2.16 @@ -17,13 +19,15 @@ require ( ) require ( + github.com/gobwas/glob v0.2.3 // indirect github.com/gorilla/websocket v1.4.2 // indirect - golang.org/x/crypto v0.45.0 // indirect + golang.org/x/crypto v0.47.0 // indirect ) require ( git.haelnorr.com/h/golib/cookies v0.9.0 git.haelnorr.com/h/golib/jwt v0.10.1 // indirect + git.haelnorr.com/h/timefmt v0.1.0 github.com/bwmarrin/discordgo v0.29.0 github.com/go-logr/logr v1.4.3 // indirect github.com/golang-jwt/jwt v3.2.2+incompatible // indirect @@ -38,9 +42,9 @@ require ( github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect go.opentelemetry.io/otel v1.38.0 // 
indirect go.opentelemetry.io/otel/trace v1.38.0 // indirect - golang.org/x/sys v0.40.0 // indirect - k8s.io/apimachinery v0.35.0 // indirect + golang.org/x/sys v0.41.0 // indirect + k8s.io/apimachinery v0.35.1 // indirect k8s.io/klog/v2 v2.130.1 // indirect - k8s.io/utils v0.0.0-20260108192941-914a6e750570 // indirect + k8s.io/utils v0.0.0-20260210185600-b8788abfbbc2 // indirect mellium.im/sasl v0.3.2 // indirect ) diff --git a/go.sum b/go.sum index afdf0fa..16f49fb 100644 --- a/go.sum +++ b/go.sum @@ -6,23 +6,31 @@ git.haelnorr.com/h/golib/ezconf v0.1.1 h1:4euTSDb9jvuQQkVq+x5gHoYPYyUZPWxoOSlWCI git.haelnorr.com/h/golib/ezconf v0.1.1/go.mod h1:rETDcjpcEyyeBgCiZSU617wc0XycwZSC5+IAOtXmwP8= git.haelnorr.com/h/golib/hlog v0.10.4 h1:vpCsV/OddjIYx8F48U66WxojjmhEbeLGQAOBG4ViSRQ= git.haelnorr.com/h/golib/hlog v0.10.4/go.mod h1:+wJ8vecQY/JITTXKmI3JfkHiUGyMs7N6wooj2wuWZbc= -git.haelnorr.com/h/golib/hws v0.3.1 h1:uFXAT8SuKs4VACBdrkmZ+dJjeBlSPgCKUPt8zGCcwrI= -git.haelnorr.com/h/golib/hws v0.3.1/go.mod h1:6ZlRKnt8YMpv5XcMXmyBGmD1/euvBo3d1azEvHJjOLo= -git.haelnorr.com/h/golib/hwsauth v0.5.2 h1:K4McXMEHtI5o4fAL3AZrmaMkwORNqSTV3MM6BExNKag= -git.haelnorr.com/h/golib/hwsauth v0.5.2/go.mod h1:NOonrVU/lX8lzuV77eDEiTwBjn7RrzYVcSdXUJWeHmQ= +git.haelnorr.com/h/golib/hws v0.5.0 h1:0CSv2f+dm/KzB/o5o6uXCyvN74iBdMTImhkyAZzU52c= +git.haelnorr.com/h/golib/hws v0.5.0/go.mod h1:dxAbbGGNzqLXhZXwgt091QsvsPBdrS+1YsNQNldNVoM= +git.haelnorr.com/h/golib/hwsauth v0.6.1 h1:3BiM6hwuYDjgfu02hshvUtr592DnWi9Epj//3N13ti0= +git.haelnorr.com/h/golib/hwsauth v0.6.1/go.mod h1:xPdxqHzr1ZU0MHlG4o8r1zEstBu4FJCdaA0ZHSFxmKA= git.haelnorr.com/h/golib/jwt v0.10.1 h1:1Adxt9H3Y4fWFvFjWpvg/vSFhbgCMDMxgiE3m7KvDMI= git.haelnorr.com/h/golib/jwt v0.10.1/go.mod h1:fbuPrfucT9lL0faV5+Q5Gk9WFJxPlwzRPpbMQKYZok4= +git.haelnorr.com/h/golib/notify v0.1.0 h1:xdf6zd21F6n+SuGTeJiuLNMf6zFXMvwpKD0gmNq8N10= +git.haelnorr.com/h/golib/notify v0.1.0/go.mod h1:ARqaRmCYb8LMURhDM75sG+qX+YpqXmUVeAtacwjHjBc= +git.haelnorr.com/h/timefmt v0.1.0 
h1:ULDkWEtFIV+FkkoV0q9n62Spj+HDdtFL9QeAdGIEp+o= +git.haelnorr.com/h/timefmt v0.1.0/go.mod h1:12gXXYLP4w9Fa9ZkbZWdvKV6RyZEzwAm9mN+WB3oXpw= github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU= github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU= github.com/a-h/templ v0.3.977 h1:kiKAPXTZE2Iaf8JbtM21r54A8bCNsncrfnokZZSrSDg= github.com/a-h/templ v0.3.977/go.mod h1:oCZcnKRf5jjsGpf2yELzQfodLphd2mwecwG4Crk5HBo= github.com/bwmarrin/discordgo v0.29.0 h1:FmWeXFaKUwrcL3Cx65c20bTRW+vOb6k8AnaP+EgjDno= github.com/bwmarrin/discordgo v0.29.0/go.mod h1:NJZpH+1AfhIcyQsPeuBKsUtYrRnjkyu0kIVMCHkZtRY= +github.com/coder/websocket v1.8.14 h1:9L0p0iKiNOibykf283eHkKUHHrpG7f65OE3BhhO7v9g= +github.com/coder/websocket v1.8.14/go.mod h1:NX3SzP+inril6yawo5CQXx8+fk145lPDC6pumgx0mVg= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= @@ -71,25 +79,25 @@ go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwEx go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE= go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs= 
golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= -golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= +golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= +golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= -golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k= +golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -k8s.io/apimachinery v0.35.0 h1:Z2L3IHvPVv/MJ7xRxHEtk6GoJElaAqDCCU0S6ncYok8= -k8s.io/apimachinery v0.35.0/go.mod h1:jQCgFZFR1F4Ik7hvr2g84RTJSZegBc8yHgFWKn//hns= +k8s.io/apimachinery v0.35.1 h1:yxO6gV555P1YV0SANtnTjXYfiivaTPvCTKX6w6qdDsU= +k8s.io/apimachinery v0.35.1/go.mod h1:jQCgFZFR1F4Ik7hvr2g84RTJSZegBc8yHgFWKn//hns= 
k8s.io/klog/v2 v2.130.1 h1:n9Xl7H1Xvksem4KFG4PYbdQCQxqc/tTUyrgXaOhHSzk= k8s.io/klog/v2 v2.130.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE= -k8s.io/utils v0.0.0-20260108192941-914a6e750570 h1:JT4W8lsdrGENg9W+YwwdLJxklIuKWdRm+BC+xt33FOY= -k8s.io/utils v0.0.0-20260108192941-914a6e750570/go.mod h1:xDxuJ0whA3d0I4mf/C4ppKHxXynQ+fxnkmQH0vTHnuk= +k8s.io/utils v0.0.0-20260210185600-b8788abfbbc2 h1:AZYQSJemyQB5eRxqcPky+/7EdBj0xi3g0ZcxxJ7vbWU= +k8s.io/utils v0.0.0-20260210185600-b8788abfbbc2/go.mod h1:xDxuJ0whA3d0I4mf/C4ppKHxXynQ+fxnkmQH0vTHnuk= mellium.im/sasl v0.3.2 h1:PT6Xp7ccn9XaXAnJ03FcEjmAn7kK1x7aoXV6F+Vmrl0= mellium.im/sasl v0.3.2/go.mod h1:NKXDi1zkr+BlMHLQjY3ofYuU4KSPFxknb8mfEu6SveY= diff --git a/internal/config/config.go b/internal/config/config.go index 112752a..3cd6094 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -1,3 +1,4 @@ +// Package config provides the environment based configuration for the program package config import ( @@ -7,6 +8,7 @@ import ( "git.haelnorr.com/h/golib/hwsauth" "git.haelnorr.com/h/oslstats/internal/db" "git.haelnorr.com/h/oslstats/internal/discord" + "git.haelnorr.com/h/oslstats/internal/rbac" "git.haelnorr.com/h/oslstats/pkg/oauth" "github.com/joho/godotenv" "github.com/pkg/errors" @@ -19,10 +21,11 @@ type Config struct { HLOG *hlog.Config Discord *discord.Config OAuth *oauth.Config + RBAC *rbac.Config Flags *Flags } -// Load the application configuration and get a pointer to the Config object +// GetConfig loads the application configuration and returns a pointer to the Config object // If doconly is specified, only the loader will be returned func GetConfig(flags *Flags) (*Config, *ezconf.ConfigLoader, error) { err := godotenv.Load(flags.EnvFile) @@ -31,14 +34,18 @@ func GetConfig(flags *Flags) (*Config, *ezconf.ConfigLoader, error) { } loader := ezconf.New() - loader.RegisterIntegrations( + err = loader.RegisterIntegrations( hlog.NewEZConfIntegration(), hws.NewEZConfIntegration(), 
hwsauth.NewEZConfIntegration(), db.NewEZConfIntegration(), discord.NewEZConfIntegration(), oauth.NewEZConfIntegration(), + rbac.NewEZConfIntegration(), ) + if err != nil { + return nil, nil, errors.Wrap(err, "loader.RegisterIntegrations") + } if err := loader.ParseEnvVars(); err != nil { return nil, nil, errors.Wrap(err, "loader.ParseEnvVars") } @@ -81,6 +88,11 @@ func GetConfig(flags *Flags) (*Config, *ezconf.ConfigLoader, error) { return nil, nil, errors.New("OAuth Config not loaded") } + rbaccfg, ok := loader.GetConfig("rbac") + if !ok { + return nil, nil, errors.New("RBAC Config not loaded") + } + config := &Config{ DB: dbcfg.(*db.Config), HWS: hwscfg.(*hws.Config), @@ -88,6 +100,7 @@ func GetConfig(flags *Flags) (*Config, *ezconf.ConfigLoader, error) { HLOG: hlogcfg.(*hlog.Config), Discord: discordcfg.(*discord.Config), OAuth: oauthcfg.(*oauth.Config), + RBAC: rbaccfg.(*rbac.Config), Flags: flags, } diff --git a/internal/config/flags.go b/internal/config/flags.go index ba87131..b131a67 100644 --- a/internal/config/flags.go +++ b/internal/config/flags.go @@ -2,31 +2,125 @@ package config import ( "flag" + "strconv" + + "github.com/pkg/errors" ) type Flags struct { - MigrateDB bool - EnvDoc bool - ShowEnv bool - GenEnv string - EnvFile string + // Utility flags + EnvDoc bool + ShowEnv bool + GenEnv string + EnvFile string + DevMode bool + + // Database reset (destructive) + ResetDB bool + + // Migration commands + MigrateUp string + MigrateRollback string + MigrateStatus bool + MigrateCreate string + MigrateDryRun bool + + // Backup control + MigrateNoBackup bool } -func SetupFlags() *Flags { - // Parse commandline args - migrateDB := flag.Bool("migrate", false, "Reset all the database tables with the updated models") +func SetupFlags() (*Flags, error) { + // Utility flags envDoc := flag.Bool("envdoc", false, "Print all environment variables and their documentation") showEnv := flag.Bool("showenv", false, "Print all environment variable values and their 
documentation") genEnv := flag.String("genenv", "", "Generate a .env file with all environment variables (specify filename)") envfile := flag.String("envfile", ".env", "Specify a .env file to use for the configuration") + devMode := flag.Bool("dev", false, "Run the server in dev mode") + + // Database reset (destructive) + resetDB := flag.Bool("reset-db", false, "⚠️ DESTRUCTIVE: Drop and recreate all tables (dev only)") + + // Migration commands + migrateUp := flag.String("migrate-up", "", "Run pending database migrations (usage: --migrate-up [count|all], default: 1)") + migrateRollback := flag.String("migrate-rollback", "", "Rollback migrations (usage: --migrate-rollback [count|all], default: 1)") + migrateStatus := flag.Bool("migrate-status", false, "Show database migration status") + migrateCreate := flag.String("migrate-create", "", "Create a new migration file with the given name") + migrateDryRun := flag.Bool("migrate-dry-run", false, "Preview pending migrations without applying them") + + // Backup control + migrateNoBackup := flag.Bool("no-backup", false, "Skip automatic backups (dev only - faster but less safe)") + flag.Parse() - flags := &Flags{ - MigrateDB: *migrateDB, - EnvDoc: *envDoc, - ShowEnv: *showEnv, - GenEnv: *genEnv, - EnvFile: *envfile, + // Validate: can't use multiple migration commands at once + commands := 0 + if *migrateUp != "" { + commands++ } - return flags + if *migrateRollback != "" { + commands++ + } + if *migrateStatus { + commands++ + } + if *migrateDryRun { + commands++ + } + if *resetDB { + commands++ + } + + if commands > 1 { + return nil, errors.New("cannot use multiple migration commands simultaneously") + } + + // Validate migration count values + if *migrateUp != "" { + if err := validateMigrationCount(*migrateUp); err != nil { + return nil, errors.Wrap(err, "invalid --migrate-up value") + } + } + if *migrateRollback != "" { + if err := validateMigrationCount(*migrateRollback); err != nil { + return nil, errors.Wrap(err, 
"invalid --migrate-rollback value") + } + } + + flags := &Flags{ + EnvDoc: *envDoc, + ShowEnv: *showEnv, + GenEnv: *genEnv, + EnvFile: *envfile, + DevMode: *devMode, + ResetDB: *resetDB, + MigrateUp: *migrateUp, + MigrateRollback: *migrateRollback, + MigrateStatus: *migrateStatus, + MigrateCreate: *migrateCreate, + MigrateDryRun: *migrateDryRun, + MigrateNoBackup: *migrateNoBackup, + } + return flags, nil +} + +// validateMigrationCount validates a migration count value +// Valid values: "all" or a positive integer (1, 2, 3, ...) +func validateMigrationCount(value string) error { + if value == "" { + return nil + } + if value == "all" { + return nil + } + + // Try parsing as integer + count, err := strconv.Atoi(value) + if err != nil { + return errors.New("must be a positive integer or 'all'") + } + if count < 1 { + return errors.New("must be a positive integer (1 or greater)") + } + + return nil } diff --git a/internal/contexts/devmode.go b/internal/contexts/devmode.go new file mode 100644 index 0000000..ea1c20e --- /dev/null +++ b/internal/contexts/devmode.go @@ -0,0 +1,16 @@ +package contexts + +import "context" + +func DevMode(ctx context.Context) DevInfo { + devmode, ok := ctx.Value(DevModeKey).(DevInfo) + if !ok { + return DevInfo{} + } + return devmode +} + +type DevInfo struct { + WebsocketBase string + HTMXLog bool +} diff --git a/internal/contexts/keys.go b/internal/contexts/keys.go new file mode 100644 index 0000000..561f47b --- /dev/null +++ b/internal/contexts/keys.go @@ -0,0 +1,14 @@ +// Package contexts provides utilities for loading and extracting structs from contexts +package contexts + +type Key string + +func (c Key) String() string { + return "oslstats context key " + string(c) +} + +var ( + DevModeKey Key = Key("devmode") + PermissionCacheKey Key = Key("permissions") + PreviewRoleKey Key = Key("preview-role") +) diff --git a/internal/contexts/permissions.go b/internal/contexts/permissions.go new file mode 100644 index 0000000..37ea512 --- 
/dev/null +++ b/internal/contexts/permissions.go @@ -0,0 +1,64 @@ +package contexts + +import ( + "context" + + "git.haelnorr.com/h/oslstats/internal/permissions" + "git.haelnorr.com/h/oslstats/internal/roles" +) + +// Permissions retrieves the permission cache from context (type-safe) +func Permissions(ctx context.Context) *PermissionCache { + cache, ok := ctx.Value(PermissionCacheKey).(*PermissionCache) + if !ok { + return nil + } + return cache +} + +type PermissionCache struct { + Permissions map[permissions.Permission]bool + Roles map[roles.Role]bool + HasWildcard bool +} + +// HasPermission returns true if the cache contains the provided permission +func (p *PermissionCache) HasPermission(perm permissions.Permission) bool { + if p.HasWildcard { + return true + } + _, exists := p.Permissions[perm] + return exists +} + +// HasAnyPermission returns true if the cache contains any of the provided permissions +func (p *PermissionCache) HasAnyPermission(perms []permissions.Permission) bool { + if p.HasWildcard { + return true + } + for _, perm := range perms { + _, exists := p.Permissions[perm] + if exists { + return true + } + } + return false +} + +// HasAllPermissions returns true only if at least one permission is provided and the cache +// contains all the provided permissions +func (p *PermissionCache) HasAllPermissions(perms []permissions.Permission) bool { + if p.HasWildcard { + return true + } + if len(perms) == 0 { + return false + } + for _, perm := range perms { + _, exists := p.Permissions[perm] + if !exists { + return false + } + } + return true +} diff --git a/internal/contexts/preview_role.go b/internal/contexts/preview_role.go new file mode 100644 index 0000000..c8513bc --- /dev/null +++ b/internal/contexts/preview_role.go @@ -0,0 +1,25 @@ +package contexts + +import ( + "context" + + "git.haelnorr.com/h/oslstats/internal/db" +) + +// WithPreviewRole adds a preview role to the context +func WithPreviewRole(ctx context.Context, role *db.Role) 
context.Context { + return context.WithValue(ctx, PreviewRoleKey, role) +} + +// GetPreviewRole retrieves the preview role from the context, or nil if not present +func GetPreviewRole(ctx context.Context) *db.Role { + if role, ok := ctx.Value(PreviewRoleKey).(*db.Role); ok { + return role + } + return nil +} + +// IsPreviewMode returns true if the user is currently in preview mode +func IsPreviewMode(ctx context.Context) bool { + return GetPreviewRole(ctx) != nil +} diff --git a/internal/db/audit.go b/internal/db/audit.go new file mode 100644 index 0000000..98b6817 --- /dev/null +++ b/internal/db/audit.go @@ -0,0 +1,148 @@ +package db + +import ( + "net/http" + "reflect" + "strings" +) + +type AuditMeta struct { + r *http.Request + u *User +} + +func NewAudit(r *http.Request, u *User) *AuditMeta { + if u == nil { + u = CurrentUser(r.Context()) + } + return &AuditMeta{r, u} +} + +// AuditInfo contains metadata for audit logging +type AuditInfo struct { + Action string // e.g., "seasons.create", "users.update" + ResourceType string // e.g., "season", "user" + ResourceID any // Primary key value (int, string, etc.) 
+ Details any // Changed fields or additional metadata +} + +// extractTableName gets the bun table name from a model type using reflection +// Example: Season with `bun:"table:seasons,alias:s"` returns "seasons" +func extractTableName[T any]() string { + var model T + t := reflect.TypeOf(model) + + // Handle pointer types + if t.Kind() == reflect.Pointer { + t = t.Elem() + } + + // Look for bun.BaseModel field with table tag + for i := 0; i < t.NumField(); i++ { + field := t.Field(i) + if field.Type.Name() == "BaseModel" { + bunTag := field.Tag.Get("bun") + if bunTag != "" { + // Parse tag: "table:seasons,alias:s" -> "seasons" + for part := range strings.SplitSeq(bunTag, ",") { + name, ok := strings.CutPrefix(part, "table:") + if ok { return name } + } + } + } + } + + // Fallback: use struct name in lowercase + "s" + return strings.ToLower(t.Name()) + "s" +} + +// extractResourceType converts a table name to singular resource type +// Example: "seasons" -> "season", "users" -> "user" +func extractResourceType(tableName string) string { + // Simple singularization: remove trailing 's' + if strings.HasSuffix(tableName, "s") && len(tableName) > 1 { + return tableName[:len(tableName)-1] + } + return tableName +} + +// buildAction creates a permission-style action string +// Example: ("season", "create") -> "seasons.create" +func buildAction(resourceType, operation string) string { + // Pluralize resource type (simple: add 's') + plural := resourceType + if !strings.HasSuffix(plural, "s") { + plural = plural + "s" + } + return plural + "." 
+ operation +} + +// extractPrimaryKey uses reflection to find and return the primary key value from a model +// Returns nil if no primary key is found +func extractPrimaryKey[T any](model *T) any { + if model == nil { + return nil + } + + v := reflect.ValueOf(model) + if v.Kind() == reflect.Pointer { + v = v.Elem() + } + + t := v.Type() + for i := 0; i < t.NumField(); i++ { + field := t.Field(i) + bunTag := field.Tag.Get("bun") + if bunTag != "" && strings.Contains(bunTag, "pk") { + // Found primary key field + fieldValue := v.Field(i) + if fieldValue.IsValid() && fieldValue.CanInterface() { + return fieldValue.Interface() + } + } + } + + return nil +} + +// extractChangedFields builds a map of field names to their new values +// Only includes fields specified in the columns list +func extractChangedFields[T any](model *T, columns []string) map[string]any { + if model == nil || len(columns) == 0 { + return nil + } + + result := make(map[string]any) + v := reflect.ValueOf(model) + if v.Kind() == reflect.Pointer { + v = v.Elem() + } + + t := v.Type() + + // Build map of bun column names to field names + columnToField := make(map[string]int) + for i := 0; i < t.NumField(); i++ { + field := t.Field(i) + bunTag := field.Tag.Get("bun") + if bunTag != "" { + // Parse bun tag to get column name (first part before comma) + parts := strings.Split(bunTag, ",") + if len(parts) > 0 && parts[0] != "" { + columnToField[parts[0]] = i + } + } + } + + // Extract values for requested columns + for _, col := range columns { + if fieldIdx, ok := columnToField[col]; ok { + fieldValue := v.Field(fieldIdx) + if fieldValue.IsValid() && fieldValue.CanInterface() { + result[col] = fieldValue.Interface() + } + } + } + + return result +} diff --git a/internal/db/auditlog.go b/internal/db/auditlog.go new file mode 100644 index 0000000..8727c1c --- /dev/null +++ b/internal/db/auditlog.go @@ -0,0 +1,201 @@ +package db + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/pkg/errors" 
+ "github.com/uptrace/bun" +) + +type AuditLog struct { + bun.BaseModel `bun:"table:audit_log,alias:al"` + + ID int `bun:"id,pk,autoincrement"` + UserID int `bun:"user_id,notnull"` + Action string `bun:"action,notnull"` + ResourceType string `bun:"resource_type,notnull"` + ResourceID *string `bun:"resource_id"` + Details json.RawMessage `bun:"details,type:jsonb"` + IPAddress string `bun:"ip_address"` + UserAgent string `bun:"user_agent"` + Result string `bun:"result,notnull"` // success, denied, error + ErrorMessage *string `bun:"error_message"` + CreatedAt int64 `bun:"created_at,notnull"` + + // Relations + User *User `bun:"rel:belongs-to,join:user_id=id"` +} + +// CreateAuditLog creates a new audit log entry +func CreateAuditLog(ctx context.Context, tx bun.Tx, log *AuditLog) error { + if log == nil { + return errors.New("log cannot be nil") + } + err := Insert(tx, log).Exec(ctx) + if err != nil { + return errors.Wrap(err, "db.Insert") + } + return nil +} + +type AuditLogFilter struct { + *ListFilter +} + +func NewAuditLogFilter() *AuditLogFilter { + return &AuditLogFilter{ + ListFilter: NewListFilter(), + } +} + +func (a *AuditLogFilter) UserID(id int) *AuditLogFilter { + a.Equals("al.user_id", id) + return a +} + +func (a *AuditLogFilter) Action(action string) *AuditLogFilter { + a.Equals("al.action", action) + return a +} + +func (a *AuditLogFilter) ResourceType(resourceType string) *AuditLogFilter { + a.Equals("al.resource_type", resourceType) + return a +} + +func (a *AuditLogFilter) Result(result string) *AuditLogFilter { + a.Equals("al.result", result) + return a +} + +func (a *AuditLogFilter) UserIDs(ids []int) *AuditLogFilter { + if len(ids) > 0 { + a.In("al.user_id", ids) + } + return a +} + +func (a *AuditLogFilter) Actions(actions []string) *AuditLogFilter { + fmt.Println(actions) + if len(actions) > 0 { + a.In("al.action", actions) + } + return a +} + +func (a *AuditLogFilter) ResourceTypes(resourceTypes []string) *AuditLogFilter { + if 
len(resourceTypes) > 0 { + a.In("al.resource_type", resourceTypes) + } + return a +} + +func (a *AuditLogFilter) Results(results []string) *AuditLogFilter { + if len(results) > 0 { + a.In("al.result", results) + } + return a +} + +func (a *AuditLogFilter) DateRange(start, end int64) *AuditLogFilter { + if start > 0 { + a.GreaterEqualThan("al.created_at", start) + } + if end > 0 { + a.LessEqualThan("al.created_at", end) + } + return a +} + +// GetAuditLogs retrieves audit logs with optional filters and pagination +func GetAuditLogs(ctx context.Context, tx bun.Tx, pageOpts *PageOpts, filters *AuditLogFilter) (*List[AuditLog], error) { + defaultPageOpts := &PageOpts{ + Page: 1, + PerPage: 10, + Order: bun.OrderDesc, + OrderBy: "created_at", + } + + return GetList[AuditLog](tx). + Relation("User"). + Filter(filters.filters...). + GetPaged(ctx, pageOpts, defaultPageOpts) +} + +// GetAuditLogsByUser retrieves audit logs for a specific user +func GetAuditLogsByUser(ctx context.Context, tx bun.Tx, userID int, pageOpts *PageOpts) (*List[AuditLog], error) { + if userID <= 0 { + return nil, errors.New("userID must be positive") + } + filters := NewAuditLogFilter().UserID(userID) + + return GetAuditLogs(ctx, tx, pageOpts, filters) +} + +// GetAuditLogsByAction retrieves audit logs for a specific action +func GetAuditLogsByAction(ctx context.Context, tx bun.Tx, action string, pageOpts *PageOpts) (*List[AuditLog], error) { + if action == "" { + return nil, errors.New("action cannot be empty") + } + filters := NewAuditLogFilter().Action(action) + + return GetAuditLogs(ctx, tx, pageOpts, filters) +} + +// GetAuditLogByID retrieves a single audit log by ID +func GetAuditLogByID(ctx context.Context, tx bun.Tx, id int) (*AuditLog, error) { + if id <= 0 { + return nil, errors.New("id must be positive") + } + return GetByField[AuditLog](tx, "al.id", id).Relation("User").Get(ctx) +} + +// GetUniqueActions retrieves a list of all unique actions in the audit log +func GetUniqueActions(ctx 
context.Context, tx bun.Tx) ([]string, error) { + var actions []string + err := tx.NewSelect(). + Model((*AuditLog)(nil)). + Column("action"). + Distinct(). + Order("action ASC"). + Scan(ctx, &actions) + if err != nil { + return nil, errors.Wrap(err, "tx.NewSelect") + } + return actions, nil +} + +// GetUniqueResourceTypes retrieves a list of all unique resource types in the audit log +func GetUniqueResourceTypes(ctx context.Context, tx bun.Tx) ([]string, error) { + var resourceTypes []string + err := tx.NewSelect(). + Model((*AuditLog)(nil)). + Column("resource_type"). + Distinct(). + Order("resource_type ASC"). + Scan(ctx, &resourceTypes) + if err != nil { + return nil, errors.Wrap(err, "tx.NewSelect") + } + return resourceTypes, nil +} + +// CleanupOldAuditLogs deletes audit logs older than the specified timestamp +func CleanupOldAuditLogs(ctx context.Context, tx bun.Tx, olderThan int64) (int, error) { + result, err := tx.NewDelete(). + Model((*AuditLog)(nil)). + Where("created_at < ?", olderThan). 
+ Exec(ctx) + if err != nil { + return 0, errors.Wrap(err, "tx.NewDelete") + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "result.RowsAffected") + } + + return int(rowsAffected), nil +} diff --git a/internal/db/auditlogger.go b/internal/db/auditlogger.go new file mode 100644 index 0000000..988d4d6 --- /dev/null +++ b/internal/db/auditlogger.go @@ -0,0 +1,91 @@ +package db + +import ( + "context" + "encoding/json" + "fmt" + "time" + + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +// LogSuccess logs a successful permission-protected action +func LogSuccess( + ctx context.Context, + tx bun.Tx, + meta *AuditMeta, + info *AuditInfo, +) error { + return log(ctx, tx, meta, info, "success", nil) +} + +// LogError logs a failed action due to an error +func LogError( + ctx context.Context, + tx bun.Tx, + meta *AuditMeta, + info *AuditInfo, + err error, +) error { + errMsg := err.Error() + return log(ctx, tx, meta, info, "error", &errMsg) +} + +func log( + ctx context.Context, + tx bun.Tx, + meta *AuditMeta, + info *AuditInfo, + result string, + errorMessage *string, +) error { + if meta == nil { + return errors.New("audit meta cannot be nil for audit logging") + } + if info == nil { + return errors.New("audit info cannot be nil for audit logging") + } + if meta.u == nil { + return errors.New("user cannot be nil for audit logging") + } + if meta.r == nil { + return errors.New("request cannot be nil for audit logging") + } + + // Convert resourceID to string + var resourceIDStr *string + if info.ResourceID != nil { + idStr := fmt.Sprintf("%v", info.ResourceID) + resourceIDStr = &idStr + } + + // Marshal details to JSON + var detailsJSON json.RawMessage + if info.Details != nil { + jsonBytes, err := json.Marshal(info.Details) + if err != nil { + return errors.Wrap(err, "json.Marshal details") + } + detailsJSON = jsonBytes + } + + // Extract IP and User-Agent from request + ipAddress := meta.r.RemoteAddr + userAgent 
:= meta.r.UserAgent() + + log := &AuditLog{ + UserID: meta.u.ID, + Action: info.Action, + ResourceType: info.ResourceType, + ResourceID: resourceIDStr, + Details: detailsJSON, + IPAddress: ipAddress, + UserAgent: userAgent, + Result: result, + ErrorMessage: errorMessage, + CreatedAt: time.Now().Unix(), + } + + return CreateAuditLog(ctx, tx, log) +} diff --git a/internal/db/backup.go b/internal/db/backup.go new file mode 100644 index 0000000..2bfe847 --- /dev/null +++ b/internal/db/backup.go @@ -0,0 +1,132 @@ +package db + +import ( + "context" + "fmt" + "os" + "os/exec" + "path/filepath" + "sort" + "time" + + "github.com/pkg/errors" +) + +// CreateBackup creates a compressed PostgreSQL dump before migrations +// Returns backup filename and error +// If pg_dump is not available, returns nil error with warning +func CreateBackup(ctx context.Context, cfg *Config, operation string) (string, error) { + // Check if pg_dump is available + if _, err := exec.LookPath("pg_dump"); err != nil { + fmt.Println("[WARN] pg_dump not found - skipping backup") + fmt.Println("[WARN] Install PostgreSQL client tools for automatic backups:") + fmt.Println("[WARN] Ubuntu/Debian: sudo apt-get install postgresql-client") + fmt.Println("[WARN] macOS: brew install postgresql") + fmt.Println("[WARN] Arch: sudo pacman -S postgresql-libs") + return "", nil // Don't fail, just warn + } + + // Ensure backup directory exists + if err := os.MkdirAll(cfg.BackupDir, 0o755); err != nil { + return "", errors.Wrap(err, "failed to create backup directory") + } + + // Generate filename: YYYYMMDD_HHmmss_pre_{operation}.sql.gz + timestamp := time.Now().Format("20060102_150405") + filename := filepath.Join(cfg.BackupDir, + fmt.Sprintf("%s_pre_%s.sql.gz", timestamp, operation)) + + // Check if gzip is available + useGzip := true + if _, err := exec.LookPath("gzip"); err != nil { + fmt.Println("[WARN] gzip not found - using uncompressed backup") + useGzip = false + filename = filepath.Join(cfg.BackupDir, + 
fmt.Sprintf("%s_pre_%s.sql", timestamp, operation)) + } + + // Build pg_dump command + var cmd *exec.Cmd + if useGzip { + // Use shell to pipe pg_dump through gzip + pgDumpCmd := fmt.Sprintf( + "pg_dump -h %s -p %d -U %s -d %s --no-owner --no-acl --clean --if-exists | gzip > %s", + cfg.Host, + cfg.Port, + cfg.User, + cfg.DB, + filename, + ) + cmd = exec.CommandContext(ctx, "sh", "-c", pgDumpCmd) + } else { + cmd = exec.CommandContext(ctx, "pg_dump", + "-h", cfg.Host, + "-p", fmt.Sprint(cfg.Port), + "-U", cfg.User, + "-d", cfg.DB, + "-f", filename, + "--no-owner", + "--no-acl", + "--clean", + "--if-exists", + ) + } + + // Set password via environment variable + cmd.Env = append(os.Environ(), + fmt.Sprintf("PGPASSWORD=%s", cfg.Password)) + + // Run backup + if err := cmd.Run(); err != nil { + return "", errors.Wrap(err, "pg_dump failed") + } + + // Get file size for logging + info, err := os.Stat(filename) + if err != nil { + return filename, errors.Wrap(err, "stat backup file") + } + + sizeMB := float64(info.Size()) / 1024 / 1024 + fmt.Printf("[INFO] Backup created: %s (%.2f MB)\n", filename, sizeMB) + + return filename, nil +} + +// CleanOldBackups keeps only the N most recent backups +func CleanOldBackups(cfg *Config, keepCount int) error { + // Get all backup files (both .sql and .sql.gz) + sqlFiles, err := filepath.Glob(filepath.Join(cfg.BackupDir, "*.sql")) + if err != nil { + return errors.Wrap(err, "failed to list .sql backups") + } + + gzFiles, err := filepath.Glob(filepath.Join(cfg.BackupDir, "*.sql.gz")) + if err != nil { + return errors.Wrap(err, "failed to list .sql.gz backups") + } + + files := append(sqlFiles, gzFiles...) 
+ + if len(files) <= keepCount { + return nil // Nothing to clean + } + + // Sort files by modification time (newest first) + sort.Slice(files, func(i, j int) bool { + iInfo, _ := os.Stat(files[i]) + jInfo, _ := os.Stat(files[j]) + return iInfo.ModTime().After(jInfo.ModTime()) + }) + + // Delete old backups + for i := keepCount; i < len(files); i++ { + if err := os.Remove(files[i]); err != nil { + fmt.Printf("[WARN] Failed to remove old backup %s: %v\n", files[i], err) + } else { + fmt.Printf("[INFO] Removed old backup: %s\n", filepath.Base(files[i])) + } + } + + return nil +} diff --git a/internal/db/config.go b/internal/db/config.go index bc50be7..1549437 100644 --- a/internal/db/config.go +++ b/internal/db/config.go @@ -12,16 +12,22 @@ type Config struct { Port uint16 // ENV DB_PORT: Database port (default: 5432) DB string // ENV DB_NAME: Database name to connect to (required) SSL string // ENV DB_SSL: SSL mode for connection (default: disable) + + // Backup configuration + BackupDir string // ENV DB_BACKUP_DIR: Directory for database backups (default: backups) + BackupRetention int // ENV DB_BACKUP_RETENTION: Number of backups to keep (default: 10) } func ConfigFromEnv() (any, error) { cfg := &Config{ - User: env.String("DB_USER", ""), - Password: env.String("DB_PASSWORD", ""), - Host: env.String("DB_HOST", ""), - Port: env.UInt16("DB_PORT", 5432), - DB: env.String("DB_NAME", ""), - SSL: env.String("DB_SSL", "disable"), + User: env.String("DB_USER", ""), + Password: env.String("DB_PASSWORD", ""), + Host: env.String("DB_HOST", ""), + Port: env.UInt16("DB_PORT", 5432), + DB: env.String("DB_NAME", ""), + SSL: env.String("DB_SSL", "disable"), + BackupDir: env.String("DB_BACKUP_DIR", "backups"), + BackupRetention: env.Int("DB_BACKUP_RETENTION", 10), } // Validate SSL mode @@ -50,6 +56,9 @@ func ConfigFromEnv() (any, error) { if cfg.DB == "" { return nil, errors.New("Envar not set: DB_NAME") } + if cfg.BackupRetention < 1 { + return nil, 
errors.New("DB_BACKUP_RETENTION must be at least 1") + } return cfg, nil } diff --git a/internal/db/delete.go b/internal/db/delete.go new file mode 100644 index 0000000..501285c --- /dev/null +++ b/internal/db/delete.go @@ -0,0 +1,102 @@ +package db + +import ( + "context" + + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +type deleter[T any] struct { + tx bun.Tx + q *bun.DeleteQuery + resourceID any // Store ID before deletion for audit + audit *AuditMeta + auditInfo *AuditInfo +} + +type systemType interface { + isSystem() bool +} + +func DeleteItem[T any](tx bun.Tx) *deleter[T] { + return &deleter[T]{ + tx: tx, + q: tx.NewDelete(). + Model((*T)(nil)), + } +} + +func (d *deleter[T]) Where(query string, args ...any) *deleter[T] { + d.q = d.q.Where(query, args...) + // Try to capture resource ID from WHERE clause if it's a simple "id = ?" pattern + if query == "id = ?" && len(args) > 0 { + d.resourceID = args[0] + } + return d +} + +// WithAudit enables audit logging for this delete operation +// If the provided *AuditInfo is nil, will use reflection to automatically work out the details +func (d *deleter[T]) WithAudit(meta *AuditMeta, info *AuditInfo) *deleter[T] { + d.audit = meta + d.auditInfo = info + return d +} + +func (d *deleter[T]) Delete(ctx context.Context) error { + result, err := d.q.Exec(ctx) + if err != nil { + return errors.Wrap(err, "bun.DeleteQuery.Exec") + } + rows, err := result.RowsAffected() + if err != nil { + return errors.Wrap(err, "result.RowsAffected") + } + if rows == 0 { + resource := extractResourceType(extractTableName[T]()) + return BadRequestNotFound(resource, "id", d.resourceID) + } + + // Handle audit logging if enabled + if d.audit != nil { + if d.auditInfo == nil { + tableName := extractTableName[T]() + resourceType := extractResourceType(tableName) + action := buildAction(resourceType, "delete") + + d.auditInfo = &AuditInfo{ + Action: action, + ResourceType: resourceType, + ResourceID: d.resourceID, + Details: nil, // 
Delete doesn't need details + } + } + + err = LogSuccess(ctx, d.tx, d.audit, d.auditInfo) + if err != nil { + return errors.Wrap(err, "LogSuccess") + } + } + + return nil +} + +func DeleteByID[T any](tx bun.Tx, id int) *deleter[T] { + return DeleteItem[T](tx).Where("id = ?", id) +} + +func DeleteWithProtection[T systemType](ctx context.Context, tx bun.Tx, id int, audit *AuditMeta) error { + deleter := DeleteByID[T](tx, id) + item, err := GetByID[T](tx, id).Get(ctx) + if err != nil { + return errors.Wrap(err, "GetByID") + } + if (*item).isSystem() { + return errors.New("record is system protected") + } + if audit != nil { + deleter = deleter.WithAudit(audit, nil) + } + return deleter.Delete(ctx) +} diff --git a/internal/db/discordtokens.go b/internal/db/discordtokens.go index 7cc09d9..229075c 100644 --- a/internal/db/discordtokens.go +++ b/internal/db/discordtokens.go @@ -22,14 +22,14 @@ type DiscordToken struct { // UpdateDiscordToken adds the provided discord token to the database. // If the user already has a token stored, it will replace that token instead. -func (user *User) UpdateDiscordToken(ctx context.Context, tx bun.Tx, token *discord.Token) error { +func (u *User) UpdateDiscordToken(ctx context.Context, tx bun.Tx, token *discord.Token) error { if token == nil { return errors.New("token cannot be nil") } expiresAt := time.Now().Add(time.Duration(token.ExpiresIn) * time.Second).Unix() discordToken := &DiscordToken{ - DiscordID: user.DiscordID, + DiscordID: u.DiscordID, AccessToken: token.AccessToken, RefreshToken: token.RefreshToken, ExpiresAt: expiresAt, @@ -37,30 +37,28 @@ func (user *User) UpdateDiscordToken(ctx context.Context, tx bun.Tx, token *disc TokenType: token.TokenType, } - _, err := tx.NewInsert(). - Model(discordToken). - On("CONFLICT (discord_id) DO UPDATE"). - Set("access_token = EXCLUDED.access_token"). - Set("refresh_token = EXCLUDED.refresh_token"). - Set("expires_at = EXCLUDED.expires_at"). + err := Insert(tx, discordToken). 
+ ConflictUpdate([]string{"discord_id"}, "access_token", "refresh_token", "expires_at"). Exec(ctx) - if err != nil { - return errors.Wrap(err, "tx.NewInsert") + return errors.Wrap(err, "db.Insert") } return nil } // DeleteDiscordTokens deletes a users discord OAuth tokens from the database. // It returns the DiscordToken so that it can be revoked via the discord API -func (user *User) DeleteDiscordTokens(ctx context.Context, tx bun.Tx) (*DiscordToken, error) { - token, err := user.GetDiscordToken(ctx, tx) +func (u *User) DeleteDiscordTokens(ctx context.Context, tx bun.Tx) (*DiscordToken, error) { + token, err := u.GetDiscordToken(ctx, tx) if err != nil { + if IsBadRequest(err) { + return nil, nil // Token doesn't exist - not an error + } return nil, errors.Wrap(err, "user.GetDiscordToken") } _, err = tx.NewDelete(). Model((*DiscordToken)(nil)). - Where("discord_id = ?", user.DiscordID). + Where("discord_id = ?", u.DiscordID). Exec(ctx) if err != nil { return nil, errors.Wrap(err, "tx.NewDelete") @@ -69,25 +67,18 @@ func (user *User) DeleteDiscordTokens(ctx context.Context, tx bun.Tx) (*DiscordT } // GetDiscordToken retrieves the users discord token from the database -func (user *User) GetDiscordToken(ctx context.Context, tx bun.Tx) (*DiscordToken, error) { - token := new(DiscordToken) - err := tx.NewSelect(). - Model(token). - Where("discord_id = ?", user.DiscordID). - Limit(1). 
- Scan(ctx) - if err != nil { - return nil, errors.Wrap(err, "tx.NewSelect") - } - return token, nil +func (u *User) GetDiscordToken(ctx context.Context, tx bun.Tx) (*DiscordToken, error) { + return GetByField[DiscordToken](tx, "discord_id", u.DiscordID).Get(ctx) } // Convert reverts the token back into a *discord.Token func (t *DiscordToken) Convert() *discord.Token { + expiresIn := t.ExpiresAt - time.Now().Unix() + expiresIn = max(expiresIn, 0) token := &discord.Token{ AccessToken: t.AccessToken, RefreshToken: t.RefreshToken, - ExpiresIn: int(t.ExpiresAt - time.Now().Unix()), + ExpiresIn: int(expiresIn), Scope: t.Scope, TokenType: t.TokenType, } diff --git a/internal/db/doc.go b/internal/db/doc.go new file mode 100644 index 0000000..3557935 --- /dev/null +++ b/internal/db/doc.go @@ -0,0 +1,2 @@ +// Package db is an internal package for all the database models and related methods +package db diff --git a/internal/db/errors.go b/internal/db/errors.go new file mode 100644 index 0000000..f85f44f --- /dev/null +++ b/internal/db/errors.go @@ -0,0 +1,31 @@ +package db + +import ( + "fmt" + "strings" +) + +func IsBadRequest(err error) bool { + return strings.Contains(err.Error(), "bad request:") +} + +func BadRequest(err string) error { + return fmt.Errorf("bad request: %s", err) +} + +func BadRequestNotFound(resource, field string, value any) error { + errStr := fmt.Sprintf("%s with %s=%v not found", resource, field, value) + return BadRequest(errStr) +} + +func BadRequestNotAssociated(parent, child, parentField, childField string, parentID, childID any) error { + errStr := fmt.Sprintf("%s with %s=%v not associated to %s with %s=%v", + child, childField, childID, parent, parentField, parentID) + return BadRequest(errStr) +} + +func BadRequestAssociated(parent, child, parentField, childField string, parentID, childID any) error { + errStr := fmt.Sprintf("%s with %s=%v already associated to %s with %s=%v", + child, childField, childID, parent, parentField, parentID) + 
return BadRequest(errStr) +} diff --git a/internal/db/fixture.go b/internal/db/fixture.go new file mode 100644 index 0000000..15b0f71 --- /dev/null +++ b/internal/db/fixture.go @@ -0,0 +1,282 @@ +package db + +import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +type Fixture struct { + bun.BaseModel `bun:"table:fixtures,alias:f"` + + ID int `bun:"id,pk,autoincrement"` + SeasonID int `bun:",notnull,unique:round"` + LeagueID int `bun:",notnull,unique:round"` + HomeTeamID int `bun:",notnull,unique:round"` + AwayTeamID int `bun:",notnull,unique:round"` + Round int `bun:"round,unique:round"` + GameWeek *int `bun:"game_week"` + CreatedAt int64 `bun:"created_at,notnull"` + UpdatedAt *int64 `bun:"updated_at"` + + Season *Season `bun:"rel:belongs-to,join:season_id=id"` + League *League `bun:"rel:belongs-to,join:league_id=id"` + HomeTeam *Team `bun:"rel:belongs-to,join:home_team_id=id"` + AwayTeam *Team `bun:"rel:belongs-to,join:away_team_id=id"` +} + +func NewFixture(ctx context.Context, tx bun.Tx, seasonShortName, leagueShortName string, + homeTeamID, awayTeamID, round int, audit *AuditMeta, +) (*Fixture, error) { + season, league, teams, err := GetSeasonLeague(ctx, tx, seasonShortName, leagueShortName) + if err != nil { + return nil, errors.Wrap(err, "GetSeasonLeague") + } + homeTeam, err := GetTeam(ctx, tx, homeTeamID) + if err != nil { + return nil, errors.Wrap(err, "GetTeam") + } + awayTeam, err := GetTeam(ctx, tx, awayTeamID) + if err != nil { + return nil, errors.Wrap(err, "GetTeam") + } + if err = checkTeamsAssociated(season, league, teams, []*Team{homeTeam, awayTeam}); err != nil { + return nil, errors.Wrap(err, "checkTeamsAssociated") + } + fixture := newFixture(season, league, homeTeam, awayTeam, round, time.Now()) + err = Insert(tx, fixture).WithAudit(audit, nil).Exec(ctx) + if err != nil { + return nil, errors.Wrap(err, "Insert") + } + return fixture, nil +} + +func NewRound(ctx 
context.Context, tx bun.Tx, seasonShortName, leagueShortName string, + round int, audit *AuditMeta, +) ([]*Fixture, error) { + season, league, teams, err := GetSeasonLeague(ctx, tx, seasonShortName, leagueShortName) + if err != nil { + return nil, errors.Wrap(err, "GetSeasonLeague") + } + fixtures := generateRound(season, league, round, teams) + err = InsertMultiple(tx, fixtures).WithAudit(audit, nil).Exec(ctx) + if err != nil { + return nil, errors.Wrap(err, "InsertMultiple") + } + return fixtures, nil +} + +func GetFixtures(ctx context.Context, tx bun.Tx, seasonShortName, leagueShortName string) (*Season, *League, []*Fixture, error) { + season, league, _, err := GetSeasonLeague(ctx, tx, seasonShortName, leagueShortName) + if err != nil { + return nil, nil, nil, errors.Wrap(err, "GetSeasonLeague") + } + fixtures, err := GetList[Fixture](tx). + Where("season_id = ?", season.ID). + Where("league_id = ?", league.ID). + Order("game_week ASC NULLS FIRST", "round ASC", "id ASC"). + Relation("HomeTeam"). + Relation("AwayTeam"). + GetAll(ctx) + if err != nil { + return nil, nil, nil, errors.Wrap(err, "GetList") + } + return season, league, fixtures, nil +} + +func GetFixture(ctx context.Context, tx bun.Tx, id int) (*Fixture, error) { + return GetByID[Fixture](tx, id). + Relation("Season"). + Relation("League"). + Relation("HomeTeam"). + Relation("AwayTeam"). + Get(ctx) +} + +func GetFixturesByGameWeek(ctx context.Context, tx bun.Tx, seasonID, leagueID, gameweek int) ([]*Fixture, error) { + fixtures, err := GetList[Fixture](tx). + Where("season_id = ?", seasonID). + Where("league_id = ?", leagueID). + Where("game_week = ?", gameweek). + Order("round ASC", "id ASC"). + Relation("HomeTeam"). + Relation("AwayTeam"). + GetAll(ctx) + if err != nil { + return nil, errors.Wrap(err, "GetList") + } + return fixtures, nil +} + +func GetUnallocatedFixtures(ctx context.Context, tx bun.Tx, seasonID, leagueID int) ([]*Fixture, error) { + fixtures, err := GetList[Fixture](tx). 
+ Where("season_id = ?", seasonID). + Where("league_id = ?", leagueID). + Where("game_week IS NULL"). + Order("round ASC", "id ASC"). + Relation("HomeTeam"). + Relation("AwayTeam"). + GetAll(ctx) + if err != nil { + return nil, errors.Wrap(err, "GetList") + } + return fixtures, nil +} + +func CountUnallocatedFixtures(ctx context.Context, tx bun.Tx, seasonID, leagueID int) (int, error) { + count, err := GetList[Fixture](tx). + Where("season_id = ?", seasonID). + Where("league_id = ?", leagueID). + Where("game_week IS NULL"). + Count(ctx) + if err != nil { + return 0, errors.Wrap(err, "GetList") + } + return count, nil +} + +func GetMaxGameWeek(ctx context.Context, tx bun.Tx, seasonID, leagueID int) (int, error) { + var maxGameWeek int + err := tx.NewSelect(). + Model((*Fixture)(nil)). + Column("game_week"). + Where("season_id = ?", seasonID). + Where("league_id = ?", leagueID). + Order("game_week DESC NULLS LAST"). + Limit(1).Scan(ctx, &maxGameWeek) + if err != nil { + return 0, errors.Wrap(err, "tx.NewSelect") + } + return maxGameWeek, nil +} + +func UpdateFixtureGameWeeks(ctx context.Context, tx bun.Tx, fixtures []*Fixture, audit *AuditMeta) error { + details := []any{} + for _, fixture := range fixtures { + err := UpdateByID(tx, fixture.ID, fixture). + Column("game_week"). 
+ Exec(ctx) + if err != nil { + return errors.Wrap(err, "UpdateByID") + } + details = append(details, map[string]any{"fixture_id": fixture.ID, "game_week": fixture.GameWeek}) + } + info := &AuditInfo{ + "fixtures.manage", + "fixture", + "multiple", + map[string]any{"updated": details}, + } + err := LogSuccess(ctx, tx, audit, info) + if err != nil { + return errors.Wrap(err, "LogSuccess") + } + return nil +} + +func DeleteAllFixtures(ctx context.Context, tx bun.Tx, seasonShortName, leagueShortName string, audit *AuditMeta) error { + season, league, _, err := GetSeasonLeague(ctx, tx, seasonShortName, leagueShortName) + if err != nil { + return errors.Wrap(err, "GetSeasonLeague") + } + err = DeleteItem[Fixture](tx). + Where("season_id = ?", season.ID). + Where("league_id = ?", league.ID). + WithAudit(audit, nil). + Delete(ctx) + if err != nil { + return errors.Wrap(err, "DeleteItem") + } + return nil +} + +func DeleteFixture(ctx context.Context, tx bun.Tx, id int, audit *AuditMeta) error { + err := DeleteByID[Fixture](tx, id). + WithAudit(audit, nil). 
+ Delete(ctx) + if err != nil { + return errors.Wrap(err, "DeleteByID") + } + return nil +} + +func newFixture(season *Season, league *League, homeTeam, awayTeam *Team, round int, created time.Time) *Fixture { + return &Fixture{ + SeasonID: season.ID, + LeagueID: league.ID, + HomeTeamID: homeTeam.ID, + AwayTeamID: awayTeam.ID, + Round: round, + CreatedAt: created.Unix(), + } +} + +func checkTeamsAssociated(season *Season, league *League, teamsIn []*Team, toCheck []*Team) error { + badIDs := []string{} + master := map[int]bool{} + for _, team := range teamsIn { + master[team.ID] = true + } + for _, team := range toCheck { + if !master[team.ID] { + badIDs = append(badIDs, strconv.Itoa(team.ID)) + } + } + ids := strings.Join(badIDs, ",") + if len(ids) > 0 { + return BadRequestNotAssociated("season_league", "team", + "season_id,league_id", "ids", + fmt.Sprintf("%v,%v", season.ID, league.ID), + ids) + } + return nil +} + +type versus struct { + homeTeam *Team + awayTeam *Team +} + +func generateRound(season *Season, league *League, round int, teams []*Team) []*Fixture { + now := time.Now() + numTeams := len(teams) + numGames := numTeams * (numTeams - 1) / 2 + fixtures := make([]*Fixture, numGames) + for i, matchup := range allTeamsPlay(teams, round) { + fixtures[i] = newFixture(season, league, matchup.homeTeam, matchup.awayTeam, round, now) + } + return fixtures +} + +func allTeamsPlay(teams []*Team, round int) []*versus { + matchups := []*versus{} + if len(teams) < 2 { + return matchups + } + team1 := teams[0] + teams = teams[1:] + matchups = append(matchups, playOtherTeams(team1, teams, round)...) + matchups = append(matchups, allTeamsPlay(teams, round)...) 
+ return matchups +} + +func playOtherTeams(team *Team, teams []*Team, round int) []*versus { + matchups := make([]*versus, len(teams)) + for i, opponent := range teams { + versus := &versus{} + if i%2+round%2 == 0 { + versus.homeTeam = team + versus.awayTeam = opponent + } else { + versus.homeTeam = opponent + versus.awayTeam = team + } + matchups[i] = versus + } + return matchups +} diff --git a/internal/db/getbyfield.go b/internal/db/getbyfield.go new file mode 100644 index 0000000..7a86944 --- /dev/null +++ b/internal/db/getbyfield.go @@ -0,0 +1,70 @@ +package db + +import ( + "context" + "database/sql" + + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +type fieldgetter[T any] struct { + q *bun.SelectQuery + field string + value any + model *T +} + +func (g *fieldgetter[T]) get(ctx context.Context) (*T, error) { + if g.field == "id" && (g.value).(int) < 1 { + return nil, errors.New("invalid id") + } + err := g.q. + Where("? = ?", bun.Ident(g.field), g.value). + Scan(ctx) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + resource := extractResourceType(extractTableName[T]()) + return nil, BadRequestNotFound(resource, g.field, g.value) + } + return nil, errors.Wrap(err, "bun.SelectQuery.Scan") + } + return g.model, nil +} + +func (g *fieldgetter[T]) Get(ctx context.Context) (*T, error) { + g.q = g.q.Limit(1) + return g.get(ctx) +} + +func (g *fieldgetter[T]) Relation(name string, apply ...func(*bun.SelectQuery) *bun.SelectQuery) *fieldgetter[T] { + g.q = g.q.Relation(name, apply...) + return g +} + +func (g *fieldgetter[T]) Join(join string, args ...any) *fieldgetter[T] { + g.q = g.q.Join(join, args...) 
+ return g +} + +// GetByField retrieves a single record by field name +func GetByField[T any]( + tx bun.Tx, + field string, + value any, +) *fieldgetter[T] { + model := new(T) + return &fieldgetter[T]{ + tx.NewSelect().Model(model), + field, + value, + model, + } +} + +func GetByID[T any]( + tx bun.Tx, + id int, +) *fieldgetter[T] { + return GetByField[T](tx, "id", id) +} diff --git a/internal/db/getlist.go b/internal/db/getlist.go new file mode 100644 index 0000000..52033ae --- /dev/null +++ b/internal/db/getlist.go @@ -0,0 +1,152 @@ +package db + +import ( + "context" + "database/sql" + "fmt" + + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +type listgetter[T any] struct { + q *bun.SelectQuery + items *[]*T +} + +type List[T any] struct { + Items []*T + Total int + PageOpts PageOpts +} + +type Filter struct { + Field string + Value any + Comparator Comparator +} + +type Comparator string + +const ( + Equal Comparator = "=" + Less Comparator = "<" + LessEqual Comparator = "<=" + Greater Comparator = ">" + GreaterEqual Comparator = ">=" + In Comparator = "IN" +) + +type ListFilter struct { + filters []Filter +} + +func NewListFilter() *ListFilter { + return &ListFilter{[]Filter{}} +} + +func (f *ListFilter) Equals(field string, value any) { + f.filters = append(f.filters, Filter{field, value, Equal}) +} + +func (f *ListFilter) LessThan(field string, value any) { + f.filters = append(f.filters, Filter{field, value, Less}) +} + +func (f *ListFilter) LessEqualThan(field string, value any) { + f.filters = append(f.filters, Filter{field, value, LessEqual}) +} + +func (f *ListFilter) GreaterThan(field string, value any) { + f.filters = append(f.filters, Filter{field, value, Greater}) +} + +func (f *ListFilter) GreaterEqualThan(field string, value any) { + f.filters = append(f.filters, Filter{field, value, GreaterEqual}) +} + +func (f *ListFilter) In(field string, values any) { + f.filters = append(f.filters, Filter{field, values, In}) +} + +func GetList[T 
any](tx bun.Tx) *listgetter[T] { + l := &listgetter[T]{ + items: new([]*T), + } + l.q = tx.NewSelect(). + Model(l.items) + return l +} + +func (l *listgetter[T]) String() string { + return l.q.String() +} + +func (l *listgetter[T]) Join(join string, args ...any) *listgetter[T] { + l.q = l.q.Join(join, args...) + return l +} + +func (l *listgetter[T]) Where(query string, args ...any) *listgetter[T] { + l.q = l.q.Where(query, args...) + return l +} + +func (l *listgetter[T]) Order(orders ...string) *listgetter[T] { + l.q = l.q.Order(orders...) + return l +} + +func (l *listgetter[T]) Relation(name string, apply ...func(*bun.SelectQuery) *bun.SelectQuery) *listgetter[T] { + l.q = l.q.Relation(name, apply...) + return l +} + +func (l *listgetter[T]) Filter(filters ...Filter) *listgetter[T] { + for _, filter := range filters { + if filter.Comparator == In { + l.q = l.q.Where("? IN (?)", bun.Ident(filter.Field), bun.In(filter.Value)) + } else { + l.q = l.q.Where("? ? ?", bun.Ident(filter.Field), bun.Safe(filter.Comparator), filter.Value) + } + } + fmt.Println(l.q.String()) + return l +} + +func (l *listgetter[T]) GetPaged(ctx context.Context, pageOpts, defaults *PageOpts) (*List[T], error) { + if defaults == nil { + return nil, errors.New("default pageopts is nil") + } + total, err := l.q.Count(ctx) + if err != nil { + return nil, errors.Wrap(err, "query.Count") + } + l.q, pageOpts = setPageOpts(l.q, pageOpts, defaults, total) + err = l.q.Scan(ctx) + if err != nil && errors.Is(err, sql.ErrNoRows) { + return nil, errors.Wrap(err, "query.Scan") + } + list := &List[T]{ + Items: *l.items, + Total: total, + PageOpts: *pageOpts, + } + return list, nil +} + +func (l *listgetter[T]) Count(ctx context.Context) (int, error) { + count, err := l.q.Count(ctx) + if err != nil { + return 0, errors.Wrap(err, "query.Count") + } + return count, nil +} + +func (l *listgetter[T]) GetAll(ctx context.Context) ([]*T, error) { + err := l.q.Scan(ctx) + if err != nil && errors.Is(err, 
sql.ErrNoRows) { + return nil, errors.Wrap(err, "query.Scan") + } + return *l.items, nil +} diff --git a/internal/db/insert.go b/internal/db/insert.go new file mode 100644 index 0000000..9b1a951 --- /dev/null +++ b/internal/db/insert.go @@ -0,0 +1,123 @@ +package db + +import ( + "context" + "fmt" + "strings" + + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +type inserter[T any] struct { + tx bun.Tx + q *bun.InsertQuery + model *T + models []*T + isBulk bool + audit *AuditMeta + auditInfo *AuditInfo +} + +// Insert creates an inserter for a single model +// The model will have all fields populated after Exec() via Returning("*") +func Insert[T any](tx bun.Tx, model *T) *inserter[T] { + if model == nil { + panic("model cannot be nil") + } + return &inserter[T]{ + tx: tx, + q: tx.NewInsert().Model(model).Returning("*"), + model: model, + isBulk: false, + } +} + +// InsertMultiple creates an inserter for bulk insert +// All models will have fields populated after Exec() via Returning("*") +func InsertMultiple[T any](tx bun.Tx, models []*T) *inserter[T] { + if len(models) == 0 { + panic("models cannot be nil or empty") + } + return &inserter[T]{ + tx: tx, + q: tx.NewInsert().Model(&models).Returning("*"), + models: models, + isBulk: true, + } +} + +func (i *inserter[T]) ConflictNothing(conflicts ...string) *inserter[T] { + fieldstr := strings.Join(conflicts, ", ") + i.q = i.q.On(fmt.Sprintf("CONFLICT (%s) DO NOTHING", fieldstr)) + return i +} + +func (i *inserter[T]) ConflictUpdate(conflicts []string, columns ...string) *inserter[T] { + fieldstr := strings.Join(conflicts, ", ") + i.q = i.q.On(fmt.Sprintf("CONFLICT (%s) DO UPDATE", fieldstr)) + for _, column := range columns { + i.q = i.q.Set(fmt.Sprintf("%s = EXCLUDED.%s", column, column)) + } + return i +} + +// Returning overrides the default Returning("*") clause +// Example: .Returning("id", "created_at") +func (i *inserter[T]) Returning(columns ...string) *inserter[T] { + if len(columns) == 0 { + return 
i + } + // Build column list as single string + columnList := strings.Join(columns, ", ") + i.q = i.q.Returning(columnList) + return i +} + +// WithAudit enables audit logging for this insert operation +// If the provided *AuditInfo is nil, will use reflection to automatically work out the details +func (i *inserter[T]) WithAudit(meta *AuditMeta, info *AuditInfo) *inserter[T] { + i.audit = meta + i.auditInfo = info + return i +} + +// Exec executes the insert and optionally logs to audit +// Returns an error if insert fails or if audit callback fails (triggering rollback) +func (i *inserter[T]) Exec(ctx context.Context) error { + // Execute insert + _, err := i.q.Exec(ctx) + if err != nil { + return errors.Wrap(err, "bun.InsertQuery.Exec") + } + + // Handle audit logging if enabled + if i.audit != nil { + if i.auditInfo == nil { + tableName := extractTableName[T]() + resourceType := extractResourceType(tableName) + action := buildAction(resourceType, "create") + i.auditInfo = &AuditInfo{ + Action: action, + ResourceType: resourceType, + ResourceID: nil, + Details: nil, + } + if i.isBulk { + i.auditInfo.Details = map[string]any{ + "count": len(i.models), + } + } else { + i.auditInfo.ResourceID = extractPrimaryKey(i.model) + i.auditInfo.Details = i.model + } + } + + err = LogSuccess(ctx, i.tx, i.audit, i.auditInfo) + if err != nil { + return errors.Wrap(err, "LogSuccess") + } + } + + return nil +} diff --git a/internal/db/isunique.go b/internal/db/isunique.go new file mode 100644 index 0000000..1f9ba01 --- /dev/null +++ b/internal/db/isunique.go @@ -0,0 +1,19 @@ +package db + +import ( + "context" + + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +func IsUnique(ctx context.Context, tx bun.Tx, model any, field, value string) (bool, error) { + count, err := tx.NewSelect(). + Model(model). + Where("? = ?", bun.Ident(field), value). 
+ Count(ctx) + if err != nil { + return false, errors.Wrap(err, "tx.NewSelect") + } + return count == 0, nil +} diff --git a/internal/db/league.go b/internal/db/league.go new file mode 100644 index 0000000..edef8d7 --- /dev/null +++ b/internal/db/league.go @@ -0,0 +1,45 @@ +package db + +import ( + "context" + + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +type League struct { + bun.BaseModel `bun:"table:leagues,alias:l"` + + ID int `bun:"id,pk,autoincrement" json:"id"` + Name string `bun:"name,unique,notnull" json:"name"` + ShortName string `bun:"short_name,unique,notnull" json:"short_name"` + Description string `bun:"description" json:"description"` + + Seasons []Season `bun:"m2m:season_leagues,join:League=Season" json:"-"` + Teams []Team `bun:"m2m:team_participations,join:League=Team" json:"-"` +} + +func GetLeagues(ctx context.Context, tx bun.Tx) ([]*League, error) { + return GetList[League](tx).Relation("Seasons").GetAll(ctx) +} + +func GetLeague(ctx context.Context, tx bun.Tx, shortname string) (*League, error) { + if shortname == "" { + return nil, errors.New("shortname cannot be empty") + } + return GetByField[League](tx, "short_name", shortname).Relation("Seasons").Get(ctx) +} + +func NewLeague(ctx context.Context, tx bun.Tx, name, shortname, description string, audit *AuditMeta) (*League, error) { + league := &League{ + Name: name, + ShortName: shortname, + Description: description, + } + err := Insert(tx, league). 
+ WithAudit(audit, nil).Exec(ctx) + if err != nil { + return nil, errors.Wrap(err, "db.Insert") + } + return league, nil +} diff --git a/internal/db/migrate/migrate.go b/internal/db/migrate/migrate.go new file mode 100644 index 0000000..4393e3d --- /dev/null +++ b/internal/db/migrate/migrate.go @@ -0,0 +1,511 @@ +// Package migrate provides functions for managing database migrations +package migrate + +import ( + "bufio" + "context" + "fmt" + "os" + "os/exec" + "strconv" + "strings" + "text/tabwriter" + "time" + + "git.haelnorr.com/h/oslstats/internal/config" + "git.haelnorr.com/h/oslstats/internal/db" + "git.haelnorr.com/h/oslstats/internal/db/migrations" + "github.com/pkg/errors" + "github.com/uptrace/bun/migrate" +) + +// RunMigrations executes database migrations +func RunMigrations(ctx context.Context, cfg *config.Config, command string, countStr string) error { + conn := db.NewDB(cfg.DB) + defer func() { _ = conn.Close() }() + + migrator := migrate.NewMigrator(conn.DB, migrations.Migrations) + + // Initialize migration tables + if err := migrator.Init(ctx); err != nil { + return errors.Wrap(err, "migrator.Init") + } + + switch command { + case "up": + err := migrateUp(ctx, migrator, conn, cfg, countStr) + if err != nil { + // On error, automatically rollback the migrations that were just applied + fmt.Println("[WARN] Migration failed, attempting automatic rollback...") + // We need to figure out how many migrations were applied in this batch + // For now, we'll skip automatic rollback since it's complex with the new count system + // The user can manually rollback if needed + return err + } + return err + case "rollback": + return migrateRollback(ctx, migrator, conn, cfg, countStr) + case "status": + return migrateStatus(ctx, migrator) + default: + return fmt.Errorf("unknown migration command: %s", command) + } +} + +// migrateUp runs pending migrations +func migrateUp(ctx context.Context, migrator *migrate.Migrator, conn *db.DB, cfg *config.Config, countStr 
string) error { + // Parse count parameter + count, all, err := parseMigrationCount(countStr) + if err != nil { + return errors.Wrap(err, "parse migration count") + } + + fmt.Println("[INFO] Step 1/5: Validating migrations...") + if err := validateMigrations(ctx); err != nil { + return err + } + fmt.Println("[INFO] Migration validation passed ✓") + + fmt.Println("[INFO] Step 2/5: Checking for pending migrations...") + // Check for pending migrations using MigrationsWithStatus (read-only) + ms, err := migrator.MigrationsWithStatus(ctx) + if err != nil { + return errors.Wrap(err, "get migration status") + } + + unapplied := ms.Unapplied() + if len(unapplied) == 0 { + fmt.Println("[INFO] No pending migrations") + return nil + } + + // Select which migrations to apply + toApply := selectMigrationsToApply(unapplied, count, all) + if len(toApply) == 0 { + fmt.Println("[INFO] No migrations to run") + return nil + } + + // Print what we're about to do + if all { + fmt.Printf("[INFO] Running all %d pending migration(s):\n", len(toApply)) + } else { + fmt.Printf("[INFO] Running %d migration(s):\n", len(toApply)) + } + for _, m := range toApply { + fmt.Printf(" 📋 %s\n", m.Name) + } + + // Create backup unless --no-backup flag is set + if !cfg.Flags.MigrateNoBackup { + fmt.Println("[INFO] Step 3/5: Creating backup...") + _, err := db.CreateBackup(ctx, cfg.DB, "migration") + if err != nil { + return errors.Wrap(err, "create backup") + } + + // Clean old backups + if err := db.CleanOldBackups(cfg.DB, cfg.DB.BackupRetention); err != nil { + fmt.Printf("[WARN] Failed to clean old backups: %v\n", err) + } + } else { + fmt.Println("[INFO] Step 3/5: Skipping backup (--no-backup flag set)") + } + + // Acquire migration lock + fmt.Println("[INFO] Step 4/5: Acquiring migration lock...") + if err := acquireMigrationLock(ctx, conn); err != nil { + return errors.Wrap(err, "acquire migration lock") + } + defer releaseMigrationLock(ctx, conn) + fmt.Println("[INFO] Migration lock acquired") + 
+ // Run migrations + fmt.Println("[INFO] Step 5/5: Applying migrations...") + group, err := executeUpMigrations(ctx, migrator, toApply) + if err != nil { + return errors.Wrap(err, "execute migrations") + } + + if group.IsZero() { + fmt.Println("[INFO] No migrations to run") + return nil + } + + fmt.Printf("[INFO] Migrated to group %d\n", group.ID) + for _, migration := range group.Migrations { + fmt.Printf(" ✅ %s\n", migration.Name) + } + + return nil +} + +// migrateRollback rolls back migrations +func migrateRollback(ctx context.Context, migrator *migrate.Migrator, conn *db.DB, cfg *config.Config, countStr string) error { + // Parse count parameter + count, all, err := parseMigrationCount(countStr) + if err != nil { + return errors.Wrap(err, "parse migration count") + } + + // Get all migrations with status + ms, err := migrator.MigrationsWithStatus(ctx) + if err != nil { + return errors.Wrap(err, "get migration status") + } + + applied := ms.Applied() + if len(applied) == 0 { + fmt.Println("[INFO] No migrations to rollback") + return nil + } + + // Select which migrations to rollback + toRollback := selectMigrationsToRollback(applied, count, all) + if len(toRollback) == 0 { + fmt.Println("[INFO] No migrations to rollback") + return nil + } + + // Print what we're about to do + if all { + fmt.Printf("[INFO] Rolling back all %d migration(s):\n", len(toRollback)) + } else { + fmt.Printf("[INFO] Rolling back %d migration(s):\n", len(toRollback)) + } + for _, m := range toRollback { + fmt.Printf(" 📋 %s (group %d)\n", m.Name, m.GroupID) + } + + // Create backup unless --no-backup flag is set + if !cfg.Flags.MigrateNoBackup { + fmt.Println("[INFO] Creating backup before rollback...") + _, err := db.CreateBackup(ctx, cfg.DB, "rollback") + if err != nil { + return errors.Wrap(err, "create backup") + } + + // Clean old backups + if err := db.CleanOldBackups(cfg.DB, cfg.DB.BackupRetention); err != nil { + fmt.Printf("[WARN] Failed to clean old backups: %v\n", err) + } + } 
else { + fmt.Println("[INFO] Skipping backup (--no-backup flag set)") + } + + // Acquire migration lock + fmt.Println("[INFO] Acquiring migration lock...") + if err := acquireMigrationLock(ctx, conn); err != nil { + return errors.Wrap(err, "acquire migration lock") + } + defer releaseMigrationLock(ctx, conn) + fmt.Println("[INFO] Migration lock acquired") + + // Rollback + fmt.Println("[INFO] Executing rollback...") + rolledBack, err := executeDownMigrations(ctx, migrator, toRollback) + if err != nil { + return errors.Wrap(err, "execute rollback") + } + + fmt.Printf("[INFO] Successfully rolled back %d migration(s)\n", len(rolledBack)) + for _, migration := range rolledBack { + fmt.Printf(" ↩️ %s\n", migration.Name) + } + + return nil +} + +// migrateStatus shows migration status +func migrateStatus(ctx context.Context, migrator *migrate.Migrator) error { + ms, err := migrator.MigrationsWithStatus(ctx) + if err != nil { + return errors.Wrap(err, "get migration status") + } + + fmt.Println("╔══════════════════════════════════════════════════════════╗") + fmt.Println("║ DATABASE MIGRATION STATUS ║") + fmt.Println("╚══════════════════════════════════════════════════════════╝") + + w := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0) + _, _ = fmt.Fprintln(w, "STATUS\tMIGRATION\tGROUP\tCOMMENT") + _, _ = fmt.Fprintln(w, "----------\t---------------\t-----\t---------------------------") + + appliedCount := 0 + for _, m := range ms { + status := "⏳ Pending" + group := "-" + + if m.GroupID > 0 { + status = "✅ Applied" + appliedCount++ + group = fmt.Sprint(m.GroupID) + } + + _, _ = fmt.Fprintf(w, "%s\t%s\t%s\t%s\n", status, m.Name, group, m.Comment) + } + + _ = w.Flush() + + fmt.Printf("\n📊 Summary: %d applied, %d pending\n\n", + appliedCount, len(ms)-appliedCount) + + return nil +} + +// validateMigrations ensures migrations compile before running +func validateMigrations(ctx context.Context) error { + cmd := exec.CommandContext(ctx, "go", "build", + "-o", "/dev/null", 
"./internal/db/migrations") + + output, err := cmd.CombinedOutput() + if err != nil { + fmt.Println("[ERROR] Migration validation failed!") + fmt.Println(string(output)) + return errors.Wrap(err, "migration build failed") + } + + return nil +} + +// acquireMigrationLock prevents concurrent migrations using PostgreSQL advisory lock +func acquireMigrationLock(ctx context.Context, conn *db.DB) error { + const lockID = 1234567890 // Arbitrary unique ID for migration lock + const timeoutSeconds = 300 // 5 minutes + + // Set statement timeout for this session + _, err := conn.ExecContext(ctx, + fmt.Sprintf("SET statement_timeout = '%ds'", timeoutSeconds)) + if err != nil { + return errors.Wrap(err, "set timeout") + } + + var acquired bool + err = conn.NewRaw("SELECT pg_try_advisory_lock(?)", lockID). + Scan(ctx, &acquired) + if err != nil { + return errors.Wrap(err, "pg_try_advisory_lock") + } + + if !acquired { + return errors.New("migration already in progress (could not acquire lock)") + } + + return nil +} + +// releaseMigrationLock releases the migration lock +func releaseMigrationLock(ctx context.Context, conn *db.DB) { + const lockID = 1234567890 + + _, err := conn.NewRaw("SELECT pg_advisory_unlock(?)", lockID).Exec(ctx) + if err != nil { + fmt.Printf("[WARN] Failed to release migration lock: %v\n", err) + } else { + fmt.Println("[INFO] Migration lock released") + } +} + +// CreateMigration generates a new migration file +func CreateMigration(name string) error { + if name == "" { + return errors.New("migration name cannot be empty") + } + + // Sanitize name (replace spaces with underscores, lowercase) + name = strings.ToLower(strings.ReplaceAll(name, " ", "_")) + + // Generate timestamp + timestamp := time.Now().Format("20060102150405") + filename := fmt.Sprintf("internal/db/migrations/%s_%s.go", timestamp, name) + + // Template + template := `package migrations + +import ( + "context" + "github.com/uptrace/bun" +) + +func init() { + Migrations.MustRegister( + // 
UP migration + func(ctx context.Context, conn *bun.DB) error { + // Add your migration code here + return nil + }, + // DOWN migration + func(ctx context.Context, conn *bun.DB) error { + // Add your rollback code here + return nil + }, + ) +} +` + + // Write file + if err := os.WriteFile(filename, []byte(template), 0o644); err != nil { + return errors.Wrap(err, "write migration file") + } + + fmt.Printf("✅ Created migration: %s\n", filename) + fmt.Println("📝 Next steps:") + fmt.Println(" 1. Edit the file and implement the UP and DOWN functions") + fmt.Println(" 2. Run: just migrate up") + + return nil +} + +// parseMigrationCount parses a migration count string +// Returns: (count, all, error) +// - "" (empty) → (1, false, nil) - default to 1 +// - "all" → (0, true, nil) - special case for all +// - "5" → (5, false, nil) - specific count +// - "invalid" → (0, false, error) +func parseMigrationCount(value string) (int, bool, error) { + // Default to 1 if empty + if value == "" { + return 1, false, nil + } + + // Special case for "all" + if value == "all" { + return 0, true, nil + } + + // Parse as integer + count, err := strconv.Atoi(value) + if err != nil { + return 0, false, errors.New("migration count must be a positive integer or 'all'") + } + if count < 1 { + return 0, false, errors.New("migration count must be a positive integer (1 or greater)") + } + + return count, false, nil +} + +// selectMigrationsToApply returns the subset of unapplied migrations to run +func selectMigrationsToApply(unapplied migrate.MigrationSlice, count int, all bool) migrate.MigrationSlice { + if all { + return unapplied + } + + count = min(count, len(unapplied)) + return unapplied[:count] +} + +// selectMigrationsToRollback returns the subset of applied migrations to rollback +// Returns migrations in reverse chronological order (most recent first) +func selectMigrationsToRollback(applied migrate.MigrationSlice, count int, all bool) migrate.MigrationSlice { + if len(applied) == 0 || 
all { + return applied + } + count = min(count, len(applied)) + return applied[:count] +} + +// executeUpMigrations executes a subset of UP migrations +func executeUpMigrations(ctx context.Context, migrator *migrate.Migrator, migrations migrate.MigrationSlice) (*migrate.MigrationGroup, error) { + if len(migrations) == 0 { + return &migrate.MigrationGroup{}, nil + } + + // Get the next group ID + ms, err := migrator.MigrationsWithStatus(ctx) + if err != nil { + return nil, errors.Wrap(err, "get migration status") + } + + lastGroup := ms.LastGroup() + groupID := int64(1) + if lastGroup.ID > 0 { + groupID = lastGroup.ID + 1 + } + + // Create the migration group + group := &migrate.MigrationGroup{ + ID: groupID, + Migrations: make(migrate.MigrationSlice, 0, len(migrations)), + } + + // Execute each migration + for i := range migrations { + migration := &migrations[i] + migration.GroupID = groupID + + // Mark as applied before execution (Bun's default behavior) + if err := migrator.MarkApplied(ctx, migration); err != nil { + return group, errors.Wrap(err, "mark applied") + } + + // Add to group + group.Migrations = append(group.Migrations, *migration) + + // Execute the UP function + if migration.Up != nil { + if err := migration.Up(ctx, migrator, migration); err != nil { + return group, errors.Wrap(err, fmt.Sprintf("migration %s failed", migration.Name)) + } + } + } + + return group, nil +} + +// executeDownMigrations executes a subset of DOWN migrations +func executeDownMigrations(ctx context.Context, migrator *migrate.Migrator, migrations migrate.MigrationSlice) (migrate.MigrationSlice, error) { + rolledBack := make(migrate.MigrationSlice, 0, len(migrations)) + + // Execute each migration in order (already reversed) + for i := range migrations { + migration := &migrations[i] + + // Execute the DOWN function + if migration.Down != nil { + if err := migration.Down(ctx, migrator, migration); err != nil { + return rolledBack, errors.Wrap(err, fmt.Sprintf("rollback %s 
failed", migration.Name)) + } + } + + // Mark as unapplied after execution + if err := migrator.MarkUnapplied(ctx, migration); err != nil { + return rolledBack, errors.Wrap(err, "mark unapplied") + } + + rolledBack = append(rolledBack, *migration) + } + + return rolledBack, nil +} + +// ResetDatabase drops and recreates all tables (destructive) +func ResetDatabase(ctx context.Context, cfg *config.Config) error { + fmt.Println("⚠️ WARNING - This will DELETE ALL DATA in the database!") + fmt.Print("Type 'yes' to continue: ") + + reader := bufio.NewReader(os.Stdin) + response, err := reader.ReadString('\n') + if err != nil { + return errors.Wrap(err, "read input") + } + + response = strings.TrimSpace(response) + if response != "yes" { + fmt.Println("❌ Reset cancelled") + return nil + } + conn := db.NewDB(cfg.DB) + defer func() { _ = conn.Close() }() + + models := conn.RegisterModels() + + for _, model := range models { + if err := conn.ResetModel(ctx, model); err != nil { + return errors.Wrap(err, "reset model") + } + } + + fmt.Println("✅ Database reset complete") + return nil +} diff --git a/internal/db/migrations/20250124000001_initial_schema.go b/internal/db/migrations/20250124000001_initial_schema.go new file mode 100644 index 0000000..f0ba125 --- /dev/null +++ b/internal/db/migrations/20250124000001_initial_schema.go @@ -0,0 +1,47 @@ +package migrations + +import ( + "context" + + "git.haelnorr.com/h/oslstats/internal/db" + "github.com/uptrace/bun" +) + +func init() { + Migrations.MustRegister( + // UP: Create initial tables (users, discord_tokens) + func(ctx context.Context, conn *bun.DB) error { + // Create users table + _, err := conn.NewCreateTable(). + Model((*db.User)(nil)). + Exec(ctx) + if err != nil { + return err + } + + // Create discord_tokens table + _, err = conn.NewCreateTable(). + Model((*db.DiscordToken)(nil)). 
+ Exec(ctx) + return err + }, + // DOWN: Drop tables in reverse order + func(ctx context.Context, conn *bun.DB) error { + // Drop discord_tokens first (has foreign key to users) + _, err := conn.NewDropTable(). + Model((*db.DiscordToken)(nil)). + IfExists(). + Exec(ctx) + if err != nil { + return err + } + + // Drop users table + _, err = conn.NewDropTable(). + Model((*db.User)(nil)). + IfExists(). + Exec(ctx) + return err + }, + ) +} diff --git a/internal/db/migrations/20260127194815_seasons.go b/internal/db/migrations/20260127194815_seasons.go new file mode 100644 index 0000000..49ac8ca --- /dev/null +++ b/internal/db/migrations/20260127194815_seasons.go @@ -0,0 +1,34 @@ +package migrations + +import ( + "context" + + "git.haelnorr.com/h/oslstats/internal/db" + "github.com/uptrace/bun" +) + +func init() { + Migrations.MustRegister( + // UP migration + func(ctx context.Context, conn *bun.DB) error { + _, err := conn.NewCreateTable(). + Model((*db.Season)(nil)). + Exec(ctx) + if err != nil { + return err + } + return nil + }, + // DOWN migration + func(ctx context.Context, conn *bun.DB) error { + _, err := conn.NewDropTable(). + Model((*db.Season)(nil)). + IfExists(). + Exec(ctx) + if err != nil { + return err + } + return nil + }, + ) +} diff --git a/internal/db/migrations/20260202231414_add_rbac_system.go b/internal/db/migrations/20260202231414_add_rbac_system.go new file mode 100644 index 0000000..92fb60d --- /dev/null +++ b/internal/db/migrations/20260202231414_add_rbac_system.go @@ -0,0 +1,253 @@ +package migrations + +import ( + "context" + "time" + + "git.haelnorr.com/h/oslstats/internal/db" + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +func init() { + Migrations.MustRegister( + // UP migration + func(ctx context.Context, conn *bun.DB) error { + conn.RegisterModel((*db.RolePermission)(nil), (*db.UserRole)(nil)) + // Create roles table + _, err := conn.NewCreateTable(). + Model((*db.Role)(nil)). 
+ Exec(ctx) + if err != nil { + return err + } + + // Create permissions table + _, err = conn.NewCreateTable(). + Model((*db.Permission)(nil)). + Exec(ctx) + if err != nil { + return err + } + + // Create indexes for permissions + _, err = conn.NewCreateIndex(). + Model((*db.Permission)(nil)). + Index("idx_permissions_resource"). + Column("resource"). + Exec(ctx) + if err != nil { + return err + } + + _, err = conn.NewCreateIndex(). + Model((*db.Permission)(nil)). + Index("idx_permissions_action"). + Column("action"). + Exec(ctx) + if err != nil { + return err + } + + _, err = conn.NewCreateTable(). + Model((*db.RolePermission)(nil)). + Exec(ctx) + if err != nil { + return err + } + + _, err = conn.ExecContext(ctx, ` + CREATE INDEX idx_role_permissions_role ON role_permissions(role_id) + `) + if err != nil { + return err + } + + _, err = conn.ExecContext(ctx, ` + CREATE INDEX idx_role_permissions_permission ON role_permissions(permission_id) + `) + if err != nil { + return err + } + + // Create user_roles table + _, err = conn.NewCreateTable(). + Model((*db.UserRole)(nil)). + Exec(ctx) + if err != nil { + return err + } + + // Create indexes for user_roles + _, err = conn.NewCreateIndex(). + Model((*db.UserRole)(nil)). + Index("idx_user_roles_user"). + Column("user_id"). + Exec(ctx) + if err != nil { + return err + } + + _, err = conn.NewCreateIndex(). + Model((*db.UserRole)(nil)). + Index("idx_user_roles_role"). + Column("role_id"). + Exec(ctx) + if err != nil { + return err + } + + // Create audit_log table + _, err = conn.NewCreateTable(). + Model((*db.AuditLog)(nil)). + Exec(ctx) + if err != nil { + return err + } + + // Create indexes for audit_log + _, err = conn.NewCreateIndex(). + Model((*db.AuditLog)(nil)). + Index("idx_audit_log_user"). + Column("user_id"). + Exec(ctx) + if err != nil { + return err + } + + _, err = conn.NewCreateIndex(). + Model((*db.AuditLog)(nil)). + Index("idx_audit_log_action"). + Column("action"). 
+ Exec(ctx) + if err != nil { + return err + } + + _, err = conn.NewCreateIndex(). + Model((*db.AuditLog)(nil)). + Index("idx_audit_log_resource"). + Column("resource_type", "resource_id"). + Exec(ctx) + if err != nil { + return err + } + + _, err = conn.NewCreateIndex(). + Model((*db.AuditLog)(nil)). + Index("idx_audit_log_created"). + Column("created_at"). + Exec(ctx) + if err != nil { + return err + } + + err = seedSystemRBAC(ctx, conn) + if err != nil { + return err + } + + return nil + }, + // DOWN migration + func(ctx context.Context, dbConn *bun.DB) error { + // Drop tables in reverse order + // Use raw SQL to avoid relationship resolution issues + tables := []string{ + "audit_log", + "user_roles", + "role_permissions", + "permissions", + "roles", + } + + for _, table := range tables { + _, err := dbConn.ExecContext(ctx, "DROP TABLE IF EXISTS "+table+" CASCADE") + if err != nil { + return err + } + } + + return nil + }, + ) +} + +func seedSystemRBAC(ctx context.Context, conn *bun.DB) error { + // Seed system roles + now := time.Now().Unix() + + adminRole := &db.Role{ + Name: "admin", + DisplayName: "Administrator", + Description: "Full system access with all permissions", + IsSystem: true, + CreatedAt: now, + } + + _, err := conn.NewInsert(). + Model(adminRole). + Returning("id"). + Exec(ctx) + if err != nil { + return errors.Wrap(err, "dbConn.NewInsert") + } + + userRole := &db.Role{ + Name: "user", + DisplayName: "User", + Description: "Standard user with basic permissions", + IsSystem: true, + CreatedAt: now, + } + + _, err = conn.NewInsert(). + Model(userRole). 
+ Exec(ctx) + if err != nil { + return errors.Wrap(err, "dbConn.NewInsert") + } + + // Seed system permissions + permissionsData := []*db.Permission{ + {Name: "*", DisplayName: "Wildcard (All Permissions)", Description: "Grants access to all permissions, past, present, and future", Resource: "*", Action: "*", IsSystem: true, CreatedAt: now}, + {Name: "seasons.create", DisplayName: "Create Seasons", Description: "Create new seasons", Resource: "seasons", Action: "create", IsSystem: true, CreatedAt: now}, + {Name: "seasons.update", DisplayName: "Update Seasons", Description: "Update existing seasons", Resource: "seasons", Action: "update", IsSystem: true, CreatedAt: now}, + {Name: "seasons.delete", DisplayName: "Delete Seasons", Description: "Delete seasons", Resource: "seasons", Action: "delete", IsSystem: true, CreatedAt: now}, + {Name: "users.update", DisplayName: "Update Users", Description: "Update user information", Resource: "users", Action: "update", IsSystem: true, CreatedAt: now}, + {Name: "users.ban", DisplayName: "Ban Users", Description: "Ban users from the system", Resource: "users", Action: "ban", IsSystem: true, CreatedAt: now}, + {Name: "users.manage_roles", DisplayName: "Manage User Roles", Description: "Assign and revoke user roles", Resource: "users", Action: "manage_roles", IsSystem: true, CreatedAt: now}, + } + + _, err = conn.NewInsert(). + Model(&permissionsData). + Exec(ctx) + if err != nil { + return errors.Wrap(err, "dbConn.NewInsert") + } + + // Grant wildcard permission to admin role using Bun + // First, get the IDs + var wildcardPerm db.Permission + err = conn.NewSelect(). + Model(&wildcardPerm). + Where("name = ?", "*"). + Scan(ctx) + if err != nil { + return err + } + + // Insert role_permission mapping + adminRolePerms := &db.RolePermission{ + RoleID: adminRole.ID, + PermissionID: wildcardPerm.ID, + } + _, err = conn.NewInsert(). + Model(adminRolePerms). + On("CONFLICT (role_id, permission_id) DO NOTHING"). 
+ Exec(ctx) + if err != nil { + return errors.Wrap(err, "dbConn.NewInsert") + } + return nil +} diff --git a/internal/db/migrations/20260210182212_add_leagues.go b/internal/db/migrations/20260210182212_add_leagues.go new file mode 100644 index 0000000..53599d8 --- /dev/null +++ b/internal/db/migrations/20260210182212_add_leagues.go @@ -0,0 +1,67 @@ +package migrations + +import ( + "context" + + "github.com/uptrace/bun" + + "git.haelnorr.com/h/oslstats/internal/db" +) + +func init() { + Migrations.MustRegister( + // UP migration + func(ctx context.Context, conn *bun.DB) error { + // Add slap_version column to seasons table + _, err := conn.NewAddColumn(). + Model((*db.Season)(nil)). + ColumnExpr("slap_version VARCHAR NOT NULL DEFAULT 'rebound'"). + IfNotExists(). + Exec(ctx) + if err != nil { + return err + } + + // Create leagues table + _, err = conn.NewCreateTable(). + Model((*db.League)(nil)). + Exec(ctx) + if err != nil { + return err + } + + // Create season_leagues join table + _, err = conn.NewCreateTable(). + Model((*db.SeasonLeague)(nil)). + Exec(ctx) + return err + }, + // DOWN migration + func(ctx context.Context, conn *bun.DB) error { + // Drop season_leagues join table first + _, err := conn.NewDropTable(). + Model((*db.SeasonLeague)(nil)). + IfExists(). + Exec(ctx) + if err != nil { + return err + } + + // Drop leagues table + _, err = conn.NewDropTable(). + Model((*db.League)(nil)). + IfExists(). + Exec(ctx) + if err != nil { + return err + } + + // Remove slap_version column from seasons table + _, err = conn.NewDropColumn(). + Model((*db.Season)(nil)). + ColumnExpr("slap_version"). 
+ Exec(ctx) + return err + }, + ) +} diff --git a/internal/db/migrations/20260211225253_teams.go b/internal/db/migrations/20260211225253_teams.go new file mode 100644 index 0000000..1a9b99b --- /dev/null +++ b/internal/db/migrations/20260211225253_teams.go @@ -0,0 +1,49 @@ +package migrations + +import ( + "context" + + "git.haelnorr.com/h/oslstats/internal/db" + "github.com/uptrace/bun" +) + +func init() { + Migrations.MustRegister( + // UP migration + func(ctx context.Context, conn *bun.DB) error { + // Add your migration code here + _, err := conn.NewCreateTable(). + Model((*db.Team)(nil)). + Exec(ctx) + if err != nil { + return err + } + _, err = conn.NewCreateTable(). + Model((*db.TeamParticipation)(nil)). + Exec(ctx) + if err != nil { + return err + } + return nil + }, + // DOWN migration + func(ctx context.Context, conn *bun.DB) error { + // Add your rollback code here + _, err := conn.NewDropTable(). + Model((*db.TeamParticipation)(nil)). + IfExists(). + Exec(ctx) + if err != nil { + return err + } + _, err = conn.NewDropTable(). + Model((*db.Team)(nil)). + IfExists(). 
+ Exec(ctx) + if err != nil { + return err + } + return nil + }, + ) +} diff --git a/internal/db/migrations/20260213162216_missing_permissions.go b/internal/db/migrations/20260213162216_missing_permissions.go new file mode 100644 index 0000000..a6b035e --- /dev/null +++ b/internal/db/migrations/20260213162216_missing_permissions.go @@ -0,0 +1,44 @@ +package migrations + +import ( + "context" + "time" + + "git.haelnorr.com/h/oslstats/internal/db" + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +func init() { + Migrations.MustRegister( + // UP migration + func(ctx context.Context, conn *bun.DB) error { + // Add your migration code here + now := time.Now().Unix() + permissionsData := []*db.Permission{ + {Name: "seasons.add_league", DisplayName: "Add Leagues to Season", Description: "Assign an existing league to Seasons", Resource: "seasons", Action: "add_league", IsSystem: true, CreatedAt: now}, + {Name: "seasons.remove_league", DisplayName: "Remove Leagues from a Season", Description: "Remove an assigned league league from Seasons", Resource: "seasons", Action: "remove_league", IsSystem: true, CreatedAt: now}, + {Name: "leagues.create", DisplayName: "Create Leagues", Description: "Create new leagues", Resource: "leagues", Action: "create", IsSystem: true, CreatedAt: now}, + {Name: "leagues.update", DisplayName: "Update Leagues", Description: "Update existing leagues", Resource: "leagues", Action: "update", IsSystem: true, CreatedAt: now}, + {Name: "leagues.delete", DisplayName: "Delete Leagues", Description: "Delete leagues", Resource: "leagues", Action: "delete", IsSystem: true, CreatedAt: now}, + {Name: "teams.create", DisplayName: "Create Teams", Description: "Create new teams", Resource: "teams", Action: "create", IsSystem: true, CreatedAt: now}, + {Name: "teams.update", DisplayName: "Update Teams", Description: "Update existing teams", Resource: "teams", Action: "update", IsSystem: true, CreatedAt: now}, + {Name: "teams.delete", DisplayName: "Delete 
Teams", Description: "Delete teams", Resource: "teams", Action: "delete", IsSystem: true, CreatedAt: now}, + {Name: "teams.add_to_league", DisplayName: "Add Teams to League", Description: "Add an existing team to a league/season", Resource: "teams", Action: "add_to_league", IsSystem: true, CreatedAt: now}, + } + + _, err := conn.NewInsert(). + Model(&permissionsData). + Exec(ctx) + if err != nil { + return errors.Wrap(err, "conn.NewInsert") + } + return nil + }, + // DOWN migration + func(ctx context.Context, conn *bun.DB) error { + // Add your rollback code here + return nil + }, + ) +} diff --git a/internal/db/migrations/20260215093841_add_fixtures.go b/internal/db/migrations/20260215093841_add_fixtures.go new file mode 100644 index 0000000..52d0836 --- /dev/null +++ b/internal/db/migrations/20260215093841_add_fixtures.go @@ -0,0 +1,52 @@ +package migrations + +import ( + "context" + "time" + + "git.haelnorr.com/h/oslstats/internal/db" + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +func init() { + Migrations.MustRegister( + // UP migration + func(ctx context.Context, conn *bun.DB) error { + // Add your migration code here + _, err := conn.NewCreateTable(). + Model((*db.Fixture)(nil)). + IfNotExists(). + Exec(ctx) + if err != nil { + return err + } + now := time.Now().Unix() + permissionsData := []*db.Permission{ + {Name: "fixtures.create", DisplayName: "Create Fixtures", Description: "Create new fixtures", Resource: "fixtures", Action: "create", IsSystem: true, CreatedAt: now}, + {Name: "fixtures.manage", DisplayName: "Manage Fixtures", Description: "Manage fixtures", Resource: "fixtures", Action: "manage", IsSystem: true, CreatedAt: now}, + {Name: "fixtures.delete", DisplayName: "Delete Fixtures", Description: "Delete fixtures", Resource: "fixtures", Action: "delete", IsSystem: true, CreatedAt: now}, + } + + _, err = conn.NewInsert(). + Model(&permissionsData). 
+ Exec(ctx) + if err != nil { + return errors.Wrap(err, "conn.NewInsert") + } + return nil + }, + // DOWN migration + func(ctx context.Context, conn *bun.DB) error { + // Add your rollback code here + _, err := conn.NewDropTable(). + Model((*db.Fixture)(nil)). + IfExists(). + Exec(ctx) + if err != nil { + return err + } + return nil + }, + ) +} diff --git a/internal/db/migrations/migrations.go b/internal/db/migrations/migrations.go new file mode 100644 index 0000000..661fe58 --- /dev/null +++ b/internal/db/migrations/migrations.go @@ -0,0 +1,9 @@ +// Package migrations defines the database migrations to apply when using the migrate tags +package migrations + +import ( + "github.com/uptrace/bun/migrate" +) + +// Migrations is the collection of all database migrations +var Migrations = migrate.NewMigrations() diff --git a/internal/db/paginate.go b/internal/db/paginate.go new file mode 100644 index 0000000..b9a7dae --- /dev/null +++ b/internal/db/paginate.go @@ -0,0 +1,200 @@ +package db + +import ( + "net/http" + "strings" + + "git.haelnorr.com/h/golib/hws" + "git.haelnorr.com/h/oslstats/internal/validation" + "github.com/uptrace/bun" +) + +type PageOpts struct { + Page int + PerPage int + Order bun.Order + OrderBy string +} + +type OrderOpts struct { + Order bun.Order + OrderBy string + Label string +} + +func GetPageOpts(s *hws.Server, w http.ResponseWriter, r *http.Request) (*PageOpts, bool) { + var getter validation.Getter + switch r.Method { + case "GET": + getter = validation.NewQueryGetter(r) + case "POST": + var ok bool + getter, ok = validation.ParseFormOrError(s, w, r) + if !ok { + return nil, false + } + default: + return nil, false + } + return getPageOpts(s, w, r, getter), true +} + +func getPageOpts(s *hws.Server, w http.ResponseWriter, r *http.Request, g validation.Getter) *PageOpts { + page := g.Int("page").Optional().Min(1).Value + perPage := g.Int("per_page").Optional().Min(1).Max(100).Value + order := 
g.String("order").TrimSpace().ToUpper().Optional().AllowedValues([]string{"ASC", "DESC"}).Value + orderBy := g.String("order_by").TrimSpace().Optional().ToLower().Value + valid := g.ValidateAndError(s, w, r) + if !valid { + return nil + } + pageOpts := &PageOpts{ + Page: page, + PerPage: perPage, + Order: bun.Order(order), + OrderBy: orderBy, + } + return pageOpts +} + +func setPageOpts(q *bun.SelectQuery, p, d *PageOpts, totalitems int) (*bun.SelectQuery, *PageOpts) { + if p == nil { + p = new(PageOpts) + } + if p.Page <= 0 { + p.Page = d.Page + } + if p.PerPage == 0 { + p.PerPage = d.PerPage + } + maxpage := p.TotalPages(totalitems) + if p.Page > maxpage && maxpage > 0 { + p.Page = maxpage + } + if p.Order == "" { + p.Order = d.Order + } + if p.OrderBy == "" { + p.OrderBy = d.OrderBy + } + p.OrderBy = sanitiseOrderBy(p.OrderBy) + q = q.OrderBy(p.OrderBy, p.Order). + Limit(p.PerPage). + Offset(p.PerPage * (p.Page - 1)) + return q, p +} + +func sanitiseOrderBy(orderby string) string { + result := strings.ToLower(orderby) + var builder strings.Builder + for _, r := range result { + if isValidChar(r) { + builder.WriteRune(r) + } + } + sanitized := builder.String() + + if sanitized == "" { + return "_" + } + + if !isValidFirstChar(rune(sanitized[0])) { + sanitized = "_" + sanitized + } + + if len(sanitized) > 63 { + sanitized = sanitized[:63] + } + + return sanitized +} + +func isValidChar(r rune) bool { + return (r >= 'a' && r <= 'z') || + (r >= '0' && r <= '9') || + r == '_' +} + +func isValidFirstChar(r rune) bool { + return (r >= 'a' && r <= 'z') || r == '_' +} + +// TotalPages calculates the total number of pages +func (p *PageOpts) TotalPages(total int) int { + if p.PerPage == 0 { + return 0 + } + pages := total / p.PerPage + if total%p.PerPage > 0 { + pages++ + } + return pages +} + +// HasPrevPage checks if there is a previous page +func (p *PageOpts) HasPrevPage() bool { + return p.Page > 1 +} + +// HasNextPage checks if there is a next page +func (p 
*PageOpts) HasNextPage(total int) bool { + return p.Page < p.TotalPages(total) +} + +// GetPageRange returns an array of page numbers to display +// maxButtons controls how many page buttons to show +func (p *PageOpts) GetPageRange(total int, maxButtons int) []int { + totalPages := p.TotalPages(total) + if totalPages == 0 { + return []int{} + } + + // If total pages is less than max buttons, show all pages + if totalPages <= maxButtons { + pages := make([]int, totalPages) + for i := range totalPages { + pages[i] = i + 1 + } + return pages + } + + // Calculate range around current page + halfButtons := maxButtons / 2 + start := p.Page - halfButtons + end := p.Page + halfButtons + + // Adjust if at beginning + if start < 1 { + start = 1 + end = maxButtons + } + + // Adjust if at end + if end > totalPages { + end = totalPages + start = totalPages - maxButtons + 1 + } + + pages := make([]int, 0, maxButtons) + for i := start; i <= end; i++ { + pages = append(pages, i) + } + return pages +} + +// StartItem returns the number of the first item on the current page +func (p *PageOpts) StartItem() int { + if p.Page < 1 { + return 0 + } + return (p.Page-1)*p.PerPage + 1 +} + +// EndItem returns the number of the last item on the current page +func (p *PageOpts) EndItem(total int) int { + end := p.Page * p.PerPage + if end > total { + return total + } + return end +} diff --git a/internal/db/permission.go b/internal/db/permission.go new file mode 100644 index 0000000..ec80cf3 --- /dev/null +++ b/internal/db/permission.go @@ -0,0 +1,96 @@ +package db + +import ( + "context" + + "git.haelnorr.com/h/oslstats/internal/permissions" + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +type Permission struct { + bun.BaseModel `bun:"table:permissions,alias:p"` + + ID int `bun:"id,pk,autoincrement"` + Name permissions.Permission `bun:"name,unique,notnull"` + DisplayName string `bun:"display_name,notnull"` + Description string `bun:"description"` + Resource string 
`bun:"resource,notnull"` + Action string `bun:"action,notnull"` + IsSystem bool `bun:"is_system,default:false"` + CreatedAt int64 `bun:"created_at,notnull"` + + Roles []Role `bun:"m2m:role_permissions,join:Permission=Role"` +} + +func (p Permission) isSystem() bool { + return p.IsSystem +} + +// GetPermissionByName queries the database for a permission matching the given name +// Returns a BadRequestNotFound error if no permission is found +func GetPermissionByName(ctx context.Context, tx bun.Tx, name permissions.Permission) (*Permission, error) { + if name == "" { + return nil, errors.New("name cannot be empty") + } + return GetByField[Permission](tx, "name", name).Get(ctx) +} + +// GetPermissionByID queries the database for a permission matching the given ID +// Returns a BadRequestNotFound error if no permission is found +func GetPermissionByID(ctx context.Context, tx bun.Tx, id int) (*Permission, error) { + if id <= 0 { + return nil, errors.New("id must be positive") + } + return GetByID[Permission](tx, id).Get(ctx) +} + +// GetPermissionsByResource queries for all permissions for a given resource +func GetPermissionsByResource(ctx context.Context, tx bun.Tx, resource string) ([]*Permission, error) { + if resource == "" { + return nil, errors.New("resource cannot be empty") + } + return GetList[Permission](tx). 
+ Where("resource = ?", resource).GetAll(ctx) +} + +// ListAllPermissions returns all permissions +func ListAllPermissions(ctx context.Context, tx bun.Tx) ([]*Permission, error) { + return GetList[Permission](tx).GetAll(ctx) +} + +// CreatePermission creates a new permission +func CreatePermission(ctx context.Context, tx bun.Tx, perm *Permission) error { + if perm == nil { + return errors.New("permission cannot be nil") + } + if perm.Name == "" { + return errors.New("name cannot be empty") + } + if perm.DisplayName == "" { + return errors.New("display name cannot be empty") + } + if perm.Resource == "" { + return errors.New("resource cannot be empty") + } + if perm.Action == "" { + return errors.New("action cannot be empty") + } + + err := Insert(tx, perm). + Returning("id"). + Exec(ctx) + if err != nil { + return errors.Wrap(err, "db.Insert") + } + + return nil +} + +// DeletePermission deletes a permission (checks IsSystem protection) +func DeletePermission(ctx context.Context, tx bun.Tx, id int) error { + if id <= 0 { + return errors.New("id must be positive") + } + return DeleteWithProtection[Permission](ctx, tx, id, nil) +} diff --git a/internal/db/role.go b/internal/db/role.go new file mode 100644 index 0000000..b73a8fb --- /dev/null +++ b/internal/db/role.go @@ -0,0 +1,137 @@ +package db + +import ( + "context" + "time" + + "git.haelnorr.com/h/oslstats/internal/roles" + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +type Role struct { + bun.BaseModel `bun:"table:roles,alias:r"` + + ID int `bun:"id,pk,autoincrement"` + Name roles.Role `bun:"name,unique,notnull"` + DisplayName string `bun:"display_name,notnull"` + Description string `bun:"description"` + IsSystem bool `bun:"is_system,default:false"` + CreatedAt int64 `bun:"created_at,notnull"` + UpdatedAt *int64 `bun:"updated_at"` + + // Relations (loaded on demand) + Users []User `bun:"m2m:user_roles,join:Role=User"` + Permissions []Permission `bun:"m2m:role_permissions,join:Role=Permission"` +} + 
+func (r Role) isSystem() bool { + return r.IsSystem +} + +// GetRoleByName queries the database for a role matching the given name +// Returns a BadRequestNotFound error if no role is found +func GetRoleByName(ctx context.Context, tx bun.Tx, name roles.Role) (*Role, error) { + if name == "" { + return nil, errors.New("name cannot be empty") + } + return GetByField[Role](tx, "name", name).Relation("Permissions").Get(ctx) +} + +// GetRoleByID queries the database for a role matching the given ID +// Returns a BadRequestNotFound error if no role is found +func GetRoleByID(ctx context.Context, tx bun.Tx, id int) (*Role, error) { + return GetByID[Role](tx, id).Relation("Permissions").Get(ctx) +} + +// ListAllRoles returns all roles +func ListAllRoles(ctx context.Context, tx bun.Tx) ([]*Role, error) { + return GetList[Role](tx).GetAll(ctx) +} + +// GetRoles returns a paginated list of roles +func GetRoles(ctx context.Context, tx bun.Tx, pageOpts *PageOpts) (*List[Role], error) { + defaults := &PageOpts{ + Page: 1, + PerPage: 25, + Order: bun.OrderAsc, + OrderBy: "display_name", + } + return GetList[Role](tx).GetPaged(ctx, pageOpts, defaults) +} + +// CreateRole creates a new role +func CreateRole(ctx context.Context, tx bun.Tx, role *Role, audit *AuditMeta) error { + if role == nil { + return errors.New("role cannot be nil") + } + role.CreatedAt = time.Now().Unix() + + err := Insert(tx, role). + Returning("id"). + WithAudit(audit, nil). + Exec(ctx) + if err != nil { + return errors.Wrap(err, "db.Insert") + } + + return nil +} + +// UpdateRole updates an existing role +func UpdateRole(ctx context.Context, tx bun.Tx, role *Role, audit *AuditMeta) error { + if role == nil { + return errors.New("role cannot be nil") + } + if role.ID <= 0 { + return errors.New("role id must be positive") + } + + err := Update(tx, role). + WherePK(). + WithAudit(audit, nil). 
+ Exec(ctx) + if err != nil { + return errors.Wrap(err, "db.Update") + } + + return nil +} + +// DeleteRole deletes a role (checks IsSystem protection) +// Also cleans up join table entries in role_permissions and user_roles +func DeleteRole(ctx context.Context, tx bun.Tx, id int, audit *AuditMeta) error { + if id <= 0 { + return errors.New("id must be positive") + } + + // First check if role exists and is not system + role, err := GetRoleByID(ctx, tx, id) + if err != nil { + return errors.Wrap(err, "GetRoleByID") + } + if role.IsSystem { + return errors.New("cannot delete system roles") + } + + // Delete role_permissions entries + _, err = tx.NewDelete(). + Model((*RolePermission)(nil)). + Where("role_id = ?", id). + Exec(ctx) + if err != nil { + return errors.Wrap(err, "delete role_permissions") + } + + // Delete user_roles entries + _, err = tx.NewDelete(). + Model((*UserRole)(nil)). + Where("role_id = ?", id). + Exec(ctx) + if err != nil { + return errors.Wrap(err, "delete user_roles") + } + + // Finally delete the role + return DeleteWithProtection[Role](ctx, tx, id, audit) +} diff --git a/internal/db/rolepermission.go b/internal/db/rolepermission.go new file mode 100644 index 0000000..55b28d1 --- /dev/null +++ b/internal/db/rolepermission.go @@ -0,0 +1,99 @@ +package db + +import ( + "context" + "slices" + + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +type RolePermission struct { + RoleID int `bun:",pk"` + Role *Role `bun:"rel:belongs-to,join:role_id=id"` + PermissionID int `bun:",pk"` + Permission *Permission `bun:"rel:belongs-to,join:permission_id=id"` +} + +func (r *Role) UpdatePermissions(ctx context.Context, tx bun.Tx, newPermissionsIDs []int, audit *AuditMeta) error { + addPerms, removePerms, err := detectChangedPermissions(ctx, tx, r, newPermissionsIDs) + if err != nil { + return errors.Wrap(err, "detectChangedPermissions") + } + addedPerms := []string{} + removedPerms := []string{} + for _, perm := range addPerms { + rolePerm := 
&RolePermission{ + RoleID: r.ID, + PermissionID: perm.ID, + } + err := Insert(tx, rolePerm). + ConflictNothing("role_id", "permission_id"). + Exec(ctx) + if err != nil { + return errors.Wrap(err, "db.Insert") + } + addedPerms = append(addedPerms, perm.Name.String()) + } + for _, perm := range removePerms { + err := DeleteItem[RolePermission](tx). + Where("role_id = ?", r.ID). + Where("permission_id = ?", perm.ID). + Delete(ctx) + if err != nil { + return errors.Wrap(err, "DeleteItem") + } + removedPerms = append(removedPerms, perm.Name.String()) + } + // Log the permission changes + if len(addedPerms) > 0 || len(removedPerms) > 0 { + details := map[string]any{ + "role_name": string(r.Name), + } + if len(addedPerms) > 0 { + details["added_permissions"] = addedPerms + } + if len(removedPerms) > 0 { + details["removed_permissions"] = removedPerms + } + info := &AuditInfo{ + "roles.update_permissions", + "role", + r.ID, + details, + } + err = LogSuccess(ctx, tx, audit, info) + if err != nil { + return errors.Wrap(err, "LogSuccess") + } + } + return nil +} + +func detectChangedPermissions(ctx context.Context, tx bun.Tx, role *Role, permissionIDs []int) ([]*Permission, []*Permission, error) { + allPermissions, err := ListAllPermissions(ctx, tx) + if err != nil { + return nil, nil, errors.Wrap(err, "ListAllPermissions") + } + // Build map of current permissions + currentPermIDs := make(map[int]bool) + for _, perm := range role.Permissions { + currentPermIDs[perm.ID] = true + } + + var addedPerms []*Permission + var removedPerms []*Permission + + // Determine what to add and remove + for _, perm := range allPermissions { + hasNow := currentPermIDs[perm.ID] + shouldHave := slices.Contains(permissionIDs, perm.ID) + + if shouldHave && !hasNow { + addedPerms = append(addedPerms, perm) + } else if !shouldHave && hasNow { + removedPerms = append(removedPerms, perm) + } + } + return addedPerms, removedPerms, nil +} diff --git a/internal/db/season.go b/internal/db/season.go new 
file mode 100644 index 0000000..ec3c572 --- /dev/null +++ b/internal/db/season.go @@ -0,0 +1,183 @@ +package db + +import ( + "context" + "strings" + "time" + + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +// SeasonStatus represents the current status of a season +type SeasonStatus string + +const ( + // StatusUpcoming means the season has not started yet + StatusUpcoming SeasonStatus = "upcoming" + // StatusInProgress means the regular season is active + StatusInProgress SeasonStatus = "in_progress" + // StatusFinalsSoon means regular season ended, finals upcoming + StatusFinalsSoon SeasonStatus = "finals_soon" + // StatusFinals means finals are in progress + StatusFinals SeasonStatus = "finals" + // StatusCompleted means the season has finished + StatusCompleted SeasonStatus = "completed" +) + +type Season struct { + bun.BaseModel `bun:"table:seasons,alias:s"` + + ID int `bun:"id,pk,autoincrement" json:"id"` + Name string `bun:"name,unique,notnull" json:"name"` + ShortName string `bun:"short_name,unique,notnull" json:"short_name"` + StartDate time.Time `bun:"start_date,notnull" json:"start_date"` + EndDate bun.NullTime `bun:"end_date" json:"end_date"` + FinalsStartDate bun.NullTime `bun:"finals_start_date" json:"finals_start_date"` + FinalsEndDate bun.NullTime `bun:"finals_end_date" json:"finals_end_date"` + SlapVersion string `bun:"slap_version,notnull,default:'rebound'" json:"slap_version"` + + Leagues []League `bun:"m2m:season_leagues,join:Season=League" json:"-"` + Teams []Team `bun:"m2m:team_participations,join:Season=Team" json:"-"` +} + +// NewSeason creates a new season +func NewSeason(ctx context.Context, tx bun.Tx, name, version, shortname string, + start time.Time, audit *AuditMeta, +) (*Season, error) { + season := &Season{ + Name: name, + ShortName: strings.ToUpper(shortname), + StartDate: start.Truncate(time.Hour * 24), + SlapVersion: version, + } + err := Insert(tx, season). 
+ WithAudit(audit, nil).Exec(ctx) + if err != nil { + return nil, errors.WithMessage(err, "db.Insert") + } + return season, nil +} + +func ListSeasons(ctx context.Context, tx bun.Tx, pageOpts *PageOpts) (*List[Season], error) { + defaults := &PageOpts{ + 1, + 10, + bun.OrderDesc, + "start_date", + } + return GetList[Season](tx).Relation("Leagues").GetPaged(ctx, pageOpts, defaults) +} + +func GetSeason(ctx context.Context, tx bun.Tx, shortname string) (*Season, error) { + if shortname == "" { + return nil, errors.New("short_name not provided") + } + return GetByField[Season](tx, "short_name", shortname).Relation("Leagues").Relation("Teams").Get(ctx) +} + +// Update updates the season struct. It does not insert to the database +func (s *Season) Update(ctx context.Context, tx bun.Tx, version string, + start, end, finalsStart, finalsEnd time.Time, audit *AuditMeta, +) error { + s.SlapVersion = version + s.StartDate = start.Truncate(time.Hour * 24) + if !end.IsZero() { + s.EndDate.Time = end.Truncate(time.Hour * 24) + } + if !finalsStart.IsZero() { + s.FinalsStartDate.Time = finalsStart.Truncate(time.Hour * 24) + } + if !finalsEnd.IsZero() { + s.FinalsEndDate.Time = finalsEnd.Truncate(time.Hour * 24) + } + return Update(tx, s).WherePK(). + Column("slap_version", "start_date", "end_date", "finals_start_date", "finals_end_date"). + WithAudit(audit, nil).Exec(ctx) +} + +func (s *Season) MapTeamsToLeagues(ctx context.Context, tx bun.Tx) ([]LeagueWithTeams, error) { + // For each league, get the teams + leaguesWithTeams := make([]LeagueWithTeams, len(s.Leagues)) + for i, league := range s.Leagues { + var teams []*Team + err := tx.NewSelect(). + Model(&teams). + Join("INNER JOIN team_participations AS tp ON tp.team_id = t.id"). + Where("tp.season_id = ? AND tp.league_id = ?", s.ID, league.ID). + Order("t.name ASC"). 
+ Scan(ctx) + if err != nil { + return nil, errors.Wrap(err, "tx.NewSelect") + } + leaguesWithTeams[i] = LeagueWithTeams{ + League: &league, + Teams: teams, + } + } + return leaguesWithTeams, nil +} + +type LeagueWithTeams struct { + League *League + Teams []*Team +} + +// GetStatus returns the current status of the season based on dates +func (s *Season) GetStatus() SeasonStatus { + now := time.Now() + + if now.Before(s.StartDate) { + return StatusUpcoming + } + + if !s.FinalsStartDate.IsZero() { + if !s.FinalsEndDate.IsZero() && now.After(s.FinalsEndDate.Time) { + return StatusCompleted + } + if now.After(s.FinalsStartDate.Time) { + return StatusFinals + } + if !s.EndDate.IsZero() && now.After(s.EndDate.Time) { + return StatusFinalsSoon + } + return StatusInProgress + } + + if !s.EndDate.IsZero() && now.After(s.EndDate.Time) { + return StatusCompleted + } + + return StatusInProgress +} + +// GetDefaultTab returns the default tab to show based on the season status +func (s *Season) GetDefaultTab() string { + switch s.GetStatus() { + case StatusInProgress: + return "table" + case StatusUpcoming: + return "teams" + default: + return "finals" + } +} + +func (s *Season) HasLeague(league *League) bool { + for _, league_ := range s.Leagues { + if league_.ID == league.ID { + return true + } + } + return false +} + +func (s *Season) GetLeague(leagueShortName string) (*League, error) { + for _, league := range s.Leagues { + if league.ShortName == leagueShortName { + return &league, nil + } + } + return nil, BadRequestNotAssociated("season", "league", + "id", "short_name", s.ID, leagueShortName) +} diff --git a/internal/db/seasonleague.go b/internal/db/seasonleague.go new file mode 100644 index 0000000..7ab55ea --- /dev/null +++ b/internal/db/seasonleague.go @@ -0,0 +1,111 @@ +package db + +import ( + "context" + + "git.haelnorr.com/h/oslstats/internal/permissions" + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +type SeasonLeague struct { + SeasonID int 
`bun:",pk"` + Season *Season `bun:"rel:belongs-to,join:season_id=id"` + LeagueID int `bun:",pk"` + League *League `bun:"rel:belongs-to,join:league_id=id"` +} + +// GetSeasonLeague retrieves a specific season-league combination with teams +func GetSeasonLeague(ctx context.Context, tx bun.Tx, seasonShortName, leagueShortName string) (*Season, *League, []*Team, error) { + if seasonShortName == "" { + return nil, nil, nil, errors.New("season short_name cannot be empty") + } + if leagueShortName == "" { + return nil, nil, nil, errors.New("league short_name cannot be empty") + } + + season, err := GetSeason(ctx, tx, seasonShortName) + if err != nil { + return nil, nil, nil, errors.Wrap(err, "GetSeason") + } + + league, err := season.GetLeague(leagueShortName) + if err != nil { + return nil, nil, nil, errors.Wrap(err, "season.GetLeague") + } + + // Get all teams participating in this season+league + var teams []*Team + err = tx.NewSelect(). + Model(&teams). + Join("INNER JOIN team_participations AS tp ON tp.team_id = t.id"). + Where("tp.season_id = ? AND tp.league_id = ?", season.ID, league.ID). + Order("t.name ASC"). 
+ Scan(ctx) + if err != nil { + return nil, nil, nil, errors.Wrap(err, "tx.Select teams") + } + + return season, league, teams, nil +} + +func NewSeasonLeague(ctx context.Context, tx bun.Tx, seasonShortName, leagueShortName string, audit *AuditMeta) error { + season, err := GetSeason(ctx, tx, seasonShortName) + if err != nil { + return errors.Wrap(err, "GetSeason") + } + league, err := GetLeague(ctx, tx, leagueShortName) + if err != nil { + return errors.Wrap(err, "GetLeague") + } + if season.HasLeague(league) { + return BadRequestAssociated("season", "league", + "id", "id", season.ID, league.ID) + } + seasonLeague := &SeasonLeague{ + SeasonID: season.ID, + LeagueID: league.ID, + } + info := &AuditInfo{ + string(permissions.SeasonsAddLeague), + "season", + season.ID, + map[string]any{"league_id": league.ID}, + } + err = Insert(tx, seasonLeague).WithAudit(audit, info).Exec(ctx) + if err != nil { + return errors.Wrap(err, "db.Insert") + } + return nil +} + +func (s *Season) RemoveLeague(ctx context.Context, tx bun.Tx, leagueShortName string, audit *AuditMeta) error { + league, err := s.GetLeague(leagueShortName) + if err != nil { + return errors.Wrap(err, "s.GetLeague") + } + info := &AuditInfo{ + string(permissions.SeasonsRemoveLeague), + "season", + s.ID, + map[string]any{"league_id": league.ID}, + } + err = DeleteItem[SeasonLeague](tx). + Where("season_id = ?", s.ID). + Where("league_id = ?", league.ID). + WithAudit(audit, info). 
+ Delete(ctx) + if err != nil { + return errors.Wrap(err, "db.DeleteItem") + } + return nil +} + +func (t *Team) InTeams(teams []*Team) bool { + for _, team := range teams { + if t.ID == team.ID { + return true + } + } + return false +} diff --git a/internal/db/setup.go b/internal/db/setup.go new file mode 100644 index 0000000..e5b39e4 --- /dev/null +++ b/internal/db/setup.go @@ -0,0 +1,56 @@ +package db + +import ( + "database/sql" + "fmt" + "time" + + "github.com/uptrace/bun" + "github.com/uptrace/bun/dialect/pgdialect" + "github.com/uptrace/bun/driver/pgdriver" +) + +type DB struct { + *bun.DB +} + +func (db *DB) Close() error { + return db.DB.Close() +} + +func (db *DB) RegisterModels() []any { + models := []any{ + (*RolePermission)(nil), + (*UserRole)(nil), + (*SeasonLeague)(nil), + (*TeamParticipation)(nil), + (*User)(nil), + (*DiscordToken)(nil), + (*Season)(nil), + (*League)(nil), + (*Team)(nil), + (*Role)(nil), + (*Permission)(nil), + (*AuditLog)(nil), + (*Fixture)(nil), + } + db.RegisterModel(models...) 
+ return models +} + +func NewDB(cfg *Config) *DB { + dsn := fmt.Sprintf("postgres://%s:%s@%s:%v/%s?sslmode=%s", + cfg.User, cfg.Password, cfg.Host, cfg.Port, cfg.DB, cfg.SSL) + sqldb := sql.OpenDB(pgdriver.NewConnector(pgdriver.WithDSN(dsn))) + + sqldb.SetMaxOpenConns(25) + sqldb.SetMaxIdleConns(10) + sqldb.SetConnMaxLifetime(5 * time.Minute) + sqldb.SetConnMaxIdleTime(5 * time.Minute) + + db := &DB{ + bun.NewDB(sqldb, pgdialect.New()), + } + db.RegisterModels() + return db +} diff --git a/internal/db/team.go b/internal/db/team.go new file mode 100644 index 0000000..b0d9054 --- /dev/null +++ b/internal/db/team.go @@ -0,0 +1,73 @@ +package db + +import ( + "context" + + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +type Team struct { + bun.BaseModel `bun:"table:teams,alias:t"` + ID int `bun:"id,pk,autoincrement" json:"id"` + Name string `bun:"name,unique,notnull" json:"name"` + ShortName string `bun:"short_name,notnull,unique:short_names" json:"short_name"` + AltShortName string `bun:"alt_short_name,notnull,unique:short_names" json:"alt_short_name"` + Color string `bun:"color" json:"color,omitempty"` + + Seasons []Season `bun:"m2m:team_participations,join:Team=Season" json:"-"` + Leagues []League `bun:"m2m:team_participations,join:Team=League" json:"-"` +} + +func NewTeam(ctx context.Context, tx bun.Tx, name, shortName, altShortName, color string, audit *AuditMeta) (*Team, error) { + team := &Team{ + Name: name, + ShortName: shortName, + AltShortName: altShortName, + Color: color, + } + err := Insert(tx, team). 
+ WithAudit(audit, nil).Exec(ctx) + if err != nil { + return nil, errors.Wrap(err, "db.Insert") + } + return team, nil +} + +func ListTeams(ctx context.Context, tx bun.Tx, pageOpts *PageOpts) (*List[Team], error) { + defaults := &PageOpts{ + 1, + 10, + bun.OrderAsc, + "name", + } + return GetList[Team](tx).GetPaged(ctx, pageOpts, defaults) +} + +func GetTeam(ctx context.Context, tx bun.Tx, id int) (*Team, error) { + if id == 0 { + return nil, errors.New("id not provided") + } + return GetByID[Team](tx, id).Relation("Seasons").Relation("Leagues").Get(ctx) +} + +func TeamShortNamesUnique(ctx context.Context, tx bun.Tx, shortName, altShortName string) (bool, error) { + // Check if this combination of short_name and alt_short_name exists + count, err := tx.NewSelect(). + Model((*Team)(nil)). + Where("short_name = ? AND alt_short_name = ?", shortName, altShortName). + Count(ctx) + if err != nil { + return false, errors.Wrap(err, "tx.Select") + } + return count == 0, nil +} + +func (t *Team) InSeason(seasonID int) bool { + for _, season := range t.Seasons { + if season.ID == seasonID { + return true + } + } + return false +} diff --git a/internal/db/teamparticipation.go b/internal/db/teamparticipation.go new file mode 100644 index 0000000..1f44e45 --- /dev/null +++ b/internal/db/teamparticipation.go @@ -0,0 +1,56 @@ +package db + +import ( + "context" + + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +type TeamParticipation struct { + SeasonID int `bun:",pk,unique:season_team"` + Season *Season `bun:"rel:belongs-to,join:season_id=id"` + LeagueID int `bun:",pk"` + League *League `bun:"rel:belongs-to,join:league_id=id"` + TeamID int `bun:",pk,unique:season_team"` + Team *Team `bun:"rel:belongs-to,join:team_id=id"` +} + +func NewTeamParticipation(ctx context.Context, tx bun.Tx, + seasonShortName, leagueShortName string, teamID int, audit *AuditMeta, +) (*Team, *Season, *League, error) { + season, err := GetSeason(ctx, tx, seasonShortName) + if err != nil { + 
return nil, nil, nil, errors.Wrap(err, "GetSeason") + } + league, err := season.GetLeague(leagueShortName) + if err != nil { + return nil, nil, nil, errors.Wrap(err, "season.GetLeague") + } + team, err := GetTeam(ctx, tx, teamID) + if err != nil { + return nil, nil, nil, errors.Wrap(err, "GetTeam") + } + if team.InSeason(season.ID) { + return nil, nil, nil, BadRequestAssociated("season", "team", + "id", "id", season.ID, team.ID) + } + participation := &TeamParticipation{ + SeasonID: season.ID, + LeagueID: league.ID, + TeamID: team.ID, + } + + info := &AuditInfo{ + "teams.join_season", + "team", + teamID, + map[string]any{"season_id": season.ID, "league_id": league.ID}, + } + err = Insert(tx, participation). + WithAudit(audit, info).Exec(ctx) + if err != nil { + return nil, nil, nil, errors.Wrap(err, "db.Insert") + } + return team, season, league, nil +} diff --git a/internal/db/txhelpers.go b/internal/db/txhelpers.go new file mode 100644 index 0000000..ad81787 --- /dev/null +++ b/internal/db/txhelpers.go @@ -0,0 +1,113 @@ +package db + +import ( + "context" + "net/http" + "time" + + "git.haelnorr.com/h/golib/hws" + "git.haelnorr.com/h/oslstats/internal/notify" + "git.haelnorr.com/h/oslstats/internal/throw" + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +// TxFunc is a function that runs within a database transaction +type ( + TxFunc func(ctx context.Context, tx bun.Tx) (bool, error) + TxFuncSilent func(ctx context.Context, tx bun.Tx) error +) + +var timeout = 15 * time.Second + +// WithReadTx executes a read-only transaction with automatic rollback +// Returns true if successful, false if error was thrown to client +func (db *DB) WithReadTx( + s *hws.Server, + w http.ResponseWriter, + r *http.Request, + fn TxFunc, +) bool { + ctx, cancel := context.WithTimeout(r.Context(), timeout) + defer cancel() + ok, err := db.withTx(ctx, fn, false) + if err != nil { + throw.InternalServiceError(s, w, r, "Database error", err) + } + return ok +} + +// 
WithTxFailSilently executes a transaction with automatic rollback +// Returns nil if successful, or the error that occurred. +// Does not throw any errors to the client. +func (db *DB) WithTxFailSilently( + ctx context.Context, + fn TxFuncSilent, +) error { + fnc := func(ctx context.Context, tx bun.Tx) (bool, error) { + err := fn(ctx, tx) + return err == nil, err + } + _, err := db.withTx(ctx, fnc, true) + return err +} + +// WithWriteTx executes a write transaction with automatic rollback on error +// Commits only if fn returns nil. Returns true if successful. +func (db *DB) WithWriteTx( + s *hws.Server, + w http.ResponseWriter, + r *http.Request, + fn TxFunc, +) bool { + ctx, cancel := context.WithTimeout(r.Context(), timeout) + defer cancel() + ok, err := db.withTx(ctx, fn, true) + if err != nil { + throw.InternalServiceError(s, w, r, "Database error", err) + } + return ok +} + +// WithNotifyTx executes a transaction with notification-based error handling +// Uses notifyInternalServiceError instead of throwInternalServiceError +func (db *DB) WithNotifyTx( + s *hws.Server, + w http.ResponseWriter, + r *http.Request, + fn TxFunc, +) bool { + ctx, cancel := context.WithTimeout(r.Context(), timeout) + defer cancel() + ok, err := db.withTx(ctx, fn, true) + if err != nil { + notify.InternalServiceError(s, w, r, "Database error", err) + } + return ok +} + +// withTx executes a transaction with automatic rollback on error +func (db *DB) withTx( + ctx context.Context, + fn TxFunc, + write bool, +) (bool, error) { + tx, err := db.BeginTx(ctx, nil) + if err != nil { + return false, errors.Wrap(err, "conn.BeginTx") + } + defer func() { _ = tx.Rollback() }() + ok, err := fn(ctx, tx) + if err != nil || !ok { + return false, err + } + if write { + err = tx.Commit() + if err != nil { + return false, errors.Wrap(err, "tx.Commit") + } + } else { + _ = tx.Commit() + } + return true, nil +} diff --git a/internal/db/update.go b/internal/db/update.go new file mode 100644 index 
0000000..de41b83 --- /dev/null +++ b/internal/db/update.go @@ -0,0 +1,122 @@ +package db + +import ( + "context" + + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +type updater[T any] struct { + tx bun.Tx + q *bun.UpdateQuery + model *T + columns []string + audit *AuditMeta + auditInfo *AuditInfo +} + +// Update creates an updater for a model +// You must specify which columns to update via .Column() or use .WherePK() +func Update[T any](tx bun.Tx, model *T) *updater[T] { + if model == nil { + panic("model cannot be nil") + } + return &updater[T]{ + tx: tx, + q: tx.NewUpdate().Model(model), + model: model, + } +} + +// UpdateByID creates an updater with an ID where clause +// You must still specify which columns to update via .Column() +func UpdateByID[T any](tx bun.Tx, id int, model *T) *updater[T] { + if id <= 0 { + panic("id must be positive") + } + return Update(tx, model).Where("id = ?", id) +} + +// Column specifies which columns to update +// Example: .Column("start_date", "end_date") +func (u *updater[T]) Column(columns ...string) *updater[T] { + u.columns = append(u.columns, columns...) + u.q = u.q.Column(columns...) + return u +} + +// Where adds a WHERE clause +// Example: .Where("id = ?", 123) +func (u *updater[T]) Where(query string, args ...any) *updater[T] { + u.q = u.q.Where(query, args...) + return u +} + +// WherePK adds a WHERE clause on the primary key +// The model must have its primary key field populated +func (u *updater[T]) WherePK() *updater[T] { + u.q = u.q.WherePK() + return u +} + +// Set adds a raw SET clause for complex updates +// Example: .Set("updated_at = NOW()") +func (u *updater[T]) Set(query string, args ...any) *updater[T] { + u.q = u.q.Set(query, args...) 
+ return u +} + +// WithAudit enables audit logging for this update operation +// If the provided *AuditInfo is nil, will use reflection to automatically work out the details +func (u *updater[T]) WithAudit(meta *AuditMeta, info *AuditInfo) *updater[T] { + u.audit = meta + u.auditInfo = info + return u +} + +// Exec executes the update and optionally logs to audit +// Returns an error if update fails or if audit callback fails (triggering rollback) +func (u *updater[T]) Exec(ctx context.Context) error { + // Build audit details BEFORE update (captures changed fields) + var details map[string]any + if u.audit != nil && len(u.columns) > 0 { + details = extractChangedFields(u.model, u.columns) + } + + // Execute update + result, err := u.q.Exec(ctx) + if err != nil { + return errors.Wrap(err, "bun.UpdateQuery.Exec") + } + rows, err := result.RowsAffected() + if err != nil { + return errors.Wrap(err, "result.RowsAffected") + } + if rows == 0 { + resource := extractResourceType(extractTableName[T]()) + return BadRequestNotFound(resource, "id", extractPrimaryKey(u.model)) + } + + // Handle audit logging if enabled + if u.audit != nil { + if u.auditInfo == nil { + tableName := extractTableName[T]() + resourceType := extractResourceType(tableName) + action := buildAction(resourceType, "update") + + u.auditInfo = &AuditInfo{ + Action: action, + ResourceType: resourceType, + ResourceID: extractPrimaryKey(u.model), + Details: details, // Changed fields only + } + } + err = LogSuccess(ctx, u.tx, u.audit, u.auditInfo) + if err != nil { + return errors.Wrap(err, "LogSuccess") + } + } + + return nil +} diff --git a/internal/db/user.go b/internal/db/user.go index 40c5ca8..0a5bb23 100644 --- a/internal/db/user.go +++ b/internal/db/user.go @@ -2,46 +2,35 @@ package db import ( "context" - "fmt" "time" "git.haelnorr.com/h/golib/hwsauth" + "git.haelnorr.com/h/oslstats/internal/permissions" + "git.haelnorr.com/h/oslstats/internal/roles" "github.com/bwmarrin/discordgo" 
"github.com/pkg/errors" "github.com/uptrace/bun" ) -var CurrentUser hwsauth.ContextLoader[*User] - type User struct { bun.BaseModel `bun:"table:users,alias:u"` - ID int `bun:"id,pk,autoincrement"` // Integer ID (index primary key) - Username string `bun:"username,unique"` // Username (unique) - CreatedAt int64 `bun:"created_at"` // Epoch timestamp when the user was added to the database - DiscordID string `bun:"discord_id,unique"` + ID int `bun:"id,pk,autoincrement" json:"id"` + Username string `bun:"username,unique" json:"username"` + CreatedAt int64 `bun:"created_at" json:"created_at"` + DiscordID string `bun:"discord_id,unique" json:"discord_id"` + + Roles []*Role `bun:"m2m:user_roles,join:User=Role" json:"-"` } -func (user *User) GetID() int { - return user.ID +func (u *User) GetID() int { + return u.ID } -// Change the user's username -func (user *User) ChangeUsername(ctx context.Context, tx bun.Tx, newUsername string) error { - _, err := tx.NewUpdate(). - Model(user). - Set("username = ?", newUsername). - Where("id = ?", user.ID). - Exec(ctx) - if err != nil { - return errors.Wrap(err, "tx.Update") - } - user.Username = newUsername - return nil -} +var CurrentUser hwsauth.ContextLoader[*User] // CreateUser creates a new user with the given username and password -func CreateUser(ctx context.Context, tx bun.Tx, username string, discorduser *discordgo.User) (*User, error) { +func CreateUser(ctx context.Context, tx bun.Tx, username string, discorduser *discordgo.User, audit *AuditMeta) (*User, error) { if discorduser == nil { return nil, errors.New("user cannot be nil") } @@ -50,81 +39,113 @@ func CreateUser(ctx context.Context, tx bun.Tx, username string, discorduser *di CreatedAt: time.Now().Unix(), DiscordID: discorduser.ID, } + audit.u = user - _, err := tx.NewInsert(). - Model(user). + err := Insert(tx, user). + WithAudit(audit, nil). + Returning("id"). 
Exec(ctx) if err != nil { - return nil, errors.Wrap(err, "tx.Insert") + return nil, errors.Wrap(err, "db.Insert") } return user, nil } // GetUserByID queries the database for a user matching the given ID -// Returns nil, nil if no user is found +// Returns a BadRequestNotFound error if no user is found func GetUserByID(ctx context.Context, tx bun.Tx, id int) (*User, error) { - fmt.Printf("user id requested: %v", id) - user := new(User) - err := tx.NewSelect(). - Model(user). - Where("id = ?", id). - Limit(1). - Scan(ctx) - if err != nil { - if err.Error() == "sql: no rows in result set" { - return nil, nil - } - return nil, errors.Wrap(err, "tx.Select") - } - return user, nil + return GetByID[User](tx, id).Get(ctx) } // GetUserByUsername queries the database for a user matching the given username -// Returns nil, nil if no user is found +// Returns a BadRequestNotFound error if no user is found func GetUserByUsername(ctx context.Context, tx bun.Tx, username string) (*User, error) { - user := new(User) - err := tx.NewSelect(). - Model(user). - Where("username = ?", username). - Limit(1). - Scan(ctx) - if err != nil { - if err.Error() == "sql: no rows in result set" { - return nil, nil - } - return nil, errors.Wrap(err, "tx.Select") + if username == "" { + return nil, errors.New("username not provided") } - return user, nil + return GetByField[User](tx, "username", username).Get(ctx) } // GetUserByDiscordID queries the database for a user matching the given discord id -// Returns nil, nil if no user is found +// Returns a BadRequestNotFound error if no user is found func GetUserByDiscordID(ctx context.Context, tx bun.Tx, discordID string) (*User, error) { - user := new(User) - err := tx.NewSelect(). - Model(user). - Where("discord_id = ?", discordID). - Limit(1). 
- Scan(ctx) - if err != nil { - if err.Error() == "sql: no rows in result set" { - return nil, nil - } - return nil, errors.Wrap(err, "tx.Select") + if discordID == "" { + return nil, errors.New("discord_id not provided") } - return user, nil + return GetByField[User](tx, "discord_id", discordID).Get(ctx) } -// IsUsernameUnique checks if the given username is unique (not already taken) -// Returns true if the username is available, false if it's taken -func IsUsernameUnique(ctx context.Context, tx bun.Tx, username string) (bool, error) { - count, err := tx.NewSelect(). - Model((*User)(nil)). - Where("username = ?", username). - Count(ctx) - if err != nil { - return false, errors.Wrap(err, "tx.Count") +// GetRoles loads all the roles for this user +func (u *User) GetRoles(ctx context.Context, tx bun.Tx) ([]*Role, error) { + if u == nil { + return nil, errors.New("user cannot be nil") } - return count == 0, nil + u, err := GetByField[User](tx, "id", u.ID). + Relation("Roles").Get(ctx) + if err != nil { + return nil, errors.Wrap(err, "GetByField") + } + return u.Roles, nil +} + +// GetPermissions loads and returns all permissions for this user +func (u *User) GetPermissions(ctx context.Context, tx bun.Tx) ([]*Permission, error) { + if u == nil { + return nil, errors.New("user cannot be nil") + } + return GetList[Permission](tx). + Join("JOIN role_permissions AS rp on rp.permission_id = p.id"). + Join("JOIN user_roles AS ur ON ur.role_id = rp.role_id"). + Where("ur.user_id = ?", u.ID). 
+ GetAll(ctx) +} + +// HasPermission checks if user has a specific permission (including wildcard check) +func (u *User) HasPermission(ctx context.Context, tx bun.Tx, permissionName permissions.Permission) (bool, error) { + if u == nil { + return false, errors.New("user cannot be nil") + } + if permissionName == "" { + return false, errors.New("permissionName cannot be empty") + } + + perms, err := u.GetPermissions(ctx, tx) + if err != nil { + return false, err + } + + for _, p := range perms { + if p.Name == permissionName || p.Name == permissions.Wildcard { + return true, nil + } + } + return false, nil +} + +// HasRole checks if user has a specific role +func (u *User) HasRole(ctx context.Context, tx bun.Tx, roleName roles.Role) (bool, error) { + if u == nil { + return false, errors.New("user cannot be nil") + } + return HasRole(ctx, tx, u.ID, roleName) +} + +// IsAdmin is a convenience method to check if user has admin role +func (u *User) IsAdmin(ctx context.Context, tx bun.Tx) (bool, error) { + if u == nil { + return false, errors.New("user cannot be nil") + } + return u.HasRole(ctx, tx, "admin") +} + +func GetUsers(ctx context.Context, tx bun.Tx, pageOpts *PageOpts) (*List[User], error) { + defaults := &PageOpts{1, 50, bun.OrderAsc, "id"} + return GetList[User](tx).GetPaged(ctx, pageOpts, defaults) +} + +// GetUsersWithRoles queries the database for users with their roles preloaded +func GetUsersWithRoles(ctx context.Context, tx bun.Tx, pageOpts *PageOpts) (*List[User], error) { + defaults := &PageOpts{1, 25, bun.OrderAsc, "id"} + return GetList[User](tx).Relation("Roles").GetPaged(ctx, pageOpts, defaults) } diff --git a/internal/db/userrole.go b/internal/db/userrole.go new file mode 100644 index 0000000..38c1bd1 --- /dev/null +++ b/internal/db/userrole.go @@ -0,0 +1,103 @@ +package db + +import ( + "context" + + "git.haelnorr.com/h/oslstats/internal/permissions" + "git.haelnorr.com/h/oslstats/internal/roles" + "github.com/pkg/errors" + 
"github.com/uptrace/bun" +) + +type UserRole struct { + UserID int `bun:",pk"` + User *User `bun:"rel:belongs-to,join:user_id=id"` + RoleID int `bun:",pk"` + Role *Role `bun:"rel:belongs-to,join:role_id=id"` +} + +// AssignRole grants a role to a user +func AssignRole(ctx context.Context, tx bun.Tx, userID, roleID int, audit *AuditMeta) error { + if userID <= 0 { + return errors.New("userID must be positive") + } + if roleID <= 0 { + return errors.New("roleID must be positive") + } + + userRole := &UserRole{ + UserID: userID, + RoleID: roleID, + } + details := map[string]any{ + "action": "grant", + "role_id": roleID, + } + info := &AuditInfo{ + string(permissions.UsersManageRoles), + "user", + userID, + details, + } + err := Insert(tx, userRole). + ConflictNothing("user_id", "role_id"). + WithAudit(audit, info). + Exec(ctx) + if err != nil { + return errors.Wrap(err, "db.Insert") + } + + return nil +} + +// RevokeRole removes a role from a user +func RevokeRole(ctx context.Context, tx bun.Tx, userID, roleID int, audit *AuditMeta) error { + if userID <= 0 { + return errors.New("userID must be positive") + } + if roleID <= 0 { + return errors.New("roleID must be positive") + } + + details := map[string]any{ + "action": "revoke", + "role_id": roleID, + } + info := &AuditInfo{ + string(permissions.UsersManageRoles), + "user", + userID, + details, + } + err := DeleteItem[UserRole](tx). + Where("user_id = ?", userID). + Where("role_id = ?", roleID). + WithAudit(audit, info). + Delete(ctx) + if err != nil { + return errors.Wrap(err, "DeleteItem") + } + + return nil +} + +// HasRole checks if a user has a specific role +func HasRole(ctx context.Context, tx bun.Tx, userID int, roleName roles.Role) (bool, error) { + if userID <= 0 { + return false, errors.New("userID must be positive") + } + if roleName == "" { + return false, errors.New("roleName cannot be empty") + } + user, err := GetByID[User](tx, userID). 
+ Relation("Roles").Get(ctx) + if err != nil { + return false, errors.Wrap(err, "GetByID") + } + for _, role := range user.Roles { + if role.Name == roleName { + return true, nil + } + } + return false, nil +} diff --git a/internal/discord/api.go b/internal/discord/api.go index aa490b5..fb8c96e 100644 --- a/internal/discord/api.go +++ b/internal/discord/api.go @@ -10,26 +10,6 @@ import ( "github.com/pkg/errors" ) -type OAuthSession struct { - *discordgo.Session -} - -func NewOAuthSession(token *Token) (*OAuthSession, error) { - session, err := discordgo.New("Bearer " + token.AccessToken) - if err != nil { - return nil, errors.Wrap(err, "discordgo.New") - } - return &OAuthSession{Session: session}, nil -} - -func (s *OAuthSession) GetUser() (*discordgo.User, error) { - user, err := s.User("@me") - if err != nil { - return nil, errors.Wrap(err, "s.User") - } - return user, nil -} - // APIClient is an HTTP client wrapper that handles Discord API rate limits type APIClient struct { cfg *Config @@ -38,6 +18,7 @@ type APIClient struct { mu sync.RWMutex buckets map[string]*RateLimitState trustedHost string + bot *BotSession } // NewAPIClient creates a new Discord API client with rate limit handling @@ -51,11 +32,20 @@ func NewAPIClient(cfg *Config, logger *hlog.Logger, trustedhost string) (*APICli if trustedhost == "" { return nil, errors.New("trustedhost cannot be empty") } + bot, err := newBotSession(cfg) + if err != nil { + return nil, errors.Wrap(err, "newBotSession") + } return &APIClient{ client: &http.Client{Timeout: 30 * time.Second}, logger: logger, buckets: make(map[string]*RateLimitState), cfg: cfg, trustedHost: trustedhost, + bot: bot, }, nil } + +func (api *APIClient) Ping() (*discordgo.Application, error) { + return api.bot.Application("@me") +} diff --git a/internal/discord/bot.go b/internal/discord/bot.go new file mode 100644 index 0000000..cc96280 --- /dev/null +++ b/internal/discord/bot.go @@ -0,0 +1,22 @@ +package discord + +import ( + 
"github.com/bwmarrin/discordgo" + "github.com/pkg/errors" +) + +type BotSession struct { + *discordgo.Session +} + +func newBotSession(cfg *Config) (*BotSession, error) { + session, err := discordgo.New("Bot " + cfg.BotToken) + if err != nil { + return nil, errors.Wrap(err, "discordgo.New") + } + return &BotSession{Session: session}, nil +} + +func (api *APIClient) Bot() *BotSession { + return api.bot +} diff --git a/internal/discord/config.go b/internal/discord/config.go index cc086ff..11af109 100644 --- a/internal/discord/config.go +++ b/internal/discord/config.go @@ -12,6 +12,7 @@ type Config struct { ClientSecret string // ENV DISCORD_CLIENT_SECRET: Discord application client secret (required) OAuthScopes string // Authorisation scopes for OAuth RedirectPath string // ENV DISCORD_REDIRECT_PATH: Path for the OAuth redirect handler (required) + BotToken string // ENV DISCORD_BOT_TOKEN: Token for the discord bot (required) } func ConfigFromEnv() (any, error) { @@ -20,6 +21,7 @@ func ConfigFromEnv() (any, error) { ClientSecret: env.String("DISCORD_CLIENT_SECRET", ""), OAuthScopes: getOAuthScopes(), RedirectPath: env.String("DISCORD_REDIRECT_PATH", ""), + BotToken: env.String("DISCORD_BOT_TOKEN", ""), } // Check required fields @@ -32,6 +34,9 @@ func ConfigFromEnv() (any, error) { if cfg.RedirectPath == "" { return nil, errors.New("Envar not set: DISCORD_REDIRECT_PATH") } + if cfg.BotToken == "" { + return nil, errors.New("Envar not set: DISCORD_BOT_TOKEN") + } return cfg, nil } diff --git a/internal/discord/oauth.go b/internal/discord/oauth.go index faff919..5bb8d04 100644 --- a/internal/discord/oauth.go +++ b/internal/discord/oauth.go @@ -8,9 +8,30 @@ import ( "net/url" "strings" + "github.com/bwmarrin/discordgo" "github.com/pkg/errors" ) +type OAuthSession struct { + *discordgo.Session +} + +func NewOAuthSession(token *Token) (*OAuthSession, error) { + session, err := discordgo.New("Bearer " + token.AccessToken) + if err != nil { + return nil, errors.Wrap(err, 
"discordgo.New") + } + return &OAuthSession{Session: session}, nil +} + +func (s *OAuthSession) GetUser() (*discordgo.User, error) { + user, err := s.User("@me") + if err != nil { + return nil, errors.Wrap(err, "s.User") + } + return user, nil +} + // Token represents a response from the Discord OAuth API after a successful authorization request type Token struct { AccessToken string `json:"access_token"` diff --git a/internal/embedfs/embedfs.go b/internal/embedfs/embedfs.go new file mode 100644 index 0000000..3172721 --- /dev/null +++ b/internal/embedfs/embedfs.go @@ -0,0 +1,21 @@ +// Package embedfs creates an embedded filesystem with the static web assets +package embedfs + +import ( + "embed" + "io/fs" + + "github.com/pkg/errors" +) + +//go:embed web/* +var embeddedFiles embed.FS + +// GetEmbeddedFS gets the embedded files +func GetEmbeddedFS() (*fs.FS, error) { + subFS, err := fs.Sub(embeddedFiles, "web") + if err != nil { + return nil, errors.Wrap(err, "fs.Sub") + } + return &subFS, nil +} diff --git a/pkg/embedfs/files/assets/error.png b/internal/embedfs/web/assets/error.png similarity index 100% rename from pkg/embedfs/files/assets/error.png rename to internal/embedfs/web/assets/error.png diff --git a/internal/embedfs/web/assets/favicon.ico b/internal/embedfs/web/assets/favicon.ico new file mode 100644 index 0000000..b6215aa Binary files /dev/null and b/internal/embedfs/web/assets/favicon.ico differ diff --git a/internal/embedfs/web/assets/logo.png b/internal/embedfs/web/assets/logo.png new file mode 100644 index 0000000..457d36b Binary files /dev/null and b/internal/embedfs/web/assets/logo.png differ diff --git a/internal/embedfs/web/css/flatpickr-catppuccin.css b/internal/embedfs/web/css/flatpickr-catppuccin.css new file mode 100644 index 0000000..5746d6c --- /dev/null +++ b/internal/embedfs/web/css/flatpickr-catppuccin.css @@ -0,0 +1,151 @@ +/* Flatpickr Catppuccin Mocha Theme */ +/* Override flatpickr colors to match our custom theme */ + 
+.flatpickr-calendar { + background: #1e1e2e; /* mantle */ + border: 1px solid #45475a; /* surface1 */ + box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.3); +} + +.flatpickr-months { + background: #181825; /* base */ + border-bottom: 1px solid #45475a; /* surface1 */ +} + +.flatpickr-month { + color: #cdd6f4; /* text */ +} + +.flatpickr-current-month .flatpickr-monthDropdown-months { + background: #1e1e2e; /* mantle */ + color: #cdd6f4; /* text */ + border: 1px solid #45475a; /* surface1 */ +} + +.flatpickr-current-month .flatpickr-monthDropdown-months:hover { + background: #313244; /* surface0 */ +} + +.flatpickr-current-month input.cur-year { + color: #cdd6f4; /* text */ + background: #1e1e2e; /* mantle */ +} + +.flatpickr-current-month input.cur-year:hover { + background: #313244; /* surface0 */ +} + +.flatpickr-prev-month, +.flatpickr-next-month { + color: #cdd6f4; /* text */ +} + +.flatpickr-prev-month:hover, +.flatpickr-next-month:hover { + color: #89b4fa; /* blue */ +} + +.flatpickr-weekdays { + background: #181825; /* base */ + border-bottom: 1px solid #45475a; /* surface1 */ +} + +span.flatpickr-weekday { + color: #bac2de; /* subtext0 */ + font-weight: 600; +} + +.flatpickr-days { + background: #1e1e2e; /* mantle */ +} + +.flatpickr-day { + color: #cdd6f4; /* text */ + border: 1px solid transparent; +} + +.flatpickr-day.today { + border-color: #89b4fa; /* blue */ + background: #89b4fa20; /* blue with transparency */ + color: #89b4fa; /* blue */ +} + +.flatpickr-day.today:hover { + background: #89b4fa40; /* blue with more transparency */ + border-color: #89b4fa; /* blue */ + color: #89b4fa; /* blue */ +} + +.flatpickr-day.selected, +.flatpickr-day.startRange, +.flatpickr-day.endRange { + background: #89b4fa; /* blue */ + border-color: #89b4fa; /* blue */ + color: #181825; /* base */ +} + +.flatpickr-day.selected:hover, +.flatpickr-day.startRange:hover, +.flatpickr-day.endRange:hover { + background: #74a7f9; /* slightly lighter blue */ + border-color: #74a7f9; +} 
+ +.flatpickr-day:hover { + background: #313244; /* surface0 */ + border-color: #45475a; /* surface1 */ +} + +.flatpickr-day.prevMonthDay, +.flatpickr-day.nextMonthDay { + color: #585b70; /* surface2 */ +} + +.flatpickr-day.flatpickr-disabled, +.flatpickr-day.flatpickr-disabled:hover { + color: #585b70; /* surface2 */ + cursor: not-allowed; +} + +.flatpickr-day.inRange { + background: #89b4fa30; /* blue with light transparency */ + border-color: transparent; + box-shadow: -5px 0 0 #89b4fa30, 5px 0 0 #89b4fa30; +} + +.flatpickr-time { + background: #181825; /* base */ + border-top: 1px solid #45475a; /* surface1 */ +} + +.flatpickr-time input { + color: #cdd6f4; /* text */ + background: #1e1e2e; /* mantle */ +} + +.flatpickr-time input:hover, +.flatpickr-time input:focus { + background: #313244; /* surface0 */ +} + +.flatpickr-time .flatpickr-time-separator, +.flatpickr-time .flatpickr-am-pm { + color: #cdd6f4; /* text */ +} + +.flatpickr-time .flatpickr-am-pm:hover, +.flatpickr-time .flatpickr-am-pm:focus { + background: #313244; /* surface0 */ +} + +.flatpickr-time .numInputWrapper span.arrowUp:after { + border-bottom-color: #cdd6f4; /* text */ +} + +.flatpickr-time .numInputWrapper span.arrowDown:after { + border-top-color: #cdd6f4; /* text */ +} + +.flatpickr-time .numInputWrapper span:hover { + background: #313244; /* surface0 */ +} diff --git a/pkg/embedfs/files/css/input.css b/internal/embedfs/web/css/input.css similarity index 65% rename from pkg/embedfs/files/css/input.css rename to internal/embedfs/web/css/input.css index a673dd5..2686382 100644 --- a/pkg/embedfs/files/css/input.css +++ b/internal/embedfs/web/css/input.css @@ -15,11 +15,14 @@ --color-maroon: var(--maroon); --color-peach: var(--peach); --color-yellow: var(--yellow); + --color-dark-yellow: var(--dark-yellow); --color-green: var(--green); + --color-dark-green: var(--dark-green); --color-teal: var(--teal); --color-sky: var(--sky); --color-sapphire: var(--sapphire); --color-blue: var(--blue); + 
--color-dark-blue: var(--dark-blue); --color-lavender: var(--lavender); --color-text: var(--text); --color-subtext1: var(--subtext1); @@ -45,11 +48,14 @@ --maroon: hsl(355, 76%, 59%); --peach: hsl(22, 99%, 52%); --yellow: hsl(35, 77%, 49%); + --dark-yellow: hsl(35, 50%, 85%); --green: hsl(109, 58%, 40%); + --dark-green: hsl(109, 35%, 85%); --teal: hsl(183, 74%, 35%); --sky: hsl(197, 97%, 46%); --sapphire: hsl(189, 70%, 42%); --blue: hsl(220, 91%, 54%); + --dark-blue: hsl(220, 50%, 85%); --lavender: hsl(231, 97%, 72%); --text: hsl(234, 16%, 35%); --subtext1: hsl(233, 13%, 41%); @@ -75,11 +81,14 @@ --maroon: hsl(350, 65%, 77%); --peach: hsl(23, 92%, 75%); --yellow: hsl(41, 86%, 83%); + --dark-yellow: hsl(41, 30%, 25%); --green: hsl(115, 54%, 76%); + --dark-green: hsl(115, 25%, 22%); --teal: hsl(170, 57%, 73%); --sky: hsl(189, 71%, 73%); --sapphire: hsl(199, 76%, 69%); --blue: hsl(217, 92%, 76%); + --dark-blue: hsl(217, 30%, 25%); --lavender: hsl(232, 97%, 85%); --text: hsl(226, 64%, 88%); --subtext1: hsl(227, 35%, 80%); @@ -118,3 +127,74 @@ font-weight: 700; font-style: italic; } + +/* Custom Scrollbar Styles - Catppuccin Theme */ + +/* Firefox */ +* { + scrollbar-width: thin; + scrollbar-color: var(--surface1) var(--mantle); +} + +/* Webkit browsers (Chrome, Safari, Edge) */ +::-webkit-scrollbar { + width: 8px; + height: 8px; +} + +::-webkit-scrollbar-track { + background: var(--mantle); + border-radius: 4px; +} + +::-webkit-scrollbar-thumb { + background: var(--surface1); + border-radius: 4px; + border: 2px solid var(--mantle); +} + +::-webkit-scrollbar-thumb:hover { + background: var(--surface2); +} + +::-webkit-scrollbar-thumb:active { + background: var(--overlay0); +} + +/* Specific styling for multi-select dropdowns */ +.multi-select-dropdown::-webkit-scrollbar { + width: 6px; +} + +.multi-select-dropdown::-webkit-scrollbar-track { + background: var(--base); + border-radius: 3px; +} + +.multi-select-dropdown::-webkit-scrollbar-thumb { + background: 
var(--surface2); + border-radius: 3px; + border: 1px solid var(--base); +} + +.multi-select-dropdown::-webkit-scrollbar-thumb:hover { + background: var(--overlay0); +} + +/* Specific styling for modal content */ +.modal-scrollable::-webkit-scrollbar { + width: 8px; +} + +.modal-scrollable::-webkit-scrollbar-track { + background: var(--base); +} + +.modal-scrollable::-webkit-scrollbar-thumb { + background: var(--surface1); + border-radius: 4px; +} + +.modal-scrollable::-webkit-scrollbar-thumb:hover { + background: var(--surface2); +} diff --git a/pkg/embedfs/files/css/output.css b/internal/embedfs/web/css/output.css similarity index 51% rename from pkg/embedfs/files/css/output.css rename to internal/embedfs/web/css/output.css index 1f8ff44..2f3e384 100644 --- a/pkg/embedfs/files/css/output.css +++ b/internal/embedfs/web/css/output.css @@ -9,9 +9,15 @@ --font-mono: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, 'Liberation Mono', 'Courier New', monospace; --spacing: 0.25rem; + --breakpoint-lg: 64rem; --breakpoint-xl: 80rem; + --breakpoint-2xl: 96rem; + --container-sm: 24rem; --container-md: 28rem; + --container-lg: 32rem; --container-2xl: 42rem; + --container-3xl: 48rem; + --container-5xl: 64rem; --container-7xl: 80rem; --text-xs: 0.75rem; --text-xs--line-height: calc(1 / 0.75); @@ -31,14 +37,18 @@ --text-6xl--line-height: 1; --text-9xl: 8rem; --text-9xl--line-height: 1; + --font-weight-normal: 400; --font-weight-medium: 500; --font-weight-semibold: 600; --font-weight-bold: 700; --tracking-tight: -0.025em; + --tracking-wider: 0.05em; --leading-relaxed: 1.625; - --radius-sm: 0.25rem; --radius-lg: 0.5rem; --radius-xl: 0.75rem; + --ease-in: cubic-bezier(0.4, 0, 1, 1); + --ease-out: cubic-bezier(0, 0, 0.2, 1); + --animate-spin: spin 1s linear infinite; --default-transition-duration: 150ms; --default-transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); --default-font-family: var(--font-sans); @@ -194,6 +204,15 @@ } } @layer utilities { + .pointer-events-auto 
{ + pointer-events: auto; + } + .pointer-events-none { + pointer-events: none; + } + .collapse { + visibility: collapse; + } .visible { visibility: visible; } @@ -211,12 +230,18 @@ .absolute { position: absolute; } + .fixed { + position: fixed; + } .relative { position: relative; } .static { position: static; } + .inset-0 { + inset: calc(var(--spacing) * 0); + } .end-0 { inset-inline-end: calc(var(--spacing) * 0); } @@ -226,12 +251,24 @@ .top-0 { top: calc(var(--spacing) * 0); } + .top-1\/2 { + top: calc(1/2 * 100%); + } .top-4 { top: calc(var(--spacing) * 4); } + .top-20 { + top: calc(var(--spacing) * 20); + } .right-0 { right: calc(var(--spacing) * 0); } + .right-3 { + right: calc(var(--spacing) * 3); + } + .right-5 { + right: calc(var(--spacing) * 5); + } .bottom-0 { bottom: calc(var(--spacing) * 0); } @@ -241,15 +278,51 @@ .z-10 { z-index: 10; } + .z-40 { + z-index: 40; + } + .z-50 { + z-index: 50; + } + .container { + width: 100%; + @media (width >= 40rem) { + max-width: 40rem; + } + @media (width >= 48rem) { + max-width: 48rem; + } + @media (width >= 64rem) { + max-width: 64rem; + } + @media (width >= 80rem) { + max-width: 80rem; + } + @media (width >= 96rem) { + max-width: 96rem; + } + } .mx-auto { margin-inline: auto; } + .-mt-2 { + margin-top: calc(var(--spacing) * -2); + } + .mt-0\.5 { + margin-top: calc(var(--spacing) * 0.5); + } + .mt-1 { + margin-top: calc(var(--spacing) * 1); + } .mt-1\.5 { margin-top: calc(var(--spacing) * 1.5); } .mt-2 { margin-top: calc(var(--spacing) * 2); } + .mt-3 { + margin-top: calc(var(--spacing) * 3); + } .mt-4 { margin-top: calc(var(--spacing) * 4); } @@ -271,24 +344,54 @@ .mt-12 { margin-top: calc(var(--spacing) * 12); } - .mt-20 { - margin-top: calc(var(--spacing) * 20); - } .mt-24 { margin-top: calc(var(--spacing) * 24); } - .mr-5 { - margin-right: calc(var(--spacing) * 5); + .mt-25 { + margin-top: calc(var(--spacing) * 25); + } + .mt-auto { + margin-top: auto; + } + .mb-1 { + margin-bottom: calc(var(--spacing) * 1); + } 
+ .mb-2 { + margin-bottom: calc(var(--spacing) * 2); + } + .mb-3 { + margin-bottom: calc(var(--spacing) * 3); + } + .mb-4 { + margin-bottom: calc(var(--spacing) * 4); + } + .mb-6 { + margin-bottom: calc(var(--spacing) * 6); + } + .mb-8 { + margin-bottom: calc(var(--spacing) * 8); } .mb-auto { margin-bottom: auto; } + .ml-1 { + margin-left: calc(var(--spacing) * 1); + } .ml-2 { margin-left: calc(var(--spacing) * 2); } + .ml-4 { + margin-left: calc(var(--spacing) * 4); + } .ml-auto { margin-left: auto; } + .line-clamp-2 { + overflow: hidden; + display: -webkit-box; + -webkit-box-orient: vertical; + -webkit-line-clamp: 2; + } .block { display: block; } @@ -310,14 +413,31 @@ .inline-flex { display: inline-flex; } + .table { + display: table; + } .size-5 { width: calc(var(--spacing) * 5); height: calc(var(--spacing) * 5); } - .size-6 { - width: calc(var(--spacing) * 6); + .h-1 { + height: calc(var(--spacing) * 1); + } + .h-3 { + height: calc(var(--spacing) * 3); + } + .h-4 { + height: calc(var(--spacing) * 4); + } + .h-5 { + height: calc(var(--spacing) * 5); + } + .h-6 { height: calc(var(--spacing) * 6); } + .h-12 { + height: calc(var(--spacing) * 12); + } .h-16 { height: calc(var(--spacing) * 16); } @@ -327,14 +447,59 @@ .h-screen { height: 100vh; } + .max-h-60 { + max-height: calc(var(--spacing) * 60); + } + .max-h-96 { + max-height: calc(var(--spacing) * 96); + } + .max-h-\[90vh\] { + max-height: 90vh; + } + .max-h-\[600px\] { + max-height: 600px; + } + .min-h-48 { + min-height: calc(var(--spacing) * 48); + } + .min-h-96 { + min-height: calc(var(--spacing) * 96); + } + .min-h-\[calc\(100vh-200px\)\] { + min-height: calc(100vh - 200px); + } + .min-h-full { + min-height: 100%; + } + .w-3 { + width: calc(var(--spacing) * 3); + } + .w-4 { + width: calc(var(--spacing) * 4); + } + .w-5 { + width: calc(var(--spacing) * 5); + } + .w-6 { + width: calc(var(--spacing) * 6); + } + .w-12 { + width: calc(var(--spacing) * 12); + } + .w-20 { + width: calc(var(--spacing) * 20); + } + 
.w-24 { + width: calc(var(--spacing) * 24); + } .w-26 { width: calc(var(--spacing) * 26); } - .w-36 { - width: calc(var(--spacing) * 36); + .w-48 { + width: calc(var(--spacing) * 48); } - .w-82 { - width: calc(var(--spacing) * 82); + .w-80 { + width: calc(var(--spacing) * 80); } .w-fit { width: fit-content; @@ -345,35 +510,105 @@ .max-w-2xl { max-width: var(--container-2xl); } + .max-w-3xl { + max-width: var(--container-3xl); + } + .max-w-5xl { + max-width: var(--container-5xl); + } .max-w-7xl { max-width: var(--container-7xl); } + .max-w-48 { + max-width: calc(var(--spacing) * 48); + } + .max-w-80 { + max-width: calc(var(--spacing) * 80); + } .max-w-100 { max-width: calc(var(--spacing) * 100); } .max-w-md { max-width: var(--container-md); } + .max-w-screen-2xl { + max-width: var(--breakpoint-2xl); + } + .max-w-screen-lg { + max-width: var(--breakpoint-lg); + } .max-w-screen-xl { max-width: var(--breakpoint-xl); } + .max-w-sm { + max-width: var(--container-sm); + } + .min-w-0 { + min-width: calc(var(--spacing) * 0); + } .flex-1 { flex: 1; } - .translate-x-0 { - --tw-translate-x: calc(var(--spacing) * 0); + .flex-shrink-0 { + flex-shrink: 0; + } + .shrink-0 { + flex-shrink: 0; + } + .-translate-y-1\/2 { + --tw-translate-y: calc(calc(1/2 * 100%) * -1); translate: var(--tw-translate-x) var(--tw-translate-y); } - .translate-x-\[100\%\] { - --tw-translate-x: 100%; + .translate-y-0 { + --tw-translate-y: calc(var(--spacing) * 0); translate: var(--tw-translate-x) var(--tw-translate-y); } + .translate-y-4 { + --tw-translate-y: calc(var(--spacing) * 4); + translate: var(--tw-translate-x) var(--tw-translate-y); + } + .scale-95 { + --tw-scale-x: 95%; + --tw-scale-y: 95%; + --tw-scale-z: 95%; + scale: var(--tw-scale-x) var(--tw-scale-y); + } + .scale-100 { + --tw-scale-x: 100%; + --tw-scale-y: 100%; + --tw-scale-z: 100%; + scale: var(--tw-scale-x) var(--tw-scale-y); + } .transform { transform: var(--tw-rotate-x,) var(--tw-rotate-y,) var(--tw-rotate-z,) var(--tw-skew-x,) 
var(--tw-skew-y,); } + .animate-spin { + animation: var(--animate-spin); + } + .cursor-grab { + cursor: grab; + } + .cursor-not-allowed { + cursor: not-allowed; + } .cursor-pointer { cursor: pointer; } + .resize { + resize: both; + } + .resize-none { + resize: none; + } + .grid-cols-1 { + grid-template-columns: repeat(1, minmax(0, 1fr)); + } + .grid-cols-2 { + grid-template-columns: repeat(2, minmax(0, 1fr)); + } + .grid-cols-7 { + grid-template-columns: repeat(7, minmax(0, 1fr)); + } .flex-col { flex-direction: column; } @@ -386,6 +621,9 @@ .items-center { align-items: center; } + .items-start { + align-items: flex-start; + } .justify-between { justify-content: space-between; } @@ -395,9 +633,18 @@ .justify-end { justify-content: flex-end; } + .gap-1 { + gap: calc(var(--spacing) * 1); + } .gap-2 { gap: calc(var(--spacing) * 2); } + .gap-3 { + gap: calc(var(--spacing) * 3); + } + .gap-4 { + gap: calc(var(--spacing) * 4); + } .gap-6 { gap: calc(var(--spacing) * 6); } @@ -411,12 +658,70 @@ margin-block-end: calc(calc(var(--spacing) * 1) * calc(1 - var(--tw-space-y-reverse))); } } + .space-y-2 { + :where(& > :not(:last-child)) { + --tw-space-y-reverse: 0; + margin-block-start: calc(calc(var(--spacing) * 2) * var(--tw-space-y-reverse)); + margin-block-end: calc(calc(var(--spacing) * 2) * calc(1 - var(--tw-space-y-reverse))); + } + } + .space-y-3 { + :where(& > :not(:last-child)) { + --tw-space-y-reverse: 0; + margin-block-start: calc(calc(var(--spacing) * 3) * var(--tw-space-y-reverse)); + margin-block-end: calc(calc(var(--spacing) * 3) * calc(1 - var(--tw-space-y-reverse))); + } + } + .space-y-4 { + :where(& > :not(:last-child)) { + --tw-space-y-reverse: 0; + margin-block-start: calc(calc(var(--spacing) * 4) * var(--tw-space-y-reverse)); + margin-block-end: calc(calc(var(--spacing) * 4) * calc(1 - var(--tw-space-y-reverse))); + } + } + .space-y-6 { + :where(& > :not(:last-child)) { + --tw-space-y-reverse: 0; + margin-block-start: calc(calc(var(--spacing) * 6) * 
var(--tw-space-y-reverse)); + margin-block-end: calc(calc(var(--spacing) * 6) * calc(1 - var(--tw-space-y-reverse))); + } + } .gap-x-2 { column-gap: calc(var(--spacing) * 2); } + .gap-x-6 { + column-gap: calc(var(--spacing) * 6); + } + .space-x-2 { + :where(& > :not(:last-child)) { + --tw-space-x-reverse: 0; + margin-inline-start: calc(calc(var(--spacing) * 2) * var(--tw-space-x-reverse)); + margin-inline-end: calc(calc(var(--spacing) * 2) * calc(1 - var(--tw-space-x-reverse))); + } + } + .space-x-3 { + :where(& > :not(:last-child)) { + --tw-space-x-reverse: 0; + margin-inline-start: calc(calc(var(--spacing) * 3) * var(--tw-space-x-reverse)); + margin-inline-end: calc(calc(var(--spacing) * 3) * calc(1 - var(--tw-space-x-reverse))); + } + } + .space-x-4 { + :where(& > :not(:last-child)) { + --tw-space-x-reverse: 0; + margin-inline-start: calc(calc(var(--spacing) * 4) * var(--tw-space-x-reverse)); + margin-inline-end: calc(calc(var(--spacing) * 4) * calc(1 - var(--tw-space-x-reverse))); + } + } + .gap-y-3 { + row-gap: calc(var(--spacing) * 3); + } .gap-y-4 { row-gap: calc(var(--spacing) * 4); } + .gap-y-5 { + row-gap: calc(var(--spacing) * 5); + } .divide-y { :where(& > :not(:last-child)) { --tw-divide-y-reverse: 0; @@ -426,11 +731,21 @@ border-bottom-width: calc(1px * calc(1 - var(--tw-divide-y-reverse))); } } + .divide-surface1 { + :where(& > :not(:last-child)) { + border-color: var(--surface1); + } + } .divide-surface2 { :where(& > :not(:last-child)) { border-color: var(--surface2); } } + .truncate { + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + } .overflow-hidden { overflow: hidden; } @@ -440,6 +755,9 @@ .overflow-x-hidden { overflow-x: hidden; } + .overflow-y-auto { + overflow-y: auto; + } .rounded { border-radius: 0.25rem; } @@ -449,12 +767,13 @@ .rounded-lg { border-radius: var(--radius-lg); } - .rounded-sm { - border-radius: var(--radius-sm); - } .rounded-xl { border-radius: var(--radius-xl); } + .rounded-t-lg { + 
border-top-left-radius: var(--radius-lg); + border-top-right-radius: var(--radius-lg); + } .border { border-style: var(--tw-border-style); border-width: 1px; @@ -463,6 +782,31 @@ border-style: var(--tw-border-style); border-width: 2px; } + .border-t { + border-top-style: var(--tw-border-style); + border-top-width: 1px; + } + .border-b { + border-bottom-style: var(--tw-border-style); + border-bottom-width: 1px; + } + .border-b-2 { + border-bottom-style: var(--tw-border-style); + border-bottom-width: 2px; + } + .border-dashed { + --tw-border-style: dashed; + border-style: dashed; + } + .border-blue { + border-color: var(--blue); + } + .border-blue\/50 { + border-color: var(--blue); + @supports (color: color-mix(in lab, red, red)) { + border-color: color-mix(in oklab, var(--blue) 50%, transparent); + } + } .border-green { border-color: var(--green); } @@ -472,39 +816,144 @@ .border-red { border-color: var(--red); } + .border-red\/30 { + border-color: var(--red); + @supports (color: color-mix(in lab, red, red)) { + border-color: color-mix(in oklab, var(--red) 30%, transparent); + } + } .border-surface1 { border-color: var(--surface1); } + .border-surface2 { + border-color: var(--surface2); + } .border-transparent { border-color: transparent; } + .border-yellow { + border-color: var(--yellow); + } + .border-yellow\/30 { + border-color: var(--yellow); + @supports (color: color-mix(in lab, red, red)) { + border-color: color-mix(in oklab, var(--yellow) 30%, transparent); + } + } + .border-yellow\/40 { + border-color: var(--yellow); + @supports (color: color-mix(in lab, red, red)) { + border-color: color-mix(in oklab, var(--yellow) 40%, transparent); + } + } .bg-base { background-color: var(--base); } + .bg-base\/75 { + background-color: var(--base); + @supports (color: color-mix(in lab, red, red)) { + background-color: color-mix(in oklab, var(--base) 75%, transparent); + } + } + .bg-blue { + background-color: var(--blue); + } + .bg-blue\/20 { + background-color: 
var(--blue); + @supports (color: color-mix(in lab, red, red)) { + background-color: color-mix(in oklab, var(--blue) 20%, transparent); + } + } .bg-crust { background-color: var(--crust); } - .bg-dark-red { - background-color: var(--dark-red); + .bg-crust\/80 { + background-color: var(--crust); + @supports (color: color-mix(in lab, red, red)) { + background-color: color-mix(in oklab, var(--crust) 80%, transparent); + } + } + .bg-dark-blue { + background-color: var(--dark-blue); + } + .bg-dark-green { + background-color: var(--dark-green); + } + .bg-dark-yellow { + background-color: var(--dark-yellow); } .bg-green { background-color: var(--green); } + .bg-green\/20 { + background-color: var(--green); + @supports (color: color-mix(in lab, red, red)) { + background-color: color-mix(in oklab, var(--green) 20%, transparent); + } + } .bg-mantle { background-color: var(--mantle); } .bg-mauve { background-color: var(--mauve); } + .bg-peach { + background-color: var(--peach); + } + .bg-red { + background-color: var(--red); + } + .bg-red\/10 { + background-color: var(--red); + @supports (color: color-mix(in lab, red, red)) { + background-color: color-mix(in oklab, var(--red) 10%, transparent); + } + } + .bg-red\/20 { + background-color: var(--red); + @supports (color: color-mix(in lab, red, red)) { + background-color: color-mix(in oklab, var(--red) 20%, transparent); + } + } .bg-sapphire { background-color: var(--sapphire); } .bg-surface0 { background-color: var(--surface0); } + .bg-surface1 { + background-color: var(--surface1); + } + .bg-surface1\/30 { + background-color: var(--surface1); + @supports (color: color-mix(in lab, red, red)) { + background-color: color-mix(in oklab, var(--surface1) 30%, transparent); + } + } + .bg-surface2 { + background-color: var(--surface2); + } .bg-teal { background-color: var(--teal); } + .bg-yellow { + background-color: var(--yellow); + } + .bg-yellow\/10 { + background-color: var(--yellow); + @supports (color: color-mix(in lab, red, red)) 
{ + background-color: color-mix(in oklab, var(--yellow) 10%, transparent); + } + } + .bg-yellow\/20 { + background-color: var(--yellow); + @supports (color: color-mix(in lab, red, red)) { + background-color: color-mix(in oklab, var(--yellow) 20%, transparent); + } + } + .p-1 { + padding: calc(var(--spacing) * 1); + } .p-2 { padding: calc(var(--spacing) * 2); } @@ -514,6 +963,12 @@ .p-4 { padding: calc(var(--spacing) * 4); } + .p-6 { + padding: calc(var(--spacing) * 6); + } + .p-8 { + padding: calc(var(--spacing) * 8); + } .px-2 { padding-inline: calc(var(--spacing) * 2); } @@ -526,6 +981,12 @@ .px-5 { padding-inline: calc(var(--spacing) * 5); } + .px-6 { + padding-inline: calc(var(--spacing) * 6); + } + .py-0\.5 { + padding-block: calc(var(--spacing) * 0.5); + } .py-1 { padding-block: calc(var(--spacing) * 1); } @@ -535,15 +996,45 @@ .py-3 { padding-block: calc(var(--spacing) * 3); } + .py-4 { + padding-block: calc(var(--spacing) * 4); + } .py-6 { padding-block: calc(var(--spacing) * 6); } .py-8 { padding-block: calc(var(--spacing) * 8); } + .pt-2 { + padding-top: calc(var(--spacing) * 2); + } + .pt-4 { + padding-top: calc(var(--spacing) * 4); + } + .pt-5 { + padding-top: calc(var(--spacing) * 5); + } + .pt-6 { + padding-top: calc(var(--spacing) * 6); + } + .pr-2 { + padding-right: calc(var(--spacing) * 2); + } + .pr-10 { + padding-right: calc(var(--spacing) * 10); + } + .pb-3 { + padding-bottom: calc(var(--spacing) * 3); + } + .pb-4 { + padding-bottom: calc(var(--spacing) * 4); + } .pb-6 { padding-bottom: calc(var(--spacing) * 6); } + .pl-3 { + padding-left: calc(var(--spacing) * 3); + } .text-center { text-align: center; } @@ -588,6 +1079,10 @@ font-size: var(--text-xs); line-height: var(--tw-leading, var(--text-xs--line-height)); } + .leading-6 { + --tw-leading: calc(var(--spacing) * 6); + line-height: calc(var(--spacing) * 6); + } .leading-relaxed { --tw-leading: var(--leading-relaxed); line-height: var(--leading-relaxed); @@ -600,6 +1095,10 @@ 
--tw-font-weight: var(--font-weight-medium); font-weight: var(--font-weight-medium); } + .font-normal { + --tw-font-weight: var(--font-weight-normal); + font-weight: var(--font-weight-normal); + } .font-semibold { --tw-font-weight: var(--font-weight-semibold); font-weight: var(--font-weight-semibold); @@ -608,15 +1107,34 @@ --tw-tracking: var(--tracking-tight); letter-spacing: var(--tracking-tight); } + .tracking-wider { + --tw-tracking: var(--tracking-wider); + letter-spacing: var(--tracking-wider); + } + .text-wrap { + text-wrap: wrap; + } + .break-words { + overflow-wrap: break-word; + } .break-all { word-break: break-all; } + .whitespace-nowrap { + white-space: nowrap; + } .whitespace-pre-wrap { white-space: pre-wrap; } + .text-blue { + color: var(--blue); + } .text-crust { color: var(--crust); } + .text-green { + color: var(--green); + } .text-mantle { color: var(--mantle); } @@ -635,9 +1153,33 @@ .text-text { color: var(--text); } + .text-yellow { + color: var(--yellow); + } + .text-yellow\/80 { + color: var(--yellow); + @supports (color: color-mix(in lab, red, red)) { + color: color-mix(in oklab, var(--yellow) 80%, transparent); + } + } + .capitalize { + text-transform: capitalize; + } + .lowercase { + text-transform: lowercase; + } + .uppercase { + text-transform: uppercase; + } + .italic { + font-style: italic; + } .opacity-0 { opacity: 0%; } + .opacity-50 { + opacity: 50%; + } .opacity-100 { opacity: 100%; } @@ -649,15 +1191,57 @@ --tw-shadow: 0 1px 3px 0 var(--tw-shadow-color, rgb(0 0 0 / 0.1)), 0 1px 2px -1px var(--tw-shadow-color, rgb(0 0 0 / 0.1)); box-shadow: var(--tw-inset-shadow), var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow); } + .shadow-xl { + --tw-shadow: 0 20px 25px -5px var(--tw-shadow-color, rgb(0 0 0 / 0.1)), 0 8px 10px -6px var(--tw-shadow-color, rgb(0 0 0 / 0.1)); + box-shadow: var(--tw-inset-shadow), var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), 
var(--tw-shadow); + } + .blur { + --tw-blur: blur(8px); + filter: var(--tw-blur,) var(--tw-brightness,) var(--tw-contrast,) var(--tw-grayscale,) var(--tw-hue-rotate,) var(--tw-invert,) var(--tw-saturate,) var(--tw-sepia,) var(--tw-drop-shadow,); + } + .filter { + filter: var(--tw-blur,) var(--tw-brightness,) var(--tw-contrast,) var(--tw-grayscale,) var(--tw-hue-rotate,) var(--tw-invert,) var(--tw-saturate,) var(--tw-sepia,) var(--tw-drop-shadow,); + } .transition { transition-property: color, background-color, border-color, outline-color, text-decoration-color, fill, stroke, --tw-gradient-from, --tw-gradient-via, --tw-gradient-to, opacity, box-shadow, transform, translate, scale, rotate, filter, -webkit-backdrop-filter, backdrop-filter, display, content-visibility, overlay, pointer-events; transition-timing-function: var(--tw-ease, var(--default-transition-timing-function)); transition-duration: var(--tw-duration, var(--default-transition-duration)); } + .transition-all { + transition-property: all; + transition-timing-function: var(--tw-ease, var(--default-transition-timing-function)); + transition-duration: var(--tw-duration, var(--default-transition-duration)); + } + .transition-colors { + transition-property: color, background-color, border-color, outline-color, text-decoration-color, fill, stroke, --tw-gradient-from, --tw-gradient-via, --tw-gradient-to; + transition-timing-function: var(--tw-ease, var(--default-transition-timing-function)); + transition-duration: var(--tw-duration, var(--default-transition-duration)); + } + .transition-opacity { + transition-property: opacity; + transition-timing-function: var(--tw-ease, var(--default-transition-timing-function)); + transition-duration: var(--tw-duration, var(--default-transition-duration)); + } + .duration-150 { + --tw-duration: 150ms; + transition-duration: 150ms; + } .duration-200 { --tw-duration: 200ms; transition-duration: 200ms; } + .duration-300 { + --tw-duration: 300ms; + transition-duration: 300ms; + 
} + .ease-in { + --tw-ease: var(--ease-in); + transition-timing-function: var(--ease-in); + } + .ease-out { + --tw-ease: var(--ease-out); + transition-timing-function: var(--ease-out); + } .outline-none { --tw-outline-style: none; outline-style: none; @@ -673,6 +1257,40 @@ } } } + .hover\:border-blue { + &:hover { + @media (hover: hover) { + border-color: var(--blue); + } + } + } + .hover\:border-surface2 { + &:hover { + @media (hover: hover) { + border-color: var(--surface2); + } + } + } + .hover\:bg-blue\/75 { + &:hover { + @media (hover: hover) { + background-color: var(--blue); + @supports (color: color-mix(in lab, red, red)) { + background-color: color-mix(in oklab, var(--blue) 75%, transparent); + } + } + } + } + .hover\:bg-blue\/80 { + &:hover { + @media (hover: hover) { + background-color: var(--blue); + @supports (color: color-mix(in lab, red, red)) { + background-color: color-mix(in oklab, var(--blue) 80%, transparent); + } + } + } + } .hover\:bg-crust { &:hover { @media (hover: hover) { @@ -690,6 +1308,13 @@ } } } + .hover\:bg-maroon { + &:hover { + @media (hover: hover) { + background-color: var(--maroon); + } + } + } .hover\:bg-mauve\/75 { &:hover { @media (hover: hover) { @@ -710,6 +1335,26 @@ } } } + .hover\:bg-red\/75 { + &:hover { + @media (hover: hover) { + background-color: var(--red); + @supports (color: color-mix(in lab, red, red)) { + background-color: color-mix(in oklab, var(--red) 75%, transparent); + } + } + } + } + .hover\:bg-red\/80 { + &:hover { + @media (hover: hover) { + background-color: var(--red); + @supports (color: color-mix(in lab, red, red)) { + background-color: color-mix(in oklab, var(--red) 80%, transparent); + } + } + } + } .hover\:bg-sapphire\/75 { &:hover { @media (hover: hover) { @@ -720,6 +1365,37 @@ } } } + .hover\:bg-sky { + &:hover { + @media (hover: hover) { + background-color: var(--sky); + } + } + } + .hover\:bg-surface0 { + &:hover { + @media (hover: hover) { + background-color: var(--surface0); + } + } + } + 
.hover\:bg-surface1 { + &:hover { + @media (hover: hover) { + background-color: var(--surface1); + } + } + } + .hover\:bg-surface1\/50 { + &:hover { + @media (hover: hover) { + background-color: var(--surface1); + @supports (color: color-mix(in lab, red, red)) { + background-color: color-mix(in oklab, var(--surface1) 50%, transparent); + } + } + } + } .hover\:bg-surface2 { &:hover { @media (hover: hover) { @@ -727,6 +1403,13 @@ } } } + .hover\:bg-teal { + &:hover { + @media (hover: hover) { + background-color: var(--teal); + } + } + } .hover\:bg-teal\/75 { &:hover { @media (hover: hover) { @@ -737,6 +1420,23 @@ } } } + .hover\:text-blue { + &:hover { + @media (hover: hover) { + color: var(--blue); + } + } + } + .hover\:text-blue\/80 { + &:hover { + @media (hover: hover) { + color: var(--blue); + @supports (color: color-mix(in lab, red, red)) { + color: color-mix(in oklab, var(--blue) 80%, transparent); + } + } + } + } .hover\:text-green { &:hover { @media (hover: hover) { @@ -754,6 +1454,33 @@ } } } + .hover\:text-red { + &:hover { + @media (hover: hover) { + color: var(--red); + } + } + } + .hover\:text-red\/75 { + &:hover { + @media (hover: hover) { + color: var(--red); + @supports (color: color-mix(in lab, red, red)) { + color: color-mix(in oklab, var(--red) 75%, transparent); + } + } + } + } + .hover\:text-red\/80 { + &:hover { + @media (hover: hover) { + color: var(--red); + @supports (color: color-mix(in lab, red, red)) { + color: color-mix(in oklab, var(--red) 80%, transparent); + } + } + } + } .hover\:text-subtext1 { &:hover { @media (hover: hover) { @@ -783,6 +1510,33 @@ border-color: var(--red); } } + .focus\:ring-2 { + &:focus { + --tw-ring-shadow: var(--tw-ring-inset,) 0 0 0 calc(2px + var(--tw-ring-offset-width)) var(--tw-ring-color, currentcolor); + box-shadow: var(--tw-inset-shadow), var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow); + } + } + .focus\:ring-blue { + &:focus { + --tw-ring-color: 
var(--blue); + } + } + .focus\:ring-mauve { + &:focus { + --tw-ring-color: var(--mauve); + } + } + .focus\:outline-none { + &:focus { + --tw-outline-style: none; + outline-style: none; + } + } + .active\:cursor-grabbing { + &:active { + cursor: grabbing; + } + } .disabled\:pointer-events-none { &:disabled { pointer-events: none; @@ -793,6 +1547,27 @@ cursor: default; } } + .disabled\:cursor-not-allowed { + &:disabled { + cursor: not-allowed; + } + } + .disabled\:bg-blue\/40 { + &:disabled { + background-color: var(--blue); + @supports (color: color-mix(in lab, red, red)) { + background-color: color-mix(in oklab, var(--blue) 40%, transparent); + } + } + } + .disabled\:bg-green\/40 { + &:disabled { + background-color: var(--green); + @supports (color: color-mix(in lab, red, red)) { + background-color: color-mix(in oklab, var(--green) 40%, transparent); + } + } + } .disabled\:bg-green\/60 { &:disabled { background-color: var(--green); @@ -811,6 +1586,21 @@ inset-inline-end: calc(var(--spacing) * 6); } } + .sm\:mx-0 { + @media (width >= 40rem) { + margin-inline: calc(var(--spacing) * 0); + } + } + .sm\:mt-0 { + @media (width >= 40rem) { + margin-top: calc(var(--spacing) * 0); + } + } + .sm\:ml-4 { + @media (width >= 40rem) { + margin-left: calc(var(--spacing) * 4); + } + } .sm\:block { @media (width >= 40rem) { display: block; @@ -826,6 +1616,78 @@ display: none; } } + .sm\:inline { + @media (width >= 40rem) { + display: inline; + } + } + .sm\:h-10 { + @media (width >= 40rem) { + height: calc(var(--spacing) * 10); + } + } + .sm\:w-10 { + @media (width >= 40rem) { + width: calc(var(--spacing) * 10); + } + } + .sm\:w-auto { + @media (width >= 40rem) { + width: auto; + } + } + .sm\:w-full { + @media (width >= 40rem) { + width: 100%; + } + } + .sm\:max-w-lg { + @media (width >= 40rem) { + max-width: var(--container-lg); + } + } + .sm\:translate-y-0 { + @media (width >= 40rem) { + --tw-translate-y: calc(var(--spacing) * 0); + translate: var(--tw-translate-x) 
var(--tw-translate-y); + } + } + .sm\:scale-95 { + @media (width >= 40rem) { + --tw-scale-x: 95%; + --tw-scale-y: 95%; + --tw-scale-z: 95%; + scale: var(--tw-scale-x) var(--tw-scale-y); + } + } + .sm\:scale-100 { + @media (width >= 40rem) { + --tw-scale-x: 100%; + --tw-scale-y: 100%; + --tw-scale-z: 100%; + scale: var(--tw-scale-x) var(--tw-scale-y); + } + } + .sm\:flex-row { + @media (width >= 40rem) { + flex-direction: row; + } + } + .sm\:flex-row-reverse { + @media (width >= 40rem) { + flex-direction: row-reverse; + } + } + .sm\:items-center { + @media (width >= 40rem) { + align-items: center; + } + } + .sm\:items-start { + @media (width >= 40rem) { + align-items: flex-start; + } + } .sm\:justify-between { @media (width >= 40rem) { justify-content: space-between; @@ -836,22 +1698,52 @@ gap: calc(var(--spacing) * 2); } } + .sm\:p-6 { + @media (width >= 40rem) { + padding: calc(var(--spacing) * 6); + } + } .sm\:p-7 { @media (width >= 40rem) { padding: calc(var(--spacing) * 7); } } + .sm\:p-8 { + @media (width >= 40rem) { + padding: calc(var(--spacing) * 8); + } + } .sm\:px-6 { @media (width >= 40rem) { padding-inline: calc(var(--spacing) * 6); } } + .sm\:pb-4 { + @media (width >= 40rem) { + padding-bottom: calc(var(--spacing) * 4); + } + } + .sm\:text-left { + @media (width >= 40rem) { + text-align: left; + } + } .sm\:text-4xl { @media (width >= 40rem) { font-size: var(--text-4xl); line-height: var(--tw-leading, var(--text-4xl--line-height)); } } + .md\:col-span-2 { + @media (width >= 48rem) { + grid-column: span 2 / span 2; + } + } + .md\:grid-cols-2 { + @media (width >= 48rem) { + grid-template-columns: repeat(2, minmax(0, 1fr)); + } + } .md\:gap-8 { @media (width >= 48rem) { gap: calc(var(--spacing) * 8); @@ -872,6 +1764,16 @@ inset-inline-end: calc(var(--spacing) * 8); } } + .lg\:col-span-2 { + @media (width >= 64rem) { + grid-column: span 2 / span 2; + } + } + .lg\:col-span-3 { + @media (width >= 64rem) { + grid-column: span 3 / span 3; + } + } .lg\:mt-0 { 
@media (width >= 64rem) { margin-top: calc(var(--spacing) * 0); @@ -887,6 +1789,11 @@ display: inline; } } + .lg\:grid-cols-3 { + @media (width >= 64rem) { + grid-template-columns: repeat(3, minmax(0, 1fr)); + } + } .lg\:items-end { @media (width >= 64rem) { align-items: flex-end; @@ -937,11 +1844,14 @@ --maroon: hsl(355, 76%, 59%); --peach: hsl(22, 99%, 52%); --yellow: hsl(35, 77%, 49%); + --dark-yellow: hsl(35, 50%, 85%); --green: hsl(109, 58%, 40%); + --dark-green: hsl(109, 35%, 85%); --teal: hsl(183, 74%, 35%); --sky: hsl(197, 97%, 46%); --sapphire: hsl(189, 70%, 42%); --blue: hsl(220, 91%, 54%); + --dark-blue: hsl(220, 50%, 85%); --lavender: hsl(231, 97%, 72%); --text: hsl(234, 16%, 35%); --subtext1: hsl(233, 13%, 41%); @@ -966,11 +1876,14 @@ --maroon: hsl(350, 65%, 77%); --peach: hsl(23, 92%, 75%); --yellow: hsl(41, 86%, 83%); + --dark-yellow: hsl(41, 30%, 25%); --green: hsl(115, 54%, 76%); + --dark-green: hsl(115, 25%, 22%); --teal: hsl(170, 57%, 73%); --sky: hsl(189, 71%, 73%); --sapphire: hsl(199, 76%, 69%); --blue: hsl(217, 92%, 76%); + --dark-blue: hsl(217, 30%, 25%); --lavender: hsl(232, 97%, 85%); --text: hsl(226, 64%, 88%); --subtext1: hsl(227, 35%, 80%); @@ -1005,6 +1918,57 @@ font-weight: 700; font-style: italic; } +* { + scrollbar-width: thin; + scrollbar-color: var(--surface1) var(--mantle); +} +::-webkit-scrollbar { + width: 8px; + height: 8px; +} +::-webkit-scrollbar-track { + background: var(--mantle); + border-radius: 4px; +} +::-webkit-scrollbar-thumb { + background: var(--surface1); + border-radius: 4px; + border: 2px solid var(--mantle); +} +::-webkit-scrollbar-thumb:hover { + background: var(--surface2); +} +::-webkit-scrollbar-thumb:active { + background: var(--overlay0); +} +.multi-select-dropdown::-webkit-scrollbar { + width: 6px; +} +.multi-select-dropdown::-webkit-scrollbar-track { + background: var(--base); + border-radius: 3px; +} +.multi-select-dropdown::-webkit-scrollbar-thumb { + background: var(--surface2); + border-radius: 3px; 
+ border: 1px solid var(--base); +} +.multi-select-dropdown::-webkit-scrollbar-thumb:hover { + background: var(--overlay0); +} +.modal-scrollable::-webkit-scrollbar { + width: 8px; +} +.modal-scrollable::-webkit-scrollbar-track { + background: var(--base); +} +.modal-scrollable::-webkit-scrollbar-thumb { + background: var(--surface1); + border-radius: 4px; +} +.modal-scrollable::-webkit-scrollbar-thumb:hover { + background: var(--surface2); +} @property --tw-translate-x { syntax: "*"; inherits: false; @@ -1020,6 +1984,21 @@ inherits: false; initial-value: 0; } +@property --tw-scale-x { + syntax: "*"; + inherits: false; + initial-value: 1; +} +@property --tw-scale-y { + syntax: "*"; + inherits: false; + initial-value: 1; +} +@property --tw-scale-z { + syntax: "*"; + inherits: false; + initial-value: 1; +} @property --tw-rotate-x { syntax: "*"; inherits: false; @@ -1045,6 +2024,11 @@ inherits: false; initial-value: 0; } +@property --tw-space-x-reverse { + syntax: "*"; + inherits: false; + initial-value: 0; +} @property --tw-divide-y-reverse { syntax: "*"; inherits: false; @@ -1132,22 +2116,88 @@ inherits: false; initial-value: 0 0 #0000; } +@property --tw-blur { + syntax: "*"; + inherits: false; +} +@property --tw-brightness { + syntax: "*"; + inherits: false; +} +@property --tw-contrast { + syntax: "*"; + inherits: false; +} +@property --tw-grayscale { + syntax: "*"; + inherits: false; +} +@property --tw-hue-rotate { + syntax: "*"; + inherits: false; +} +@property --tw-invert { + syntax: "*"; + inherits: false; +} +@property --tw-opacity { + syntax: "*"; + inherits: false; +} +@property --tw-saturate { + syntax: "*"; + inherits: false; +} +@property --tw-sepia { + syntax: "*"; + inherits: false; +} +@property --tw-drop-shadow { + syntax: "*"; + inherits: false; +} +@property --tw-drop-shadow-color { + syntax: "*"; + inherits: false; +} +@property --tw-drop-shadow-alpha { + syntax: ""; + inherits: false; + initial-value: 100%; +} +@property --tw-drop-shadow-size { + 
syntax: "*"; + inherits: false; +} @property --tw-duration { syntax: "*"; inherits: false; } +@property --tw-ease { + syntax: "*"; + inherits: false; +} +@keyframes spin { + to { + transform: rotate(360deg); + } +} @layer properties { @supports ((-webkit-hyphens: none) and (not (margin-trim: inline))) or ((-moz-orient: inline) and (not (color:rgb(from red r g b)))) { *, ::before, ::after, ::backdrop { --tw-translate-x: 0; --tw-translate-y: 0; --tw-translate-z: 0; + --tw-scale-x: 1; + --tw-scale-y: 1; + --tw-scale-z: 1; --tw-rotate-x: initial; --tw-rotate-y: initial; --tw-rotate-z: initial; --tw-skew-x: initial; --tw-skew-y: initial; --tw-space-y-reverse: 0; + --tw-space-x-reverse: 0; --tw-divide-y-reverse: 0; --tw-border-style: solid; --tw-leading: initial; @@ -1167,7 +2217,21 @@ --tw-ring-offset-width: 0px; --tw-ring-offset-color: #fff; --tw-ring-offset-shadow: 0 0 #0000; + --tw-blur: initial; + --tw-brightness: initial; + --tw-contrast: initial; + --tw-grayscale: initial; + --tw-hue-rotate: initial; + --tw-invert: initial; + --tw-opacity: initial; + --tw-saturate: initial; + --tw-sepia: initial; + --tw-drop-shadow: initial; + --tw-drop-shadow-color: initial; + --tw-drop-shadow-alpha: 100%; + --tw-drop-shadow-size: initial; --tw-duration: initial; + --tw-ease: initial; } } } diff --git a/internal/embedfs/web/js/admin.js b/internal/embedfs/web/js/admin.js new file mode 100644 index 0000000..7b66770 --- /dev/null +++ b/internal/embedfs/web/js/admin.js @@ -0,0 +1,263 @@ +// Admin dashboard utilities + +// Format JSON for display in modals +function formatJSON(json) { + try { + const parsed = typeof json === "string" ? 
JSON.parse(json) : json; + return JSON.stringify(parsed, null, 2); + } catch (e) { + return json; + } +} + +// Initialize flatpickr for all date inputs +function initFlatpickr() { + document.querySelectorAll(".flatpickr-date").forEach(function (input) { + if (!input._flatpickr) { + flatpickr(input, { + dateFormat: "d/m/Y", + allowInput: true, + }); + } + }); +} + +// Submit the audit filter form with specific page/perPage/order params +function submitAuditFilter(page, perPage, order, orderBy) { + const form = document.getElementById("audit-filters-form"); + if (!form) return; + + // Create hidden inputs for pagination/sorting if they don't exist + let pageInput = form.querySelector('input[name="page"]'); + if (!pageInput) { + pageInput = document.createElement("input"); + pageInput.type = "hidden"; + pageInput.name = "page"; + form.appendChild(pageInput); + } + pageInput.value = page; + + let perPageInput = form.querySelector('input[name="per_page"]'); + if (!perPageInput) { + perPageInput = document.createElement("input"); + perPageInput.type = "hidden"; + perPageInput.name = "per_page"; + form.appendChild(perPageInput); + } + perPageInput.value = perPage; + + let orderInput = form.querySelector('input[name="order"]'); + if (!orderInput) { + orderInput = document.createElement("input"); + orderInput.type = "hidden"; + orderInput.name = "order"; + form.appendChild(orderInput); + } + orderInput.value = order; + + let orderByInput = form.querySelector('input[name="order_by"]'); + if (!orderByInput) { + orderByInput = document.createElement("input"); + orderByInput.type = "hidden"; + orderByInput.name = "order_by"; + form.appendChild(orderByInput); + } + orderByInput.value = orderBy; + + htmx.trigger(form, "submit"); +} + +// Sort by column - toggle direction if same column +function sortAuditColumn(field, currentOrder, currentOrderBy) { + const page = 1; // Reset to first page when sorting + const perPageSelect = document.getElementById("per-page-select"); + const 
perPage = perPageSelect ? parseInt(perPageSelect.value, 10) || 25 : 25; + let newOrder, newOrderBy; + + if (currentOrderBy === field) { + // Toggle order + newOrder = currentOrder === "ASC" ? "DESC" : "ASC"; + newOrderBy = field; + } else { + // New column, default to DESC + newOrder = "DESC"; + newOrderBy = field; + } + + submitAuditFilter(page, perPage, newOrder, newOrderBy); +} + +// Clear all audit filters +function clearAuditFilters() { + const form = document.getElementById("audit-filters-form"); + if (!form) return; + + form.reset(); + + // Clear flatpickr instances + document.querySelectorAll(".flatpickr-date").forEach(function (input) { + var fp = input._flatpickr; + if (fp) fp.clear(); + }); + + // Clear multi-select dropdowns + document.querySelectorAll(".multi-select-container").forEach(function (container) { + var hiddenInput = container.querySelector('input[type="hidden"]'); + if (hiddenInput) hiddenInput.value = ""; + var selectedDisplay = container.querySelector(".multi-select-selected"); + if (selectedDisplay) + selectedDisplay.innerHTML = 'Select...'; + container.querySelectorAll(".multi-select-option").forEach(function (opt) { + opt.classList.remove("bg-blue", "text-mantle"); + opt.classList.add("hover:bg-surface1"); + }); + }); + + // Trigger form submission with reset pagination + submitAuditFilter(1, 25, "DESC", "created_at"); +} + +// Toggle multi-select dropdown visibility +function toggleMultiSelect(containerId) { + var dropdown = document.getElementById(containerId + "-dropdown"); + if (dropdown) { + dropdown.classList.toggle("hidden"); + } +} + +// Toggle multi-select option selection +function toggleMultiSelectOption(containerId, value, label) { + var container = document.getElementById(containerId); + var hiddenInput = container.querySelector('input[type="hidden"]'); + var selectedDisplay = container.querySelector(".multi-select-selected"); + + var values = hiddenInput.value ? 
hiddenInput.value.split(",") : []; + var index = values.indexOf(value); + + if (index > -1) { + values.splice(index, 1); + } else { + values.push(value); + } + + hiddenInput.value = values.join(","); + + var option = container.querySelector('[data-value="' + value + '"]'); + if (option) { + if (index > -1) { + option.classList.remove("bg-blue", "text-mantle"); + option.classList.add("hover:bg-surface1"); + } else { + option.classList.add("bg-blue", "text-mantle"); + option.classList.remove("hover:bg-surface1"); + } + } + + if (values.length === 0) { + selectedDisplay.innerHTML = 'Select...'; + } else if (values.length === 1) { + selectedDisplay.innerHTML = "" + label + ""; + } else { + selectedDisplay.innerHTML = "" + values.length + " selected"; + } + + // Trigger form submission + document.getElementById("audit-filters-form").requestSubmit(); +} + +// Submit the users page with specific page/perPage/order params +function submitUsersPage(page, perPage, order, orderBy) { + const formData = new FormData(); + formData.append("page", page); + formData.append("per_page", perPage); + formData.append("order", order); + formData.append("order_by", orderBy); + + htmx.ajax("POST", "/admin/users", { + target: "#users-list-container", + swap: "outerHTML", + values: Object.fromEntries(formData), + }); +} + +// Sort users column - toggle direction if same column +function sortUsersColumn(field, currentOrder, currentOrderBy) { + const page = 1; // Reset to first page when sorting + const perPageSelect = document.getElementById("users-per-page-select"); + const perPage = perPageSelect ? parseInt(perPageSelect.value, 10) || 25 : 25; + let newOrder, newOrderBy; + + if (currentOrderBy === field) { + // Toggle order + newOrder = currentOrder === "ASC" ? 
"DESC" : "ASC"; + newOrderBy = field; + } else { + // New column, default to ASC + newOrder = "ASC"; + newOrderBy = field; + } + + submitUsersPage(page, perPage, newOrder, newOrderBy); +} + +// Submit the roles page with specific page/perPage/order params +function submitRolesPage(page, perPage, order, orderBy) { + const formData = new FormData(); + formData.append("page", page); + formData.append("per_page", perPage); + formData.append("order", order); + formData.append("order_by", orderBy); + + htmx.ajax("POST", "/admin/roles", { + target: "#roles-list-container", + swap: "outerHTML", + values: Object.fromEntries(formData), + }); +} + +// Sort roles column - toggle direction if same column +function sortRolesColumn(field, currentOrder, currentOrderBy) { + const page = 1; // Reset to first page when sorting + const perPageSelect = document.getElementById("roles-per-page-select"); + const perPage = perPageSelect ? parseInt(perPageSelect.value, 10) || 25 : 25; + let newOrder, newOrderBy; + + if (currentOrderBy === field) { + // Toggle order + newOrder = currentOrder === "ASC" ? "DESC" : "ASC"; + newOrderBy = field; + } else { + // New column, default to ASC + newOrder = "ASC"; + newOrderBy = field; + } + + submitRolesPage(page, perPage, newOrder, newOrderBy); +} + +// Handle HTMX navigation and initialization +// Tab navigation active state is handled by tabs.js (generic). +// This file only handles admin-specific concerns (flatpickr, multi-select). 
+document.addEventListener("DOMContentLoaded", function () { + // Initialize flatpickr on page load + initFlatpickr(); + + document.body.addEventListener("htmx:afterSwap", function (event) { + // Re-initialize flatpickr after admin content swap + if ( + event.detail.target.id === "admin-content" || + event.detail.target.id === "audit-results-container" + ) { + initFlatpickr(); + } + }); + + // Close multi-select dropdowns when clicking outside + document.addEventListener("click", function (evt) { + if (!evt.target.closest(".multi-select-container")) { + document.querySelectorAll(".multi-select-dropdown").forEach(function (d) { + d.classList.add("hidden"); + }); + } + }); +}); diff --git a/internal/embedfs/web/js/copytoclipboard.js b/internal/embedfs/web/js/copytoclipboard.js new file mode 100644 index 0000000..1800876 --- /dev/null +++ b/internal/embedfs/web/js/copytoclipboard.js @@ -0,0 +1,17 @@ +function copyToClipboard(elementId, buttonId) { + const element = document.getElementById(elementId); + const button = document.getElementById(buttonId); + + navigator.clipboard.writeText(element.innerText) + .then(() => { + const originalText = button.innerText; + button.innerText = 'Copied!'; + setTimeout(() => { + button.innerText = originalText; + }, 2000); + }) + .catch(err => { + console.error('Failed to copy:', err); + button.innerText = 'Failed'; + }); +} diff --git a/internal/embedfs/web/js/pagination.js b/internal/embedfs/web/js/pagination.js new file mode 100644 index 0000000..3174797 --- /dev/null +++ b/internal/embedfs/web/js/pagination.js @@ -0,0 +1,58 @@ +function paginateData( + formID, + rootPath, + initPage, + initPerPage, + initOrder, + initOrderBy, +) { + return { + page: initPage, + perPage: initPerPage, + order: initOrder || "ASC", + orderBy: initOrderBy || "name", + + goToPage(n) { + this.page = n; + this.submit(); + }, + + handleSortChange(value) { + const [field, direction] = value.split("|"); + this.orderBy = field; + this.order = direction; + 
this.page = 1; // Reset to first page when sorting + this.submit(); + }, + + sortByColumn(field) { + if (this.orderBy === field) { + // Toggle order if same column + this.order = this.order === "ASC" ? "DESC" : "ASC"; + } else { + // New column, default to DESC + this.orderBy = field; + this.order = "DESC"; + } + this.page = 1; // Reset to first page when sorting + this.submit(); + }, + + setPerPage(n) { + this.perPage = n; + this.page = 1; // Reset to first page when changing per page + this.submit(); + }, + + submit() { + var url = `${rootPath}?page=${this.page}&per_page=${this.perPage}&order=${this.order}&order_by=${this.orderBy}`; + htmx.find("#pagination-page").value = this.page; + htmx.find("#pagination-per-page").value = this.perPage; + htmx.find("#sort-order").value = this.order; + htmx.find("#sort-order-by").value = this.orderBy; + htmx.find(`#${formID}`).setAttribute("hx-post", url); + htmx.process(`#${formID}`); + htmx.trigger(`#${formID}`, "submit"); + }, + }; +} diff --git a/internal/embedfs/web/js/tabs.js b/internal/embedfs/web/js/tabs.js new file mode 100644 index 0000000..57309f7 --- /dev/null +++ b/internal/embedfs/web/js/tabs.js @@ -0,0 +1,50 @@ +// Generic tab navigation handler +// Manages active tab styling after HTMX content swaps. +// +// Usage: Add data-tab-nav="" to your