mirror of
https://github.com/get-drexa/drive.git
synced 2025-11-30 21:41:39 +00:00
feat: initial backend scaffolding
migrating away from convex Co-authored-by: Ona <no-reply@ona.com>
This commit is contained in:
19
apps/backend/internal/auth/err.go
Normal file
19
apps/backend/internal/auth/err.go
Normal file
@@ -0,0 +1,19 @@
|
||||
package auth
|
||||
|
||||
import "fmt"
|
||||
|
||||
// InvalidAccessTokenError reports that an access token failed validation.
// It wraps the underlying cause, which callers can recover via errors.Unwrap.
type InvalidAccessTokenError struct {
	err error
}

// newInvalidAccessTokenError wraps cause in an *InvalidAccessTokenError.
func newInvalidAccessTokenError(cause error) *InvalidAccessTokenError {
	return &InvalidAccessTokenError{err: cause}
}

// Error implements the error interface.
func (e *InvalidAccessTokenError) Error() string {
	return "invalid access token: " + fmt.Sprintf("%v", e.err)
}

// Unwrap exposes the wrapped cause to errors.Is / errors.As.
func (e *InvalidAccessTokenError) Unwrap() error {
	return e.err
}
|
||||
90
apps/backend/internal/auth/http.go
Normal file
90
apps/backend/internal/auth/http.go
Normal file
@@ -0,0 +1,90 @@
|
||||
package auth
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"github.com/gofiber/fiber/v2"
|
||||
)
|
||||
|
||||
// authServiceKey is the fiber Locals key under which RegisterAPIRoutes
// stashes the *Service and mustAuthService retrieves it.
const authServiceKey = "authService"
|
||||
|
||||
// loginRequest is the JSON body accepted by POST /auth/login.
type loginRequest struct {
	Email    string `json:"email"`
	Password string `json:"password"`
}

// registerRequest is the JSON body accepted by POST /auth/register.
type registerRequest struct {
	Email       string `json:"email"`
	Password    string `json:"password"`
	DisplayName string `json:"displayName"`
}

// loginResponse is the JSON reply for both /auth/login and /auth/register:
// the user plus a freshly issued token pair.
type loginResponse struct {
	User         User   `json:"user"`
	AccessToken  string `json:"accessToken"`
	RefreshToken string `json:"refreshToken"`
}
|
||||
|
||||
func RegisterAPIRoutes(api fiber.Router, s *Service) {
|
||||
auth := api.Group("/auth", func(c *fiber.Ctx) error {
|
||||
c.Locals(authServiceKey, s)
|
||||
return c.Next()
|
||||
})
|
||||
|
||||
auth.Post("/login", login)
|
||||
auth.Post("/register", register)
|
||||
}
|
||||
|
||||
// mustAuthService returns the *Service stored in c.Locals by the /auth
// group middleware. The type assertion panics if the middleware did not
// run, which would be a programming error (route mounted outside the group).
func mustAuthService(c *fiber.Ctx) *Service {
	return c.Locals(authServiceKey).(*Service)
}
|
||||
|
||||
func login(c *fiber.Ctx) error {
|
||||
s := mustAuthService(c)
|
||||
|
||||
req := new(loginRequest)
|
||||
if err := c.BodyParser(req); err != nil {
|
||||
return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid request"})
|
||||
}
|
||||
|
||||
result, err := s.LoginWithEmailAndPassword(c.Context(), req.Email, req.Password)
|
||||
if err != nil {
|
||||
if errors.Is(err, ErrInvalidCredentials) {
|
||||
return c.Status(fiber.StatusUnauthorized).JSON(fiber.Map{"error": "Invalid credentials"})
|
||||
}
|
||||
return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Internal server error"})
|
||||
}
|
||||
|
||||
return c.JSON(loginResponse{
|
||||
User: result.User,
|
||||
AccessToken: result.AccessToken,
|
||||
RefreshToken: result.RefreshToken,
|
||||
})
|
||||
}
|
||||
|
||||
func register(c *fiber.Ctx) error {
|
||||
s := mustAuthService(c)
|
||||
|
||||
req := new(registerRequest)
|
||||
if err := c.BodyParser(req); err != nil {
|
||||
return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid request"})
|
||||
}
|
||||
|
||||
result, err := s.Register(c.Context(), registerOptions{
|
||||
email: req.Email,
|
||||
password: req.Password,
|
||||
displayName: req.DisplayName,
|
||||
})
|
||||
if err != nil {
|
||||
if errors.Is(err, ErrUserExists) {
|
||||
return c.Status(fiber.StatusConflict).JSON(fiber.Map{"error": "User already exists"})
|
||||
}
|
||||
return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Internal server error"})
|
||||
}
|
||||
|
||||
return c.JSON(loginResponse{
|
||||
User: result.User,
|
||||
AccessToken: result.AccessToken,
|
||||
RefreshToken: result.RefreshToken,
|
||||
})
|
||||
}
|
||||
132
apps/backend/internal/auth/password.go
Normal file
132
apps/backend/internal/auth/password.go
Normal file
@@ -0,0 +1,132 @@
|
||||
package auth
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"crypto/subtle"
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/crypto/argon2"
|
||||
)
|
||||
|
||||
// argon2id parameters used for all newly created password hashes.
// (Verification reads the parameters embedded in each stored hash, so
// these can be tuned without invalidating existing hashes.)
const (
	memory      = 64 * 1024 // in KiB, i.e. 64 MiB per hash computation
	iterations  = 3
	parallelism = 2
	saltLength  = 16 // random salt bytes per hash
	keyLength   = 32 // derived key bytes
)
|
||||
|
||||
var (
	// ErrInvalidHash is returned when an encoded hash is not in the
	// expected "$argon2id$v=..$m=..,t=..,p=..$salt$hash" form.
	ErrInvalidHash = errors.New("invalid hash format")
	// ErrIncompatibleHash is returned when the hash was produced by an
	// algorithm other than argon2id.
	ErrIncompatibleHash = errors.New("incompatible hash algorithm")
	// ErrIncompatibleVersion is returned when the hash was produced by a
	// different argon2 version than this binary links against.
	ErrIncompatibleVersion = errors.New("incompatible argon2 version")
)
|
||||
|
||||
// argon2Hash holds the parameters and raw bytes decoded from an encoded
// argon2id hash string (see decodeHash).
type argon2Hash struct {
	memory      uint32
	iterations  uint32
	parallelism uint8
	salt        []byte
	hash        []byte
}
|
||||
|
||||
func HashPassword(password string) (string, error) {
|
||||
salt := make([]byte, saltLength)
|
||||
if _, err := rand.Read(salt); err != nil {
|
||||
return "", fmt.Errorf("failed to generate salt: %w", err)
|
||||
}
|
||||
|
||||
hash := argon2.IDKey(
|
||||
[]byte(password),
|
||||
salt,
|
||||
iterations,
|
||||
memory,
|
||||
parallelism,
|
||||
keyLength,
|
||||
)
|
||||
|
||||
b64Salt := base64.RawStdEncoding.EncodeToString(salt)
|
||||
b64Hash := base64.RawStdEncoding.EncodeToString(hash)
|
||||
|
||||
encodedHash := fmt.Sprintf(
|
||||
"$argon2id$v=%d$m=%d,t=%d,p=%d$%s$%s",
|
||||
argon2.Version,
|
||||
memory,
|
||||
iterations,
|
||||
parallelism,
|
||||
b64Salt,
|
||||
b64Hash,
|
||||
)
|
||||
|
||||
return encodedHash, nil
|
||||
}
|
||||
|
||||
func VerifyPassword(password, encodedHash string) (bool, error) {
|
||||
h, err := decodeHash(encodedHash)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
otherHash := argon2.IDKey(
|
||||
[]byte(password),
|
||||
h.salt,
|
||||
h.iterations,
|
||||
h.memory,
|
||||
h.parallelism,
|
||||
uint32(len(h.hash)),
|
||||
)
|
||||
|
||||
if subtle.ConstantTimeCompare(h.hash, otherHash) == 1 {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func decodeHash(encodedHash string) (*argon2Hash, error) {
|
||||
parts := strings.Split(encodedHash, "$")
|
||||
if len(parts) != 6 {
|
||||
return nil, ErrInvalidHash
|
||||
}
|
||||
|
||||
if parts[1] != "argon2id" {
|
||||
return nil, ErrIncompatibleHash
|
||||
}
|
||||
|
||||
var version int
|
||||
if _, err := fmt.Sscanf(parts[2], "v=%d", &version); err != nil {
|
||||
return nil, fmt.Errorf("failed to parse version: %w", err)
|
||||
}
|
||||
if version != argon2.Version {
|
||||
return nil, ErrIncompatibleVersion
|
||||
}
|
||||
|
||||
h := &argon2Hash{}
|
||||
if _, err := fmt.Sscanf(
|
||||
parts[3],
|
||||
"m=%d,t=%d,p=%d",
|
||||
&h.memory,
|
||||
&h.iterations,
|
||||
&h.parallelism,
|
||||
); err != nil {
|
||||
return nil, fmt.Errorf("failed to parse parameters: %w", err)
|
||||
}
|
||||
|
||||
salt, err := base64.RawStdEncoding.DecodeString(parts[4])
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to decode salt: %w", err)
|
||||
}
|
||||
h.salt = salt
|
||||
|
||||
hash, err := base64.RawStdEncoding.DecodeString(parts[5])
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to decode hash: %w", err)
|
||||
}
|
||||
h.hash = hash
|
||||
|
||||
return h, nil
|
||||
}
|
||||
112
apps/backend/internal/auth/service.go
Normal file
112
apps/backend/internal/auth/service.go
Normal file
@@ -0,0 +1,112 @@
|
||||
package auth
|
||||
|
||||
import (
	"context"
	"database/sql"
	"encoding/hex"
	"errors"

	"github.com/uptrace/bun"
)
|
||||
|
||||
// LoginResult bundles the authenticated user with a freshly issued token
// pair. RefreshToken carries the hex-encoded raw token handed to the
// client; only its hash is persisted server-side.
type LoginResult struct {
	User         User
	AccessToken  string
	RefreshToken string
}
|
||||
|
||||
// ErrInvalidCredentials is returned when an email/password pair does not
// match a stored user.
var ErrInvalidCredentials = errors.New("invalid credentials")

// ErrUserExists is returned by Register when the email is already taken.
var ErrUserExists = errors.New("user already exists")
|
||||
|
||||
// Service implements authentication — registration, login, and token
// issuance — backed by the given database and token configuration.
type Service struct {
	db          *bun.DB
	tokenConfig TokenConfig
}
|
||||
|
||||
// registerOptions carries the inputs for Service.Register.
type registerOptions struct {
	displayName string
	email       string
	password    string
}
|
||||
|
||||
func NewService(db *bun.DB, tokenConfig TokenConfig) *Service {
|
||||
return &Service{
|
||||
db: db,
|
||||
tokenConfig: tokenConfig,
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Service) LoginWithEmailAndPassword(ctx context.Context, email, password string) (*LoginResult, error) {
|
||||
var user User
|
||||
|
||||
err := s.db.NewSelect().Model(&user).Where("email = ?", email).Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ok, err := VerifyPassword(password, user.Password)
|
||||
if err != nil || !ok {
|
||||
return nil, ErrInvalidCredentials
|
||||
}
|
||||
|
||||
at, err := GenerateAccessToken(&user, &s.tokenConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rt, err := GenerateRefreshToken(&user, &s.tokenConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
_, err = s.db.NewInsert().Model(rt).Exec(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &LoginResult{
|
||||
User: user,
|
||||
AccessToken: at,
|
||||
RefreshToken: hex.EncodeToString(rt.Token),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (s *Service) Register(ctx context.Context, opts registerOptions) (*LoginResult, error) {
|
||||
user := User{
|
||||
Email: opts.email,
|
||||
DisplayName: opts.displayName,
|
||||
}
|
||||
|
||||
exists, err := s.db.NewSelect().Model(&user).Where("email = ?", opts.email).Exists(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if exists {
|
||||
return nil, ErrUserExists
|
||||
}
|
||||
|
||||
user.Password, err = HashPassword(opts.password)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
_, err = s.db.NewInsert().Model(&user).Exec(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
at, err := GenerateAccessToken(&user, &s.tokenConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rt, err := GenerateRefreshToken(&user, &s.tokenConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &LoginResult{
|
||||
User: user,
|
||||
AccessToken: at,
|
||||
RefreshToken: hex.EncodeToString(rt.Token),
|
||||
}, nil
|
||||
}
|
||||
91
apps/backend/internal/auth/tokens.go
Normal file
91
apps/backend/internal/auth/tokens.go
Normal file
@@ -0,0 +1,91 @@
|
||||
package auth
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/golang-jwt/jwt/v5"
|
||||
"github.com/google/uuid"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
const (
	// accessTokenValidFor is the lifetime of a signed JWT access token.
	accessTokenValidFor = time.Minute * 15
	// refreshTokenByteLength is the number of random bytes in a refresh token.
	refreshTokenByteLength = 32
	// refreshTokenValidFor is the lifetime of a refresh token.
	refreshTokenValidFor = time.Hour * 24 * 30
)
|
||||
|
||||
// TokenConfig carries the JWT issuer/audience claims and the HMAC signing
// key shared by token generation and validation.
type TokenConfig struct {
	Issuer    string
	Audience  string
	SecretKey []byte
}
|
||||
|
||||
// RefreshToken is the persisted form of a refresh token. Only the SHA-256
// hash of the random token is stored; the raw bytes live in Token
// (`bun:"-"`, never persisted) just long enough to be handed to the client.
type RefreshToken struct {
	bun.BaseModel `bun:"refresh_tokens"`

	ID        uuid.UUID `bun:",pk,type:uuid"`
	UserID    uuid.UUID `bun:"user_id,notnull"`
	Token     []byte    `bun:"-"`
	TokenHash string    `bun:"token_hash,notnull"`
	ExpiresAt time.Time `bun:"expires_at,notnull"`
	CreatedAt time.Time `bun:"created_at,notnull"`
}
|
||||
|
||||
func GenerateAccessToken(user *User, c *TokenConfig) (string, error) {
|
||||
now := time.Now()
|
||||
|
||||
token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.RegisteredClaims{
|
||||
Issuer: c.Issuer,
|
||||
Audience: jwt.ClaimStrings{c.Audience},
|
||||
Subject: user.ID.String(),
|
||||
ExpiresAt: jwt.NewNumericDate(now.Add(accessTokenValidFor)),
|
||||
IssuedAt: jwt.NewNumericDate(now),
|
||||
})
|
||||
|
||||
signed, err := token.SignedString(c.SecretKey)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to sign token: %w", err)
|
||||
}
|
||||
|
||||
return signed, nil
|
||||
}
|
||||
|
||||
func GenerateRefreshToken(user *User, c *TokenConfig) (*RefreshToken, error) {
|
||||
now := time.Now()
|
||||
|
||||
buf := make([]byte, refreshTokenByteLength)
|
||||
if _, err := rand.Read(buf); err != nil {
|
||||
return nil, fmt.Errorf("failed to generate refresh token: %w", err)
|
||||
}
|
||||
|
||||
id, err := uuid.NewV7()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to generate refresh token: %w", err)
|
||||
}
|
||||
|
||||
h := sha256.Sum256(buf)
|
||||
hex := hex.EncodeToString(h[:])
|
||||
|
||||
return &RefreshToken{
|
||||
ID: id,
|
||||
UserID: user.ID,
|
||||
Token: buf,
|
||||
TokenHash: hex,
|
||||
ExpiresAt: now.Add(refreshTokenValidFor),
|
||||
CreatedAt: now,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func ParseAccessToken(token string, c *TokenConfig) (*jwt.RegisteredClaims, error) {
|
||||
parsed, err := jwt.ParseWithClaims(token, &jwt.RegisteredClaims{}, func(token *jwt.Token) (any, error) {
|
||||
return c.SecretKey, nil
|
||||
}, jwt.WithIssuer(c.Issuer), jwt.WithExpirationRequired(), jwt.WithAudience(c.Audience))
|
||||
if err != nil {
|
||||
return nil, newInvalidAccessTokenError(err)
|
||||
}
|
||||
return parsed.Claims.(*jwt.RegisteredClaims), nil
|
||||
}
|
||||
17
apps/backend/internal/auth/user.go
Normal file
17
apps/backend/internal/auth/user.go
Normal file
@@ -0,0 +1,17 @@
|
||||
package auth
|
||||
|
||||
import (
|
||||
"github.com/google/uuid"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
// User is a registered account. Password holds the encoded argon2id hash
// produced by HashPassword and is excluded from JSON serialization.
type User struct {
	bun.BaseModel `bun:"users"`

	ID                uuid.UUID `bun:",pk,type:uuid" json:"id"`
	DisplayName       string    `bun:"display_name,notnull" json:"displayName"`
	Email             string    `bun:"email,unique,notnull" json:"email"`
	Password          string    `bun:"password,notnull" json:"-"`
	StorageUsageBytes int64     `bun:"storage_usage_bytes,notnull" json:"storageUsageBytes"`
	StorageQuotaBytes int64     `bun:"storage_quota_bytes,notnull" json:"storageQuotaBytes"`
}
|
||||
17
apps/backend/internal/database/migrate.go
Normal file
17
apps/backend/internal/database/migrate.go
Normal file
@@ -0,0 +1,17 @@
|
||||
package database
|
||||
|
||||
import (
|
||||
"embed"
|
||||
|
||||
"github.com/uptrace/bun/migrate"
|
||||
)
|
||||
|
||||
// sqlMigrations embeds every SQL migration file shipped with the binary.
//
//go:embed migrations/*.sql
var sqlMigrations embed.FS

// RunMigrations discovers the embedded SQL migrations (the files under
// internal/database/migrations).
//
// NOTE(review): despite its name, this only *discovers* the migrations into
// a fresh migrate.Migrations set — no migrate.Migrator is created, nothing
// is applied to a database, and the populated set is discarded when the
// function returns. Confirm whether callers expect migrations to actually run.
func RunMigrations() error {
	m := migrate.NewMigrations()
	return m.Discover(sqlMigrations)
}
|
||||
122
apps/backend/internal/database/migrations/initial.sql
Normal file
122
apps/backend/internal/database/migrations/initial.sql
Normal file
@@ -0,0 +1,122 @@
|
||||
-- Extensions:
--  * pgcrypto provides gen_random_bytes(), which uuid_generate_v7() below
--    relies on. Previously only uuid-ossp was enabled, and uuid-ossp does
--    NOT ship gen_random_bytes, so the function failed at first call.
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
CREATE EXTENSION IF NOT EXISTS pgcrypto;

-- UUIDv7 generation function (timestamp-ordered UUIDs)
-- Based on the draft RFC: https://datatracker.ietf.org/doc/html/draft-peabody-dispatch-new-uuid-format
CREATE OR REPLACE FUNCTION uuid_generate_v7()
RETURNS UUID
AS $$
DECLARE
    unix_ts_ms BIGINT;
    uuid_bytes BYTEA;
BEGIN
    unix_ts_ms = (EXTRACT(EPOCH FROM CLOCK_TIMESTAMP()) * 1000)::BIGINT;
    -- First 6 bytes carry the big-endian millisecond timestamp;
    -- the remaining 10 bytes stay random.
    uuid_bytes = OVERLAY(gen_random_bytes(16) PLACING
        SUBSTRING(INT8SEND(unix_ts_ms) FROM 3) FROM 1 FOR 6
    );
    -- Set version (7) and variant bits
    uuid_bytes = SET_BYTE(uuid_bytes, 6, (GET_BYTE(uuid_bytes, 6) & 15) | 112);
    uuid_bytes = SET_BYTE(uuid_bytes, 8, (GET_BYTE(uuid_bytes, 8) & 63) | 128);
    RETURN ENCODE(uuid_bytes, 'hex')::UUID;
END;
$$ LANGUAGE plpgsql VOLATILE;

-- ============================================================================
-- Application Tables
-- ============================================================================

-- NOTE(review): display_name is nullable here but the Go model tags it
-- `notnull` — confirm which is intended.
CREATE TABLE IF NOT EXISTS users (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v7(),
    display_name TEXT,
    email TEXT NOT NULL UNIQUE,
    password TEXT NOT NULL,
    storage_usage_bytes BIGINT NOT NULL,
    storage_quota_bytes BIGINT NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- users(email) needs no extra index: the UNIQUE constraint already creates one.

CREATE TABLE IF NOT EXISTS refresh_tokens (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v7(),
    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    token_hash TEXT NOT NULL UNIQUE,
    expires_at TIMESTAMPTZ NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_refresh_tokens_user_id ON refresh_tokens(user_id);
-- token_hash lookups are served by its UNIQUE index; no duplicate index needed.
CREATE INDEX IF NOT EXISTS idx_refresh_tokens_expires_at ON refresh_tokens(expires_at);

CREATE TABLE IF NOT EXISTS directories (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v7(),
    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    name TEXT NOT NULL,
    parent_id UUID REFERENCES directories(id) ON DELETE CASCADE,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    deleted_at TIMESTAMPTZ,
    CONSTRAINT unique_directory_path UNIQUE NULLS NOT DISTINCT (user_id, parent_id, name, deleted_at)
);

CREATE INDEX IF NOT EXISTS idx_directories_user_id ON directories(user_id, deleted_at);
-- The former idx_directories_path referenced a nonexistent "path" column and
-- would have aborted this migration; child listings are indexed via parent_id.
CREATE INDEX IF NOT EXISTS idx_directories_parent_id ON directories(parent_id) WHERE parent_id IS NOT NULL;

CREATE TABLE IF NOT EXISTS files (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v7(),
    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    directory_id UUID REFERENCES directories(id) ON DELETE CASCADE,
    name TEXT NOT NULL,
    size BIGINT NOT NULL,
    mime_type TEXT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    deleted_at TIMESTAMPTZ,
    last_accessed_at TIMESTAMPTZ,
    CONSTRAINT unique_file_in_directory UNIQUE NULLS NOT DISTINCT (user_id, directory_id, name, deleted_at)
);

CREATE INDEX IF NOT EXISTS idx_files_user_id ON files(user_id, deleted_at);
CREATE INDEX IF NOT EXISTS idx_files_directory_id ON files(directory_id) WHERE directory_id IS NOT NULL;
-- The former idx_files_path referenced a nonexistent "path" column; removed.
CREATE INDEX IF NOT EXISTS idx_files_deleted_at ON files(deleted_at) WHERE deleted_at IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_files_last_accessed_at ON files(user_id, deleted_at, last_accessed_at);

CREATE TABLE IF NOT EXISTS file_shares (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v7(),
    file_id UUID NOT NULL REFERENCES files(id) ON DELETE CASCADE,
    share_token TEXT NOT NULL UNIQUE,
    expires_at TIMESTAMPTZ,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- share_token lookups are served by its UNIQUE index; no duplicate index needed.
CREATE INDEX IF NOT EXISTS idx_file_shares_file_id ON file_shares(file_id);
CREATE INDEX IF NOT EXISTS idx_file_shares_expires_at ON file_shares(expires_at) WHERE expires_at IS NOT NULL;

-- ============================================================================
-- Triggers for updated_at timestamps
-- ============================================================================

CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- DROP + CREATE keeps the trigger statements re-runnable, matching the
-- IF NOT EXISTS style used above (CREATE TRIGGER has no IF NOT EXISTS;
-- CREATE OR REPLACE TRIGGER requires PostgreSQL 14+).
DROP TRIGGER IF EXISTS update_users_updated_at ON users;
CREATE TRIGGER update_users_updated_at BEFORE UPDATE ON users
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

DROP TRIGGER IF EXISTS update_directories_updated_at ON directories;
CREATE TRIGGER update_directories_updated_at BEFORE UPDATE ON directories
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

DROP TRIGGER IF EXISTS update_files_updated_at ON files;
CREATE TRIGGER update_files_updated_at BEFORE UPDATE ON files
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

DROP TRIGGER IF EXISTS update_file_shares_updated_at ON file_shares;
CREATE TRIGGER update_file_shares_updated_at BEFORE UPDATE ON file_shares
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
|
||||
15
apps/backend/internal/database/postgres.go
Normal file
15
apps/backend/internal/database/postgres.go
Normal file
@@ -0,0 +1,15 @@
|
||||
package database
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
|
||||
"github.com/uptrace/bun"
|
||||
"github.com/uptrace/bun/dialect/pgdialect"
|
||||
"github.com/uptrace/bun/driver/pgdriver"
|
||||
)
|
||||
|
||||
func NewFromPostgres(url string) *bun.DB {
|
||||
sqldb := sql.OpenDB(pgdriver.NewConnector(pgdriver.WithDSN(url)))
|
||||
db := bun.NewDB(sqldb, pgdialect.New())
|
||||
return db
|
||||
}
|
||||
23
apps/backend/internal/drexa/err.go
Normal file
23
apps/backend/internal/drexa/err.go
Normal file
@@ -0,0 +1,23 @@
|
||||
package drexa
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// ServerConfigError aggregates every problem found while building a
// ServerConfig, so callers see all configuration mistakes at once.
type ServerConfigError struct {
	Errors []error
}

// NewServerConfigError wraps errs in a *ServerConfigError.
func NewServerConfigError(errs ...error) *ServerConfigError {
	return &ServerConfigError{Errors: errs}
}

// Error renders the collected errors as a bulleted multi-line message.
func (e *ServerConfigError) Error() string {
	var sb strings.Builder
	sb.WriteString("invalid server config:\n")
	for _, err := range e.Errors {
		fmt.Fprintf(&sb, " - %s\n", err.Error())
	}
	return sb.String()
}
|
||||
84
apps/backend/internal/drexa/server.go
Normal file
84
apps/backend/internal/drexa/server.go
Normal file
@@ -0,0 +1,84 @@
|
||||
package drexa
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"strconv"
|
||||
|
||||
"github.com/get-drexa/drexa/internal/auth"
|
||||
"github.com/get-drexa/drexa/internal/database"
|
||||
"github.com/gofiber/fiber/v2"
|
||||
)
|
||||
|
||||
// ServerConfig holds everything needed to construct the HTTP server: the
// listen port, the Postgres connection URL, and the JWT signing settings.
type ServerConfig struct {
	Port         int
	PostgresURL  string
	JWTIssuer    string
	JWTAudience  string
	JWTSecretKey []byte // raw HMAC key (hex-decoded from the environment)
}
|
||||
|
||||
func NewServer(c ServerConfig) *fiber.App {
|
||||
app := fiber.New()
|
||||
db := database.NewFromPostgres(c.PostgresURL)
|
||||
|
||||
authService := auth.NewService(db, auth.TokenConfig{
|
||||
Issuer: c.JWTIssuer,
|
||||
Audience: c.JWTAudience,
|
||||
SecretKey: c.JWTSecretKey,
|
||||
})
|
||||
|
||||
api := app.Group("/api")
|
||||
auth.RegisterAPIRoutes(api, authService)
|
||||
|
||||
return app
|
||||
}
|
||||
|
||||
// ServerConfigFromEnv creates a ServerConfig from environment variables.
|
||||
func ServerConfigFromEnv() (*ServerConfig, error) {
|
||||
c := ServerConfig{
|
||||
PostgresURL: os.Getenv("POSTGRES_URL"),
|
||||
JWTIssuer: os.Getenv("JWT_ISSUER"),
|
||||
JWTAudience: os.Getenv("JWT_AUDIENCE"),
|
||||
}
|
||||
|
||||
errs := []error{}
|
||||
|
||||
keyHex := os.Getenv("JWT_SECRET_KEY")
|
||||
if keyHex == "" {
|
||||
errs = append(errs, errors.New("JWT_SECRET_KEY is required"))
|
||||
} else {
|
||||
k, err := hex.DecodeString(keyHex)
|
||||
if err != nil {
|
||||
errs = append(errs, fmt.Errorf("failed to decode JWT_SECRET_KEY: %w", err))
|
||||
}
|
||||
c.JWTSecretKey = k
|
||||
}
|
||||
|
||||
p, err := strconv.Atoi(os.Getenv("PORT"))
|
||||
if err != nil {
|
||||
errs = append(errs, fmt.Errorf("failed to parse PORT: %w", err))
|
||||
}
|
||||
c.Port = p
|
||||
|
||||
if c.PostgresURL == "" {
|
||||
errs = append(errs, errors.New("POSTGRES_URL is required"))
|
||||
}
|
||||
if c.JWTIssuer == "" {
|
||||
errs = append(errs, errors.New("JWT_ISSUER is required"))
|
||||
}
|
||||
if c.JWTAudience == "" {
|
||||
errs = append(errs, errors.New("JWT_AUDIENCE is required"))
|
||||
}
|
||||
if len(c.JWTSecretKey) == 0 {
|
||||
errs = append(errs, errors.New("JWT_SECRET_KEY is required"))
|
||||
}
|
||||
|
||||
if len(errs) > 0 {
|
||||
return nil, NewServerConfigError(errs...)
|
||||
}
|
||||
|
||||
return &c, nil
|
||||
}
|
||||
Reference in New Issue
Block a user