mirror of
https://github.com/get-drexa/drive.git
synced 2025-11-30 21:41:39 +00:00
Compare commits
84 Commits
feat/keybo
...
6984bb209e
| Author | SHA1 | Date | |
|---|---|---|---|
| 6984bb209e | |||
| 629d56b5ab | |||
| 5e4e08c255 | |||
| 42b805fbd1 | |||
| ab4c14bc09 | |||
| fd3b2d3908 | |||
| 39824e45d9 | |||
| 6aee150a59 | |||
|
9ea76d2021
|
|||
|
987f36e1d2
|
|||
|
797b40a35c
|
|||
| e32e00a230 | |||
| b1e34f878c | |||
| c0e2f7ff37 | |||
| 834517f3c0 | |||
| 06c3951293 | |||
| 389fe35a0a | |||
| 81e3f7af75 | |||
|
1feac70f7f
|
|||
|
5cc13a34b2
|
|||
|
879287f8bf
|
|||
|
ad99bca7fd
|
|||
|
b241f4e211
|
|||
| 027a315a04 | |||
| 015524cd63 | |||
| 4ebb3fe620 | |||
| b8c46217f7 | |||
| 94d6a22ab2 | |||
| f20f1a93c7 | |||
| acfe1523df | |||
|
9b8367ade4
|
|||
|
d2c09f5d0f
|
|||
|
952a0e41b4
|
|||
|
8f194eec55
|
|||
|
a8c7a8f60b
|
|||
|
7fe5184e81
|
|||
|
3209ce1cd2
|
|||
|
af5d887bd1
|
|||
|
a862442979
|
|||
|
6234c5efd3
|
|||
|
6eded27121
|
|||
|
0307cbbf61
|
|||
|
d0893e13be
|
|||
|
a4544a3f09
|
|||
|
14e2ee1e28
|
|||
|
e58caa6b16
|
|||
|
c0f852ad35
|
|||
|
efd4eefa49
|
|||
|
1ae649850a
|
|||
| cd9dee9371 | |||
|
25796ab609
|
|||
|
83a5f92506
|
|||
|
b802cb5aec
|
|||
|
49b76934b2
|
|||
|
2ed8be94f1
|
|||
|
b17de812b9
|
|||
|
0c7e4c43e7
|
|||
|
03d36a2c80
|
|||
|
5eff2fa756
|
|||
|
0e460370da
|
|||
|
bcc0f9f5e2
|
|||
|
bf2087cded
|
|||
|
f7bc5fd958
|
|||
|
1fcdaf4f86
|
|||
|
483aa19351
|
|||
|
b654f50ddd
|
|||
|
33b235517c
|
|||
|
4978a173a8
|
|||
|
4686744fd0
|
|||
|
b745ad273e
|
|||
|
7c29f642f1
|
|||
|
6eef4d9c30
|
|||
|
9149243e95
|
|||
|
b43a88c6fc
|
|||
|
94b35df0e5
|
|||
|
19e52feebb
|
|||
|
e806d442b7
|
|||
|
57369d10fe
|
|||
|
9282e75bef
|
|||
|
875aae74e8
|
|||
|
c2d9010508
|
|||
|
0e686a1f85
|
|||
|
1d8a117b93
|
|||
|
022f3c4726
|
@@ -1,5 +1,4 @@
|
|||||||
{
|
{
|
||||||
"name": "React + Bun + Convex Development",
|
|
||||||
"build": {
|
"build": {
|
||||||
"context": ".",
|
"context": ".",
|
||||||
"dockerfile": "Dockerfile"
|
"dockerfile": "Dockerfile"
|
||||||
@@ -7,11 +6,12 @@
|
|||||||
"features": {
|
"features": {
|
||||||
"ghcr.io/devcontainers/features/git:1": {},
|
"ghcr.io/devcontainers/features/git:1": {},
|
||||||
"ghcr.io/devcontainers/features/github-cli:1": {},
|
"ghcr.io/devcontainers/features/github-cli:1": {},
|
||||||
"ghcr.io/devcontainers/features/docker-in-docker:2": {
|
|
||||||
"moby": false
|
|
||||||
},
|
|
||||||
"ghcr.io/tailscale/codespace/tailscale": {
|
"ghcr.io/tailscale/codespace/tailscale": {
|
||||||
"version": "latest"
|
"version": "latest"
|
||||||
|
},
|
||||||
|
"ghcr.io/devcontainers/features/go:1": {
|
||||||
|
"version": "1.25.4",
|
||||||
|
"golangciLintVersion": "2.6.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"postCreateCommand": "./scripts/setup-git.sh",
|
"postCreateCommand": "./scripts/setup-git.sh",
|
||||||
@@ -20,21 +20,11 @@
|
|||||||
"extensions": [
|
"extensions": [
|
||||||
"biomejs.biome",
|
"biomejs.biome",
|
||||||
"bradlc.vscode-tailwindcss",
|
"bradlc.vscode-tailwindcss",
|
||||||
"ms-vscode.vscode-typescript-next",
|
|
||||||
"esbenp.prettier-vscode",
|
|
||||||
"ms-vscode.vscode-json",
|
|
||||||
"formulahendry.auto-rename-tag",
|
|
||||||
"christian-kohler.path-intellisense",
|
"christian-kohler.path-intellisense",
|
||||||
"ms-vscode.vscode-eslint",
|
"golang.go"
|
||||||
"convex.convex-vscode"
|
|
||||||
],
|
],
|
||||||
"settings": {
|
"settings": {
|
||||||
"editor.defaultFormatter": "biomejs.biome",
|
|
||||||
"editor.formatOnSave": true,
|
"editor.formatOnSave": true,
|
||||||
"editor.codeActionsOnSave": {
|
|
||||||
"source.organizeImports.biome": "explicit",
|
|
||||||
"source.fixAll.biome": "explicit"
|
|
||||||
},
|
|
||||||
"typescript.preferences.importModuleSpecifier": "relative",
|
"typescript.preferences.importModuleSpecifier": "relative",
|
||||||
"typescript.suggest.autoImports": true,
|
"typescript.suggest.autoImports": true,
|
||||||
"emmet.includeLanguages": {
|
"emmet.includeLanguages": {
|
||||||
@@ -44,7 +34,63 @@
|
|||||||
"tailwindCSS.experimental.classRegex": [
|
"tailwindCSS.experimental.classRegex": [
|
||||||
["cva\\(([^)]*)\\)", "[\"'`]([^\"'`]*).*?[\"'`]"],
|
["cva\\(([^)]*)\\)", "[\"'`]([^\"'`]*).*?[\"'`]"],
|
||||||
["cx\\(([^)]*)\\)", "(?:'|\"|`)([^']*)(?:'|\"|`)"]
|
["cx\\(([^)]*)\\)", "(?:'|\"|`)([^']*)(?:'|\"|`)"]
|
||||||
]
|
],
|
||||||
|
"[javascript]": {
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"editor.defaultFormatter": "biomejs.biome",
|
||||||
|
"editor.codeActionsOnSave": {
|
||||||
|
"source.organizeImports.biome": "explicit",
|
||||||
|
"source.fixAll.biome": "explicit"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"[javascriptreact]": {
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"editor.defaultFormatter": "biomejs.biome",
|
||||||
|
"editor.codeActionsOnSave": {
|
||||||
|
"source.organizeImports.biome": "explicit",
|
||||||
|
"source.fixAll.biome": "explicit"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"[typescript]": {
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"editor.defaultFormatter": "biomejs.biome",
|
||||||
|
"editor.codeActionsOnSave": {
|
||||||
|
"source.organizeImports.biome": "explicit",
|
||||||
|
"source.fixAll.biome": "explicit"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"[typescriptreact]": {
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"editor.defaultFormatter": "biomejs.biome",
|
||||||
|
"editor.codeActionsOnSave": {
|
||||||
|
"source.organizeImports.biome": "explicit",
|
||||||
|
"source.fixAll.biome": "explicit"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"[json]": {
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"editor.defaultFormatter": "biomejs.biome",
|
||||||
|
"editor.codeActionsOnSave": {
|
||||||
|
"source.fixAll.biome": "explicit"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"[jsonc]": {
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"editor.defaultFormatter": "biomejs.biome",
|
||||||
|
"editor.codeActionsOnSave": {
|
||||||
|
"source.fixAll.biome": "explicit"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"[go]": {
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"editor.defaultFormatter": "golang.go",
|
||||||
|
"editor.codeActionsOnSave": {
|
||||||
|
"source.organizeImports": "explicit"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"go.formatTool": "goimports",
|
||||||
|
"go.lintTool": "golangci-lint",
|
||||||
|
"go.useLanguageServer": true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|||||||
15
.env.sample
15
.env.sample
@@ -1,10 +1,13 @@
|
|||||||
|
# this is the url to the convex instance (NOT THE DASHBOARD)
|
||||||
CONVEX_SELF_HOSTED_URL=
|
CONVEX_SELF_HOSTED_URL=
|
||||||
CONVEX_SELF_HOSTED_ADMIN_KEY=
|
CONVEX_SELF_HOSTED_ADMIN_KEY=
|
||||||
CONVEX_URL=
|
|
||||||
WORKOS_CLIENT_ID=
|
|
||||||
WORKOS_CLIENT_SECRET=
|
|
||||||
WORKOS_API_KEY=
|
|
||||||
|
|
||||||
|
# this is the url to the convex instance (NOT THE DASHBOARD)
|
||||||
|
CONVEX_URL=
|
||||||
|
# this is the convex url for invoking http actions
|
||||||
|
CONVEX_SITE_URL=
|
||||||
|
|
||||||
|
# this is the url to the convex instance (NOT THE DASHBOARD)
|
||||||
BUN_PUBLIC_CONVEX_URL=
|
BUN_PUBLIC_CONVEX_URL=
|
||||||
BUN_PUBLIC_WORKOS_CLIENT_ID=
|
# this is the convex url for invoking http actions
|
||||||
BUN_PUBLIC_WORKOS_REDIRECT_URI=
|
BUN_PUBLIC_CONVEX_SITE_URL=
|
||||||
|
|||||||
@@ -5,7 +5,8 @@ backend: convex
|
|||||||
# Project structure
|
# Project structure
|
||||||
This project uses npm workspaces.
|
This project uses npm workspaces.
|
||||||
- `packages/convex` - convex functions and models
|
- `packages/convex` - convex functions and models
|
||||||
- `packages/web` - frontend dashboard
|
- `apps/drive-web` - frontend dashboard
|
||||||
|
- `apps/file-proxy` - proxies uploaded files via opaque share tokens
|
||||||
- `packages/path` - path utils
|
- `packages/path` - path utils
|
||||||
|
|
||||||
# General Guidelines
|
# General Guidelines
|
||||||
|
|||||||
34
apps/backend/cmd/drexa/main.go
Normal file
34
apps/backend/cmd/drexa/main.go
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/get-drexa/drexa/internal/drexa"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
configPath := flag.String("config", "", "path to config file (required)")
|
||||||
|
flag.Parse()
|
||||||
|
|
||||||
|
if *configPath == "" {
|
||||||
|
fmt.Fprintln(os.Stderr, "error: --config is required")
|
||||||
|
flag.Usage()
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
config, err := drexa.ConfigFromFile(*configPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("failed to load config: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
server, err := drexa.NewServer(*config)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Printf("starting server on :%d", config.Server.Port)
|
||||||
|
log.Fatal(server.Listen(fmt.Sprintf(":%d", config.Server.Port)))
|
||||||
|
}
|
||||||
37
apps/backend/cmd/migration/main.go
Normal file
37
apps/backend/cmd/migration/main.go
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/get-drexa/drexa/internal/database"
|
||||||
|
"github.com/get-drexa/drexa/internal/drexa"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
configPath := flag.String("config", "", "path to config file (required)")
|
||||||
|
flag.Parse()
|
||||||
|
|
||||||
|
if *configPath == "" {
|
||||||
|
fmt.Fprintln(os.Stderr, "error: --config is required")
|
||||||
|
flag.Usage()
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
config, err := drexa.ConfigFromFile(*configPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("failed to load config: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
db := database.NewFromPostgres(config.Database.PostgresURL)
|
||||||
|
defer db.Close()
|
||||||
|
|
||||||
|
log.Println("running migrations...")
|
||||||
|
if err := database.RunMigrations(context.Background(), db); err != nil {
|
||||||
|
log.Fatalf("failed to run migrations: %v", err)
|
||||||
|
}
|
||||||
|
log.Println("migrations completed successfully")
|
||||||
|
}
|
||||||
30
apps/backend/config.example.yaml
Normal file
30
apps/backend/config.example.yaml
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
# Drexa Backend Configuration
|
||||||
|
# Copy this file to config.yaml and adjust values for your environment.
|
||||||
|
|
||||||
|
server:
|
||||||
|
port: 8080
|
||||||
|
|
||||||
|
database:
|
||||||
|
postgres_url: postgres://user:password@localhost:5432/drexa?sslmode=disable
|
||||||
|
|
||||||
|
jwt:
|
||||||
|
issuer: drexa
|
||||||
|
audience: drexa-api
|
||||||
|
# Secret key can be provided via (in order of precedence):
|
||||||
|
# 1. JWT_SECRET_KEY environment variable (base64 encoded)
|
||||||
|
# 2. secret_key_base64 below (base64 encoded)
|
||||||
|
# 3. secret_key_path below (file with base64 encoded content)
|
||||||
|
# secret_key_base64: "base64encodedkey"
|
||||||
|
secret_key_path: /run/secrets/jwt_secret_key
|
||||||
|
|
||||||
|
storage:
|
||||||
|
# Mode: "flat" (UUID-based keys) or "hierarchical" (path-based keys)
|
||||||
|
# Note: S3 backend only supports "flat" mode
|
||||||
|
mode: flat
|
||||||
|
# Backend: "fs" (filesystem) or "s3" (not yet implemented)
|
||||||
|
backend: fs
|
||||||
|
# Required when backend is "fs"
|
||||||
|
root_path: /var/lib/drexa/blobs
|
||||||
|
# Required when backend is "s3"
|
||||||
|
# bucket: my-drexa-bucket
|
||||||
|
|
||||||
15
apps/backend/config.yaml
Normal file
15
apps/backend/config.yaml
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
server:
|
||||||
|
port: 8080
|
||||||
|
|
||||||
|
database:
|
||||||
|
postgres_url: postgres://drexa:hunter2@helian:5433/drexa?sslmode=disable
|
||||||
|
|
||||||
|
jwt:
|
||||||
|
issuer: drexa
|
||||||
|
audience: drexa-api
|
||||||
|
secret_key_base64: "pNeUExoqdakfecZLFL53NJpY4iB9zFot9EuEBItlYKY="
|
||||||
|
|
||||||
|
storage:
|
||||||
|
mode: hierarchical
|
||||||
|
backend: fs
|
||||||
|
root_path: ./data
|
||||||
40
apps/backend/go.mod
Normal file
40
apps/backend/go.mod
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
module github.com/get-drexa/drexa
|
||||||
|
|
||||||
|
go 1.25.4
|
||||||
|
|
||||||
|
require (
|
||||||
|
github.com/gabriel-vasile/mimetype v1.4.11
|
||||||
|
github.com/gofiber/fiber/v2 v2.52.9
|
||||||
|
github.com/google/uuid v1.6.0
|
||||||
|
github.com/sqids/sqids-go v0.4.1
|
||||||
|
github.com/uptrace/bun v1.2.15
|
||||||
|
golang.org/x/crypto v0.40.0
|
||||||
|
gopkg.in/yaml.v3 v3.0.1
|
||||||
|
)
|
||||||
|
|
||||||
|
require (
|
||||||
|
go.opentelemetry.io/otel v1.37.0 // indirect
|
||||||
|
go.opentelemetry.io/otel/trace v1.37.0 // indirect
|
||||||
|
mellium.im/sasl v0.3.2 // indirect
|
||||||
|
)
|
||||||
|
|
||||||
|
require (
|
||||||
|
github.com/andybalholm/brotli v1.1.0 // indirect
|
||||||
|
github.com/golang-jwt/jwt/v5 v5.3.0
|
||||||
|
github.com/jinzhu/inflection v1.0.0 // indirect
|
||||||
|
github.com/klauspost/compress v1.17.9 // indirect
|
||||||
|
github.com/mattn/go-colorable v0.1.14 // indirect
|
||||||
|
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||||
|
github.com/mattn/go-runewidth v0.0.16 // indirect
|
||||||
|
github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
|
||||||
|
github.com/rivo/uniseg v0.2.0 // indirect
|
||||||
|
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect
|
||||||
|
github.com/uptrace/bun/dialect/pgdialect v1.2.15
|
||||||
|
github.com/uptrace/bun/driver/pgdriver v1.2.15
|
||||||
|
github.com/valyala/bytebufferpool v1.0.0 // indirect
|
||||||
|
github.com/valyala/fasthttp v1.51.0 // indirect
|
||||||
|
github.com/valyala/tcplisten v1.0.0 // indirect
|
||||||
|
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
|
||||||
|
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
||||||
|
golang.org/x/sys v0.34.0 // indirect
|
||||||
|
)
|
||||||
72
apps/backend/go.sum
Normal file
72
apps/backend/go.sum
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
|
||||||
|
github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
|
||||||
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/gabriel-vasile/mimetype v1.4.11 h1:AQvxbp830wPhHTqc1u7nzoLT+ZFxGY7emj5DR5DYFik=
|
||||||
|
github.com/gabriel-vasile/mimetype v1.4.11/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s=
|
||||||
|
github.com/gofiber/fiber/v2 v2.52.9 h1:YjKl5DOiyP3j0mO61u3NTmK7or8GzzWzCFzkboyP5cw=
|
||||||
|
github.com/gofiber/fiber/v2 v2.52.9/go.mod h1:YEcBbO/FB+5M1IZNBP9FO3J9281zgPAreiI1oqg8nDw=
|
||||||
|
github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo=
|
||||||
|
github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
|
||||||
|
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||||
|
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||||
|
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||||
|
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
|
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
|
||||||
|
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
|
||||||
|
github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA=
|
||||||
|
github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
|
||||||
|
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||||
|
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||||
|
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
|
||||||
|
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
|
||||||
|
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||||
|
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
|
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
|
||||||
|
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||||
|
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
|
||||||
|
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
|
github.com/puzpuzpuz/xsync/v3 v3.5.1 h1:GJYJZwO6IdxN/IKbneznS6yPkVC+c3zyY/j19c++5Fg=
|
||||||
|
github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
|
||||||
|
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
|
||||||
|
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||||
|
github.com/sqids/sqids-go v0.4.1 h1:eQKYzmAZbLlRwHeHYPF35QhgxwZHLnlmVj9AkIj/rrw=
|
||||||
|
github.com/sqids/sqids-go v0.4.1/go.mod h1:EMwHuPQgSNFS0A49jESTfIQS+066XQTVhukrzEPScl8=
|
||||||
|
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||||
|
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||||
|
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc h1:9lRDQMhESg+zvGYmW5DyG0UqvY96Bu5QYsTLvCHdrgo=
|
||||||
|
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc/go.mod h1:bciPuU6GHm1iF1pBvUfxfsH0Wmnc2VbpgvbI9ZWuIRs=
|
||||||
|
github.com/uptrace/bun v1.2.15 h1:Ut68XRBLDgp9qG9QBMa9ELWaZOmzHNdczHQdrOZbEFE=
|
||||||
|
github.com/uptrace/bun v1.2.15/go.mod h1:Eghz7NonZMiTX/Z6oKYytJ0oaMEJ/eq3kEV4vSqG038=
|
||||||
|
github.com/uptrace/bun/dialect/pgdialect v1.2.15 h1:er+/3giAIqpfrXJw+KP9B7ujyQIi5XkPnFmgjAVL6bA=
|
||||||
|
github.com/uptrace/bun/dialect/pgdialect v1.2.15/go.mod h1:QSiz6Qpy9wlGFsfpf7UMSL6mXAL1jDJhFwuOVacCnOQ=
|
||||||
|
github.com/uptrace/bun/driver/pgdriver v1.2.15 h1:eZZ60ZtUUE6jjv6VAI1pCMaTgtx3sxmChQzwbvchOOo=
|
||||||
|
github.com/uptrace/bun/driver/pgdriver v1.2.15/go.mod h1:s2zz/BAeScal4KLFDI8PURwATN8s9RDBsElEbnPAjv4=
|
||||||
|
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
||||||
|
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
||||||
|
github.com/valyala/fasthttp v1.51.0 h1:8b30A5JlZ6C7AS81RsWjYMQmrZG6feChmgAolCl1SqA=
|
||||||
|
github.com/valyala/fasthttp v1.51.0/go.mod h1:oI2XroL+lI7vdXyYoQk03bXBThfFl2cVdIA3Xl7cH8g=
|
||||||
|
github.com/valyala/tcplisten v1.0.0 h1:rBHj/Xf+E1tRGZyWIWwJDiRY0zc1Js+CV5DqwacVSA8=
|
||||||
|
github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc=
|
||||||
|
github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8=
|
||||||
|
github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
|
||||||
|
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
|
||||||
|
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
|
||||||
|
go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ=
|
||||||
|
go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I=
|
||||||
|
go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4=
|
||||||
|
go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0=
|
||||||
|
golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM=
|
||||||
|
golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY=
|
||||||
|
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA=
|
||||||
|
golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
mellium.im/sasl v0.3.2 h1:PT6Xp7ccn9XaXAnJ03FcEjmAn7kK1x7aoXV6F+Vmrl0=
|
||||||
|
mellium.im/sasl v0.3.2/go.mod h1:NKXDi1zkr+BlMHLQjY3ofYuU4KSPFxknb8mfEu6SveY=
|
||||||
24
apps/backend/internal/auth/err.go
Normal file
24
apps/backend/internal/auth/err.go
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
package auth
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
)
|
||||||
|
|
||||||
|
var ErrUnauthenticatedRequest = errors.New("unauthenticated request")
|
||||||
|
|
||||||
|
type InvalidAccessTokenError struct {
|
||||||
|
err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func newInvalidAccessTokenError(err error) *InvalidAccessTokenError {
|
||||||
|
return &InvalidAccessTokenError{err}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *InvalidAccessTokenError) Error() string {
|
||||||
|
return fmt.Sprintf("invalid access token: %v", e.err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *InvalidAccessTokenError) Unwrap() error {
|
||||||
|
return e.err
|
||||||
|
}
|
||||||
113
apps/backend/internal/auth/http.go
Normal file
113
apps/backend/internal/auth/http.go
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
package auth
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"log/slog"
|
||||||
|
|
||||||
|
"github.com/get-drexa/drexa/internal/user"
|
||||||
|
"github.com/gofiber/fiber/v2"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type loginRequest struct {
|
||||||
|
Email string `json:"email"`
|
||||||
|
Password string `json:"password"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type registerRequest struct {
|
||||||
|
Email string `json:"email"`
|
||||||
|
Password string `json:"password"`
|
||||||
|
DisplayName string `json:"displayName"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type loginResponse struct {
|
||||||
|
User user.User `json:"user"`
|
||||||
|
AccessToken string `json:"accessToken"`
|
||||||
|
RefreshToken string `json:"refreshToken"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type HTTPHandler struct {
|
||||||
|
service *Service
|
||||||
|
db *bun.DB
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewHTTPHandler(s *Service, db *bun.DB) *HTTPHandler {
|
||||||
|
return &HTTPHandler{service: s, db: db}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *HTTPHandler) RegisterRoutes(api fiber.Router) {
|
||||||
|
auth := api.Group("/auth")
|
||||||
|
|
||||||
|
auth.Post("/login", h.Login)
|
||||||
|
auth.Post("/register", h.Register)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *HTTPHandler) Login(c *fiber.Ctx) error {
|
||||||
|
req := new(loginRequest)
|
||||||
|
if err := c.BodyParser(req); err != nil {
|
||||||
|
return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid request"})
|
||||||
|
}
|
||||||
|
|
||||||
|
tx, err := h.db.BeginTx(c.Context(), nil)
|
||||||
|
if err != nil {
|
||||||
|
return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Internal server error"})
|
||||||
|
}
|
||||||
|
defer tx.Rollback()
|
||||||
|
|
||||||
|
result, err := h.service.LoginWithEmailAndPassword(c.Context(), tx, req.Email, req.Password)
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, ErrInvalidCredentials) {
|
||||||
|
return c.Status(fiber.StatusUnauthorized).JSON(fiber.Map{"error": "Invalid credentials"})
|
||||||
|
}
|
||||||
|
return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Internal server error"})
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Internal server error"})
|
||||||
|
}
|
||||||
|
|
||||||
|
return c.JSON(loginResponse{
|
||||||
|
User: *result.User,
|
||||||
|
AccessToken: result.AccessToken,
|
||||||
|
RefreshToken: result.RefreshToken,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *HTTPHandler) Register(c *fiber.Ctx) error {
|
||||||
|
req := new(registerRequest)
|
||||||
|
if err := c.BodyParser(req); err != nil {
|
||||||
|
return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid request"})
|
||||||
|
}
|
||||||
|
|
||||||
|
tx, err := h.db.BeginTx(c.Context(), nil)
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("failed to begin transaction", "error", err)
|
||||||
|
return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Internal server error"})
|
||||||
|
}
|
||||||
|
defer tx.Rollback()
|
||||||
|
|
||||||
|
result, err := h.service.Register(c.Context(), tx, registerOptions{
|
||||||
|
email: req.Email,
|
||||||
|
password: req.Password,
|
||||||
|
displayName: req.DisplayName,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
var ae *user.AlreadyExistsError
|
||||||
|
if errors.As(err, &ae) {
|
||||||
|
return c.Status(fiber.StatusConflict).JSON(fiber.Map{"error": "User already exists"})
|
||||||
|
}
|
||||||
|
slog.Error("failed to register user", "error", err)
|
||||||
|
return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Internal server error"})
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
slog.Error("failed to commit transaction", "error", err)
|
||||||
|
return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Internal server error"})
|
||||||
|
}
|
||||||
|
|
||||||
|
return c.JSON(loginResponse{
|
||||||
|
User: *result.User,
|
||||||
|
AccessToken: result.AccessToken,
|
||||||
|
RefreshToken: result.RefreshToken,
|
||||||
|
})
|
||||||
|
}
|
||||||
57
apps/backend/internal/auth/middleware.go
Normal file
57
apps/backend/internal/auth/middleware.go
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
package auth
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/get-drexa/drexa/internal/user"
|
||||||
|
"github.com/gofiber/fiber/v2"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
const authenticatedUserKey = "authenticatedUser"
|
||||||
|
|
||||||
|
// NewBearerAuthMiddleware is a middleware that authenticates a request using a bearer token.
|
||||||
|
// To obtain the authenticated user in subsequent handlers, see AuthenticatedUser.
|
||||||
|
func NewBearerAuthMiddleware(s *Service, db *bun.DB) fiber.Handler {
|
||||||
|
return func(c *fiber.Ctx) error {
|
||||||
|
authHeader := c.Get("Authorization")
|
||||||
|
if authHeader == "" {
|
||||||
|
return c.SendStatus(fiber.StatusUnauthorized)
|
||||||
|
}
|
||||||
|
|
||||||
|
parts := strings.Split(authHeader, " ")
|
||||||
|
if len(parts) != 2 || parts[0] != "Bearer" {
|
||||||
|
return c.SendStatus(fiber.StatusUnauthorized)
|
||||||
|
}
|
||||||
|
|
||||||
|
token := parts[1]
|
||||||
|
u, err := s.AuthenticateWithAccessToken(c.Context(), db, token)
|
||||||
|
if err != nil {
|
||||||
|
var e *InvalidAccessTokenError
|
||||||
|
if errors.As(err, &e) {
|
||||||
|
return c.SendStatus(fiber.StatusUnauthorized)
|
||||||
|
}
|
||||||
|
|
||||||
|
var nf *user.NotFoundError
|
||||||
|
if errors.As(err, &nf) {
|
||||||
|
return c.SendStatus(fiber.StatusUnauthorized)
|
||||||
|
}
|
||||||
|
|
||||||
|
return c.SendStatus(fiber.StatusInternalServerError)
|
||||||
|
}
|
||||||
|
|
||||||
|
c.Locals(authenticatedUserKey, u)
|
||||||
|
|
||||||
|
return c.Next()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// AuthenticatedUser returns the authenticated user from the given fiber context.
|
||||||
|
// Returns ErrUnauthenticatedRequest if not authenticated.
|
||||||
|
func AuthenticatedUser(c *fiber.Ctx) (*user.User, error) {
|
||||||
|
if u, ok := c.Locals(authenticatedUserKey).(*user.User); ok {
|
||||||
|
return u, nil
|
||||||
|
}
|
||||||
|
return nil, ErrUnauthenticatedRequest
|
||||||
|
}
|
||||||
126
apps/backend/internal/auth/service.go
Normal file
126
apps/backend/internal/auth/service.go
Normal file
@@ -0,0 +1,126 @@
|
|||||||
|
package auth
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/hex"
|
||||||
|
"errors"
|
||||||
|
|
||||||
|
"github.com/get-drexa/drexa/internal/password"
|
||||||
|
"github.com/get-drexa/drexa/internal/user"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type LoginResult struct {
|
||||||
|
User *user.User
|
||||||
|
AccessToken string
|
||||||
|
RefreshToken string
|
||||||
|
}
|
||||||
|
|
||||||
|
var ErrInvalidCredentials = errors.New("invalid credentials")
|
||||||
|
|
||||||
|
type Service struct {
|
||||||
|
userService *user.Service
|
||||||
|
tokenConfig TokenConfig
|
||||||
|
}
|
||||||
|
|
||||||
|
type registerOptions struct {
|
||||||
|
displayName string
|
||||||
|
email string
|
||||||
|
password string
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewService(userService *user.Service, tokenConfig TokenConfig) *Service {
|
||||||
|
return &Service{
|
||||||
|
userService: userService,
|
||||||
|
tokenConfig: tokenConfig,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Service) LoginWithEmailAndPassword(ctx context.Context, db bun.IDB, email, plain string) (*LoginResult, error) {
|
||||||
|
u, err := s.userService.UserByEmail(ctx, db, email)
|
||||||
|
if err != nil {
|
||||||
|
var nf *user.NotFoundError
|
||||||
|
if errors.As(err, &nf) {
|
||||||
|
return nil, ErrInvalidCredentials
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
ok, err := password.Verify(plain, u.Password)
|
||||||
|
if err != nil || !ok {
|
||||||
|
return nil, ErrInvalidCredentials
|
||||||
|
}
|
||||||
|
|
||||||
|
at, err := GenerateAccessToken(u, &s.tokenConfig)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
rt, err := GenerateRefreshToken(u, &s.tokenConfig)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = db.NewInsert().Model(rt).Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &LoginResult{
|
||||||
|
User: u,
|
||||||
|
AccessToken: at,
|
||||||
|
RefreshToken: hex.EncodeToString(rt.Token),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Service) Register(ctx context.Context, db bun.IDB, opts registerOptions) (*LoginResult, error) {
|
||||||
|
hashed, err := password.Hash(opts.password)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
u, err := s.userService.RegisterUser(ctx, db, user.UserRegistrationOptions{
|
||||||
|
Email: opts.email,
|
||||||
|
DisplayName: opts.displayName,
|
||||||
|
Password: hashed,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
at, err := GenerateAccessToken(u, &s.tokenConfig)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
rt, err := GenerateRefreshToken(u, &s.tokenConfig)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = db.NewInsert().Model(rt).Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &LoginResult{
|
||||||
|
User: u,
|
||||||
|
AccessToken: at,
|
||||||
|
RefreshToken: hex.EncodeToString(rt.Token),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Service) AuthenticateWithAccessToken(ctx context.Context, db bun.IDB, token string) (*user.User, error) {
|
||||||
|
claims, err := ParseAccessToken(token, &s.tokenConfig)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
id, err := uuid.Parse(claims.Subject)
|
||||||
|
if err != nil {
|
||||||
|
return nil, newInvalidAccessTokenError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return s.userService.UserByID(ctx, db, id)
|
||||||
|
}
|
||||||
98
apps/backend/internal/auth/tokens.go
Normal file
98
apps/backend/internal/auth/tokens.go
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
package auth
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/rand"
|
||||||
|
"crypto/sha256"
|
||||||
|
"encoding/hex"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/get-drexa/drexa/internal/user"
|
||||||
|
"github.com/golang-jwt/jwt/v5"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
accessTokenValidFor = time.Minute * 15
|
||||||
|
refreshTokenByteLength = 32
|
||||||
|
refreshTokenValidFor = time.Hour * 24 * 30
|
||||||
|
)
|
||||||
|
|
||||||
|
type TokenConfig struct {
|
||||||
|
Issuer string
|
||||||
|
Audience string
|
||||||
|
SecretKey []byte
|
||||||
|
}
|
||||||
|
|
||||||
|
type RefreshToken struct {
|
||||||
|
bun.BaseModel `bun:"refresh_tokens"`
|
||||||
|
|
||||||
|
ID uuid.UUID `bun:",pk,type:uuid"`
|
||||||
|
UserID uuid.UUID `bun:"user_id,notnull"`
|
||||||
|
Token []byte `bun:"-"`
|
||||||
|
TokenHash string `bun:"token_hash,notnull"`
|
||||||
|
ExpiresAt time.Time `bun:"expires_at,notnull"`
|
||||||
|
CreatedAt time.Time `bun:"created_at,notnull"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func newTokenID() (uuid.UUID, error) {
|
||||||
|
return uuid.NewV7()
|
||||||
|
}
|
||||||
|
|
||||||
|
func GenerateAccessToken(user *user.User, c *TokenConfig) (string, error) {
|
||||||
|
now := time.Now()
|
||||||
|
|
||||||
|
token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.RegisteredClaims{
|
||||||
|
Issuer: c.Issuer,
|
||||||
|
Audience: jwt.ClaimStrings{c.Audience},
|
||||||
|
Subject: user.ID.String(),
|
||||||
|
ExpiresAt: jwt.NewNumericDate(now.Add(accessTokenValidFor)),
|
||||||
|
IssuedAt: jwt.NewNumericDate(now),
|
||||||
|
})
|
||||||
|
|
||||||
|
signed, err := token.SignedString(c.SecretKey)
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("failed to sign token: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return signed, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func GenerateRefreshToken(user *user.User, c *TokenConfig) (*RefreshToken, error) {
|
||||||
|
now := time.Now()
|
||||||
|
|
||||||
|
buf := make([]byte, refreshTokenByteLength)
|
||||||
|
if _, err := rand.Read(buf); err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to generate refresh token: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
id, err := newTokenID()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to generate token ID: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
h := sha256.Sum256(buf)
|
||||||
|
hex := hex.EncodeToString(h[:])
|
||||||
|
|
||||||
|
return &RefreshToken{
|
||||||
|
ID: id,
|
||||||
|
UserID: user.ID,
|
||||||
|
Token: buf,
|
||||||
|
TokenHash: hex,
|
||||||
|
ExpiresAt: now.Add(refreshTokenValidFor),
|
||||||
|
CreatedAt: now,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseAccessToken parses a JWT access token and returns the claims.
|
||||||
|
// Returns an InvalidAccessTokenError if the token is invalid.
|
||||||
|
func ParseAccessToken(token string, c *TokenConfig) (*jwt.RegisteredClaims, error) {
|
||||||
|
parsed, err := jwt.ParseWithClaims(token, &jwt.RegisteredClaims{}, func(token *jwt.Token) (any, error) {
|
||||||
|
return c.SecretKey, nil
|
||||||
|
}, jwt.WithIssuer(c.Issuer), jwt.WithExpirationRequired(), jwt.WithAudience(c.Audience))
|
||||||
|
if err != nil {
|
||||||
|
return nil, newInvalidAccessTokenError(err)
|
||||||
|
}
|
||||||
|
return parsed.Claims.(*jwt.RegisteredClaims), nil
|
||||||
|
}
|
||||||
9
apps/backend/internal/blob/err.go
Normal file
9
apps/backend/internal/blob/err.go
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
package blob
|
||||||
|
|
||||||
|
import "errors"
|
||||||
|
|
||||||
|
var (
|
||||||
|
ErrConflict = errors.New("key already used for a different blob")
|
||||||
|
ErrNotFound = errors.New("key not found")
|
||||||
|
ErrInvalidFileContent = errors.New("invalid file content. must provide either a reader or a blob key")
|
||||||
|
)
|
||||||
151
apps/backend/internal/blob/fs_store.go
Normal file
151
apps/backend/internal/blob/fs_store.go
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
package blob
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
|
||||||
|
"github.com/get-drexa/drexa/internal/ioext"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ Store = &FSStore{}
|
||||||
|
|
||||||
|
type FSStore struct {
|
||||||
|
config FSStoreConfig
|
||||||
|
}
|
||||||
|
|
||||||
|
type FSStoreConfig struct {
|
||||||
|
Root string
|
||||||
|
UploadURL string
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewFSStore(config FSStoreConfig) *FSStore {
|
||||||
|
return &FSStore{config: config}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *FSStore) Initialize(ctx context.Context) error {
|
||||||
|
return os.MkdirAll(s.config.Root, 0755)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *FSStore) GenerateUploadURL(ctx context.Context, key Key, opts UploadURLOptions) (string, error) {
|
||||||
|
return s.config.UploadURL, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *FSStore) Put(ctx context.Context, key Key, reader io.Reader) error {
|
||||||
|
path := filepath.Join(s.config.Root, string(key))
|
||||||
|
|
||||||
|
err := os.MkdirAll(filepath.Dir(path), 0755)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
f, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY|os.O_EXCL, 0644)
|
||||||
|
if err != nil {
|
||||||
|
if os.IsExist(err) {
|
||||||
|
return ErrConflict
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
defer f.Close()
|
||||||
|
_, err = io.Copy(f, reader)
|
||||||
|
if err != nil {
|
||||||
|
_ = os.Remove(path)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *FSStore) Read(ctx context.Context, key Key) (io.ReadCloser, error) {
|
||||||
|
path := filepath.Join(s.config.Root, string(key))
|
||||||
|
f, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
return nil, ErrNotFound
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return f, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *FSStore) ReadRange(ctx context.Context, key Key, offset, length int64) (io.ReadCloser, error) {
|
||||||
|
path := filepath.Join(s.config.Root, string(key))
|
||||||
|
|
||||||
|
f, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
return nil, ErrNotFound
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = f.Seek(offset, io.SeekStart)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return ioext.NewLimitReadCloser(f, length), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *FSStore) ReadSize(ctx context.Context, key Key) (int64, error) {
|
||||||
|
path := filepath.Join(s.config.Root, string(key))
|
||||||
|
fi, err := os.Stat(path)
|
||||||
|
if err != nil {
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
return 0, ErrNotFound
|
||||||
|
}
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
return fi.Size(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *FSStore) Delete(ctx context.Context, key Key) error {
|
||||||
|
err := os.Remove(filepath.Join(s.config.Root, string(key)))
|
||||||
|
// no op if file does not exist
|
||||||
|
// swallow error if file does not exist
|
||||||
|
if err != nil && !os.IsNotExist(err) {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *FSStore) DeletePrefix(ctx context.Context, prefix Key) error {
|
||||||
|
prefixPath := filepath.Join(s.config.Root, string(prefix))
|
||||||
|
err := os.RemoveAll(prefixPath)
|
||||||
|
if err != nil && !os.IsNotExist(err) {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *FSStore) Update(ctx context.Context, key Key, opts UpdateOptions) error {
|
||||||
|
// Update is a no-op for FSStore
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *FSStore) Move(ctx context.Context, srcKey, dstKey Key) error {
|
||||||
|
oldPath := filepath.Join(s.config.Root, string(srcKey))
|
||||||
|
newPath := filepath.Join(s.config.Root, string(dstKey))
|
||||||
|
|
||||||
|
_, err := os.Stat(newPath)
|
||||||
|
if err == nil {
|
||||||
|
return ErrConflict
|
||||||
|
}
|
||||||
|
|
||||||
|
err = os.MkdirAll(filepath.Dir(newPath), 0755)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = os.Rename(oldPath, newPath)
|
||||||
|
if err != nil {
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
return ErrNotFound
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
14
apps/backend/internal/blob/key.go
Normal file
14
apps/backend/internal/blob/key.go
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
package blob
|
||||||
|
|
||||||
|
type Key string
|
||||||
|
|
||||||
|
type KeyMode int
|
||||||
|
|
||||||
|
const (
|
||||||
|
KeyModeStable KeyMode = iota
|
||||||
|
KeyModeDerived
|
||||||
|
)
|
||||||
|
|
||||||
|
func (k Key) IsNil() bool {
|
||||||
|
return k == ""
|
||||||
|
}
|
||||||
28
apps/backend/internal/blob/store.go
Normal file
28
apps/backend/internal/blob/store.go
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
package blob
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"io"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type UploadURLOptions struct {
|
||||||
|
Duration time.Duration
|
||||||
|
}
|
||||||
|
|
||||||
|
type UpdateOptions struct {
|
||||||
|
ContentType string
|
||||||
|
}
|
||||||
|
|
||||||
|
type Store interface {
|
||||||
|
Initialize(ctx context.Context) error
|
||||||
|
GenerateUploadURL(ctx context.Context, key Key, opts UploadURLOptions) (string, error)
|
||||||
|
Put(ctx context.Context, key Key, reader io.Reader) error
|
||||||
|
Update(ctx context.Context, key Key, opts UpdateOptions) error
|
||||||
|
Delete(ctx context.Context, key Key) error
|
||||||
|
DeletePrefix(ctx context.Context, prefix Key) error
|
||||||
|
Move(ctx context.Context, srcKey, dstKey Key) error
|
||||||
|
Read(ctx context.Context, key Key) (io.ReadCloser, error)
|
||||||
|
ReadRange(ctx context.Context, key Key, offset, length int64) (io.ReadCloser, error)
|
||||||
|
ReadSize(ctx context.Context, key Key) (int64, error)
|
||||||
|
}
|
||||||
61
apps/backend/internal/database/errs.go
Normal file
61
apps/backend/internal/database/errs.go
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
|
||||||
|
"github.com/uptrace/bun/driver/pgdriver"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PostgreSQL SQLSTATE error codes.
|
||||||
|
// See: https://www.postgresql.org/docs/current/errcodes-appendix.html
|
||||||
|
const (
|
||||||
|
PgUniqueViolation = "23505"
|
||||||
|
PgForeignKeyViolation = "23503"
|
||||||
|
PgNotNullViolation = "23502"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PostgreSQL protocol error field identifiers used with pgdriver.Error.Field().
|
||||||
|
// See: https://www.postgresql.org/docs/current/protocol-error-fields.html
|
||||||
|
//
|
||||||
|
// Common fields:
|
||||||
|
// - 'C' - SQLSTATE code (e.g., "23505")
|
||||||
|
// - 'M' - Primary error message
|
||||||
|
// - 'D' - Detail message
|
||||||
|
// - 'H' - Hint
|
||||||
|
// - 's' - Schema name
|
||||||
|
// - 't' - Table name
|
||||||
|
// - 'c' - Column name
|
||||||
|
// - 'n' - Constraint name
|
||||||
|
const (
|
||||||
|
pgFieldCode = 'C'
|
||||||
|
pgFieldConstraint = 'n'
|
||||||
|
)
|
||||||
|
|
||||||
|
// IsUniqueViolation checks if the error is a PostgreSQL unique constraint violation.
|
||||||
|
func IsUniqueViolation(err error) bool {
|
||||||
|
return hasPgCode(err, PgUniqueViolation)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsForeignKeyViolation checks if the error is a PostgreSQL foreign key violation.
|
||||||
|
func IsForeignKeyViolation(err error) bool {
|
||||||
|
return hasPgCode(err, PgForeignKeyViolation)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsNotNullViolation checks if the error is a PostgreSQL not-null constraint violation.
|
||||||
|
func IsNotNullViolation(err error) bool {
|
||||||
|
return hasPgCode(err, PgNotNullViolation)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConstraintName returns the constraint name from a PostgreSQL error, or empty string if not applicable.
|
||||||
|
func ConstraintName(err error) string {
|
||||||
|
var pgErr pgdriver.Error
|
||||||
|
if errors.As(err, &pgErr) {
|
||||||
|
return pgErr.Field(pgFieldConstraint)
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func hasPgCode(err error, code string) bool {
|
||||||
|
var pgErr pgdriver.Error
|
||||||
|
return errors.As(err, &pgErr) && pgErr.Field(pgFieldCode) == code
|
||||||
|
}
|
||||||
28
apps/backend/internal/database/migrate.go
Normal file
28
apps/backend/internal/database/migrate.go
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"embed"
|
||||||
|
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
"github.com/uptrace/bun/migrate"
|
||||||
|
)
|
||||||
|
|
||||||
|
//go:embed migrations/*.sql
|
||||||
|
var sqlMigrations embed.FS
|
||||||
|
|
||||||
|
// RunMigrations discovers and runs all migrations against the database.
|
||||||
|
func RunMigrations(ctx context.Context, db *bun.DB) error {
|
||||||
|
migrations := migrate.NewMigrations()
|
||||||
|
if err := migrations.Discover(sqlMigrations); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
migrator := migrate.NewMigrator(db, migrations)
|
||||||
|
if err := migrator.Init(ctx); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := migrator.Migrate(ctx)
|
||||||
|
return err
|
||||||
|
}
|
||||||
94
apps/backend/internal/database/migrations/001_initial.up.sql
Normal file
94
apps/backend/internal/database/migrations/001_initial.up.sql
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
-- ============================================================================
|
||||||
|
-- Application Tables
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS users (
|
||||||
|
id UUID PRIMARY KEY,
|
||||||
|
display_name TEXT,
|
||||||
|
email TEXT NOT NULL UNIQUE,
|
||||||
|
password TEXT NOT NULL,
|
||||||
|
storage_usage_bytes BIGINT NOT NULL,
|
||||||
|
storage_quota_bytes BIGINT NOT NULL,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_users_email ON users(email);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS refresh_tokens (
|
||||||
|
id UUID PRIMARY KEY,
|
||||||
|
user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
token_hash TEXT NOT NULL UNIQUE,
|
||||||
|
expires_at TIMESTAMPTZ NOT NULL,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_refresh_tokens_user_id ON refresh_tokens(user_id);
|
||||||
|
CREATE INDEX idx_refresh_tokens_token_hash ON refresh_tokens(token_hash);
|
||||||
|
CREATE INDEX idx_refresh_tokens_expires_at ON refresh_tokens(expires_at);
|
||||||
|
|
||||||
|
-- Virtual filesystem nodes (unified files + directories)
|
||||||
|
CREATE TABLE IF NOT EXISTS vfs_nodes (
|
||||||
|
id UUID PRIMARY KEY,
|
||||||
|
public_id TEXT NOT NULL UNIQUE, -- opaque ID for external API (no timestamp leak)
|
||||||
|
user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
parent_id UUID REFERENCES vfs_nodes(id) ON DELETE CASCADE, -- NULL = root directory
|
||||||
|
kind TEXT NOT NULL CHECK (kind IN ('file', 'directory')),
|
||||||
|
status TEXT NOT NULL DEFAULT 'ready' CHECK (status IN ('pending', 'ready')),
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
|
||||||
|
-- File-specific fields (NULL for directories)
|
||||||
|
blob_key TEXT, -- reference to blob storage (flat mode), NULL for hierarchical
|
||||||
|
size BIGINT, -- file size in bytes
|
||||||
|
mime_type TEXT, -- content type
|
||||||
|
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
deleted_at TIMESTAMPTZ, -- soft delete for trash
|
||||||
|
|
||||||
|
-- No duplicate names in same parent (per user, excluding deleted)
|
||||||
|
CONSTRAINT unique_node_name UNIQUE NULLS NOT DISTINCT (user_id, parent_id, name, deleted_at)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_vfs_nodes_user_id ON vfs_nodes(user_id) WHERE deleted_at IS NULL;
|
||||||
|
CREATE INDEX idx_vfs_nodes_parent_id ON vfs_nodes(parent_id) WHERE deleted_at IS NULL;
|
||||||
|
CREATE INDEX idx_vfs_nodes_user_parent ON vfs_nodes(user_id, parent_id) WHERE deleted_at IS NULL;
|
||||||
|
CREATE INDEX idx_vfs_nodes_kind ON vfs_nodes(user_id, kind) WHERE deleted_at IS NULL;
|
||||||
|
CREATE INDEX idx_vfs_nodes_deleted ON vfs_nodes(user_id, deleted_at) WHERE deleted_at IS NOT NULL;
|
||||||
|
CREATE INDEX idx_vfs_nodes_public_id ON vfs_nodes(public_id);
|
||||||
|
CREATE UNIQUE INDEX idx_vfs_nodes_user_root ON vfs_nodes(user_id) WHERE parent_id IS NULL; -- one root per user
|
||||||
|
CREATE INDEX idx_vfs_nodes_pending ON vfs_nodes(created_at) WHERE status = 'pending'; -- for cleanup job
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS node_shares (
|
||||||
|
id UUID PRIMARY KEY,
|
||||||
|
node_id UUID NOT NULL REFERENCES vfs_nodes(id) ON DELETE CASCADE,
|
||||||
|
share_token TEXT NOT NULL UNIQUE,
|
||||||
|
expires_at TIMESTAMPTZ,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_node_shares_share_token ON node_shares(share_token);
|
||||||
|
CREATE INDEX idx_node_shares_node_id ON node_shares(node_id);
|
||||||
|
CREATE INDEX idx_node_shares_expires_at ON node_shares(expires_at) WHERE expires_at IS NOT NULL;
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Triggers for updated_at timestamps
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION update_updated_at_column()
|
||||||
|
RETURNS TRIGGER AS $$
|
||||||
|
BEGIN
|
||||||
|
NEW.updated_at = NOW();
|
||||||
|
RETURN NEW;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE plpgsql;
|
||||||
|
|
||||||
|
CREATE TRIGGER update_users_updated_at BEFORE UPDATE ON users
|
||||||
|
FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
|
||||||
|
|
||||||
|
CREATE TRIGGER update_vfs_nodes_updated_at BEFORE UPDATE ON vfs_nodes
|
||||||
|
FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
|
||||||
|
|
||||||
|
CREATE TRIGGER update_node_shares_updated_at BEFORE UPDATE ON node_shares
|
||||||
|
FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
|
||||||
27
apps/backend/internal/database/postgres.go
Normal file
27
apps/backend/internal/database/postgres.go
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
"github.com/uptrace/bun/dialect/pgdialect"
|
||||||
|
"github.com/uptrace/bun/driver/pgdriver"
|
||||||
|
)
|
||||||
|
|
||||||
|
func NewFromPostgres(url string) *bun.DB {
|
||||||
|
sqldb := sql.OpenDB(pgdriver.NewConnector(pgdriver.WithDSN(url)))
|
||||||
|
|
||||||
|
// Configure connection pool to prevent "database closed" errors
|
||||||
|
// SetMaxOpenConns sets the maximum number of open connections to the database
|
||||||
|
sqldb.SetMaxOpenConns(25)
|
||||||
|
// SetMaxIdleConns sets the maximum number of connections in the idle connection pool
|
||||||
|
sqldb.SetMaxIdleConns(5)
|
||||||
|
// SetConnMaxLifetime sets the maximum amount of time a connection may be reused
|
||||||
|
sqldb.SetConnMaxLifetime(5 * time.Minute)
|
||||||
|
// SetConnMaxIdleTime sets the maximum amount of time a connection may be idle
|
||||||
|
sqldb.SetConnMaxIdleTime(10 * time.Minute)
|
||||||
|
|
||||||
|
db := bun.NewDB(sqldb, pgdialect.New())
|
||||||
|
return db
|
||||||
|
}
|
||||||
155
apps/backend/internal/drexa/config.go
Normal file
155
apps/backend/internal/drexa/config.go
Normal file
@@ -0,0 +1,155 @@
|
|||||||
|
package drexa
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/base64"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
|
)
|
||||||
|
|
||||||
|
type StorageMode string
|
||||||
|
type StorageBackend string
|
||||||
|
|
||||||
|
const (
|
||||||
|
StorageModeFlat StorageMode = "flat"
|
||||||
|
StorageModeHierarchical StorageMode = "hierarchical"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
StorageBackendFS StorageBackend = "fs"
|
||||||
|
StorageBackendS3 StorageBackend = "s3"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Config struct {
|
||||||
|
Server ServerConfig `yaml:"server"`
|
||||||
|
Database DatabaseConfig `yaml:"database"`
|
||||||
|
JWT JWTConfig `yaml:"jwt"`
|
||||||
|
Storage StorageConfig `yaml:"storage"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ServerConfig struct {
|
||||||
|
Port int `yaml:"port"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type DatabaseConfig struct {
|
||||||
|
PostgresURL string `yaml:"postgres_url"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type JWTConfig struct {
|
||||||
|
Issuer string `yaml:"issuer"`
|
||||||
|
Audience string `yaml:"audience"`
|
||||||
|
SecretKeyBase64 string `yaml:"secret_key_base64"`
|
||||||
|
SecretKeyPath string `yaml:"secret_key_path"`
|
||||||
|
SecretKey []byte `yaml:"-"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type StorageConfig struct {
|
||||||
|
Mode StorageMode `yaml:"mode"`
|
||||||
|
Backend StorageBackend `yaml:"backend"`
|
||||||
|
RootPath string `yaml:"root_path"`
|
||||||
|
Bucket string `yaml:"bucket"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConfigFromFile loads configuration from a YAML file.
|
||||||
|
// JWT secret key is loaded from JWT_SECRET_KEY env var (base64 encoded),
|
||||||
|
// falling back to the file path specified in jwt.secret_key_path.
|
||||||
|
func ConfigFromFile(path string) (*Config, error) {
|
||||||
|
data, err := os.ReadFile(path)
|
||||||
|
if err != nil {
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
return nil, fmt.Errorf("config file not found: %s", path)
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var config Config
|
||||||
|
if err := yaml.Unmarshal(data, &config); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load JWT secret key (priority: env var > config base64 > config file path)
|
||||||
|
if envKey := os.Getenv("JWT_SECRET_KEY"); envKey != "" {
|
||||||
|
key, err := base64.StdEncoding.DecodeString(envKey)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.New("JWT_SECRET_KEY env var is not valid base64")
|
||||||
|
}
|
||||||
|
config.JWT.SecretKey = key
|
||||||
|
} else if config.JWT.SecretKeyBase64 != "" {
|
||||||
|
key, err := base64.StdEncoding.DecodeString(config.JWT.SecretKeyBase64)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.New("jwt.secret_key_base64 is not valid base64")
|
||||||
|
}
|
||||||
|
config.JWT.SecretKey = key
|
||||||
|
} else if config.JWT.SecretKeyPath != "" {
|
||||||
|
keyData, err := os.ReadFile(config.JWT.SecretKeyPath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
key, err := base64.StdEncoding.DecodeString(string(keyData))
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.New("jwt.secret_key_path file content is not valid base64")
|
||||||
|
}
|
||||||
|
config.JWT.SecretKey = key
|
||||||
|
}
|
||||||
|
|
||||||
|
if errs := config.Validate(); len(errs) > 0 {
|
||||||
|
return nil, NewConfigError(errs...)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &config, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate checks for required configuration fields.
|
||||||
|
func (c *Config) Validate() []error {
|
||||||
|
var errs []error
|
||||||
|
|
||||||
|
// Server
|
||||||
|
if c.Server.Port == 0 {
|
||||||
|
errs = append(errs, errors.New("server.port is required"))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Database
|
||||||
|
if c.Database.PostgresURL == "" {
|
||||||
|
errs = append(errs, errors.New("database.postgres_url is required"))
|
||||||
|
}
|
||||||
|
|
||||||
|
// JWT
|
||||||
|
if c.JWT.Issuer == "" {
|
||||||
|
errs = append(errs, errors.New("jwt.issuer is required"))
|
||||||
|
}
|
||||||
|
if c.JWT.Audience == "" {
|
||||||
|
errs = append(errs, errors.New("jwt.audience is required"))
|
||||||
|
}
|
||||||
|
if len(c.JWT.SecretKey) == 0 {
|
||||||
|
errs = append(errs, errors.New("jwt secret key is required (set JWT_SECRET_KEY env var, jwt.secret_key_base64, or jwt.secret_key_path)"))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Storage
|
||||||
|
if c.Storage.Mode == "" {
|
||||||
|
errs = append(errs, errors.New("storage.mode is required"))
|
||||||
|
} else if c.Storage.Mode != StorageModeFlat && c.Storage.Mode != StorageModeHierarchical {
|
||||||
|
errs = append(errs, errors.New("storage.mode must be 'flat' or 'hierarchical'"))
|
||||||
|
}
|
||||||
|
|
||||||
|
if c.Storage.Backend == "" {
|
||||||
|
errs = append(errs, errors.New("storage.backend is required"))
|
||||||
|
} else if c.Storage.Backend != StorageBackendFS && c.Storage.Backend != StorageBackendS3 {
|
||||||
|
errs = append(errs, errors.New("storage.backend must be 'fs' or 's3'"))
|
||||||
|
}
|
||||||
|
|
||||||
|
if c.Storage.Backend == StorageBackendFS && c.Storage.RootPath == "" {
|
||||||
|
errs = append(errs, errors.New("storage.root_path is required when backend is 'fs'"))
|
||||||
|
}
|
||||||
|
if c.Storage.Backend == StorageBackendS3 {
|
||||||
|
if c.Storage.Bucket == "" {
|
||||||
|
errs = append(errs, errors.New("storage.bucket is required when backend is 's3'"))
|
||||||
|
}
|
||||||
|
if c.Storage.Mode == StorageModeHierarchical {
|
||||||
|
errs = append(errs, errors.New("storage.mode must be 'flat' when backend is 's3'"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return errs
|
||||||
|
}
|
||||||
23
apps/backend/internal/drexa/err.go
Normal file
23
apps/backend/internal/drexa/err.go
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
package drexa
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ConfigError struct {
|
||||||
|
Errors []error
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewConfigError(errs ...error) *ConfigError {
|
||||||
|
return &ConfigError{Errors: errs}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ConfigError) Error() string {
|
||||||
|
sb := strings.Builder{}
|
||||||
|
sb.WriteString("invalid config:\n")
|
||||||
|
for _, err := range e.Errors {
|
||||||
|
sb.WriteString(fmt.Sprintf(" - %s\n", err.Error()))
|
||||||
|
}
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
72
apps/backend/internal/drexa/server.go
Normal file
72
apps/backend/internal/drexa/server.go
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
package drexa
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/get-drexa/drexa/internal/auth"
|
||||||
|
"github.com/get-drexa/drexa/internal/blob"
|
||||||
|
"github.com/get-drexa/drexa/internal/database"
|
||||||
|
"github.com/get-drexa/drexa/internal/upload"
|
||||||
|
"github.com/get-drexa/drexa/internal/user"
|
||||||
|
"github.com/get-drexa/drexa/internal/virtualfs"
|
||||||
|
"github.com/gofiber/fiber/v2"
|
||||||
|
"github.com/gofiber/fiber/v2/middleware/logger"
|
||||||
|
)
|
||||||
|
|
||||||
|
func NewServer(c Config) (*fiber.App, error) {
|
||||||
|
app := fiber.New()
|
||||||
|
db := database.NewFromPostgres(c.Database.PostgresURL)
|
||||||
|
|
||||||
|
app.Use(logger.New())
|
||||||
|
|
||||||
|
// Initialize blob store based on config
|
||||||
|
var blobStore blob.Store
|
||||||
|
switch c.Storage.Backend {
|
||||||
|
case StorageBackendFS:
|
||||||
|
blobStore = blob.NewFSStore(blob.FSStoreConfig{
|
||||||
|
Root: c.Storage.RootPath,
|
||||||
|
})
|
||||||
|
case StorageBackendS3:
|
||||||
|
return nil, fmt.Errorf("s3 storage backend not yet implemented")
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("unknown storage backend: %s", c.Storage.Backend)
|
||||||
|
}
|
||||||
|
|
||||||
|
err := blobStore.Initialize(context.Background())
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to initialize blob store: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize key resolver based on config
|
||||||
|
var keyResolver virtualfs.BlobKeyResolver
|
||||||
|
switch c.Storage.Mode {
|
||||||
|
case StorageModeFlat:
|
||||||
|
keyResolver = virtualfs.NewFlatKeyResolver()
|
||||||
|
case StorageModeHierarchical:
|
||||||
|
keyResolver = virtualfs.NewHierarchicalKeyResolver(db)
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("unknown storage mode: %s", c.Storage.Mode)
|
||||||
|
}
|
||||||
|
|
||||||
|
vfs, err := virtualfs.NewVirtualFS(db, blobStore, keyResolver)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to create virtual file system: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
userService := user.NewService()
|
||||||
|
authService := auth.NewService(userService, auth.TokenConfig{
|
||||||
|
Issuer: c.JWT.Issuer,
|
||||||
|
Audience: c.JWT.Audience,
|
||||||
|
SecretKey: c.JWT.SecretKey,
|
||||||
|
})
|
||||||
|
uploadService := upload.NewService(vfs, blobStore)
|
||||||
|
|
||||||
|
authMiddleware := auth.NewBearerAuthMiddleware(authService, db)
|
||||||
|
|
||||||
|
api := app.Group("/api")
|
||||||
|
auth.NewHTTPHandler(authService, db).RegisterRoutes(api)
|
||||||
|
upload.NewHTTPHandler(uploadService, authMiddleware).RegisterRoutes(api, authMiddleware)
|
||||||
|
|
||||||
|
return app, nil
|
||||||
|
}
|
||||||
23
apps/backend/internal/ioext/counting_reader.go
Normal file
23
apps/backend/internal/ioext/counting_reader.go
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
package ioext
|
||||||
|
|
||||||
|
import "io"
|
||||||
|
|
||||||
|
type CountingReader struct {
|
||||||
|
reader io.Reader
|
||||||
|
count int64
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewCountingReader(reader io.Reader) *CountingReader {
|
||||||
|
return &CountingReader{reader: reader}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *CountingReader) Read(p []byte) (n int, err error) {
|
||||||
|
n, err = r.reader.Read(p)
|
||||||
|
r.count += int64(n)
|
||||||
|
return n, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *CountingReader) Count() int64 {
|
||||||
|
return r.count
|
||||||
|
}
|
||||||
|
|
||||||
24
apps/backend/internal/ioext/limit_read_closer.go
Normal file
24
apps/backend/internal/ioext/limit_read_closer.go
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
package ioext
|
||||||
|
|
||||||
|
import "io"
|
||||||
|
|
||||||
|
type LimitReadCloser struct {
|
||||||
|
reader io.ReadCloser
|
||||||
|
limitReader io.Reader
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewLimitReadCloser(reader io.ReadCloser, length int64) *LimitReadCloser {
|
||||||
|
return &LimitReadCloser{
|
||||||
|
reader: reader,
|
||||||
|
limitReader: io.LimitReader(reader, length),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *LimitReadCloser) Read(p []byte) (n int, err error) {
|
||||||
|
return r.limitReader.Read(p)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *LimitReadCloser) Close() error {
|
||||||
|
return r.reader.Close()
|
||||||
|
}
|
||||||
|
|
||||||
138
apps/backend/internal/password/password.go
Normal file
138
apps/backend/internal/password/password.go
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
package password
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/rand"
|
||||||
|
"crypto/subtle"
|
||||||
|
"encoding/base64"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/crypto/argon2"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Hashed represents a securely hashed password.
|
||||||
|
// This type ensures plaintext passwords cannot be accidentally stored.
|
||||||
|
type Hashed string
|
||||||
|
|
||||||
|
// argon2id parameters
|
||||||
|
const (
|
||||||
|
memory = 64 * 1024
|
||||||
|
iterations = 3
|
||||||
|
parallelism = 2
|
||||||
|
saltLength = 16
|
||||||
|
keyLength = 32
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
ErrInvalidHash = errors.New("invalid hash format")
|
||||||
|
ErrIncompatibleHash = errors.New("incompatible hash algorithm")
|
||||||
|
ErrIncompatibleVersion = errors.New("incompatible argon2 version")
|
||||||
|
)
|
||||||
|
|
||||||
|
type argon2Hash struct {
|
||||||
|
memory uint32
|
||||||
|
iterations uint32
|
||||||
|
parallelism uint8
|
||||||
|
salt []byte
|
||||||
|
hash []byte
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hash securely hashes a plaintext password using argon2id.
|
||||||
|
func Hash(plain string) (Hashed, error) {
|
||||||
|
salt := make([]byte, saltLength)
|
||||||
|
if _, err := rand.Read(salt); err != nil {
|
||||||
|
return "", fmt.Errorf("failed to generate salt: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
hash := argon2.IDKey(
|
||||||
|
[]byte(plain),
|
||||||
|
salt,
|
||||||
|
iterations,
|
||||||
|
memory,
|
||||||
|
parallelism,
|
||||||
|
keyLength,
|
||||||
|
)
|
||||||
|
|
||||||
|
b64Salt := base64.RawStdEncoding.EncodeToString(salt)
|
||||||
|
b64Hash := base64.RawStdEncoding.EncodeToString(hash)
|
||||||
|
|
||||||
|
encoded := fmt.Sprintf(
|
||||||
|
"$argon2id$v=%d$m=%d,t=%d,p=%d$%s$%s",
|
||||||
|
argon2.Version,
|
||||||
|
memory,
|
||||||
|
iterations,
|
||||||
|
parallelism,
|
||||||
|
b64Salt,
|
||||||
|
b64Hash,
|
||||||
|
)
|
||||||
|
|
||||||
|
return Hashed(encoded), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify checks if a plaintext password matches a hashed password.
|
||||||
|
func Verify(plain string, hashed Hashed) (bool, error) {
|
||||||
|
h, err := decodeHash(string(hashed))
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
otherHash := argon2.IDKey(
|
||||||
|
[]byte(plain),
|
||||||
|
h.salt,
|
||||||
|
h.iterations,
|
||||||
|
h.memory,
|
||||||
|
h.parallelism,
|
||||||
|
uint32(len(h.hash)),
|
||||||
|
)
|
||||||
|
|
||||||
|
if subtle.ConstantTimeCompare(h.hash, otherHash) == 1 {
|
||||||
|
return true, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func decodeHash(encodedHash string) (*argon2Hash, error) {
|
||||||
|
parts := strings.Split(encodedHash, "$")
|
||||||
|
if len(parts) != 6 {
|
||||||
|
return nil, ErrInvalidHash
|
||||||
|
}
|
||||||
|
|
||||||
|
if parts[1] != "argon2id" {
|
||||||
|
return nil, ErrIncompatibleHash
|
||||||
|
}
|
||||||
|
|
||||||
|
var version int
|
||||||
|
if _, err := fmt.Sscanf(parts[2], "v=%d", &version); err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to parse version: %w", err)
|
||||||
|
}
|
||||||
|
if version != argon2.Version {
|
||||||
|
return nil, ErrIncompatibleVersion
|
||||||
|
}
|
||||||
|
|
||||||
|
h := &argon2Hash{}
|
||||||
|
if _, err := fmt.Sscanf(
|
||||||
|
parts[3],
|
||||||
|
"m=%d,t=%d,p=%d",
|
||||||
|
&h.memory,
|
||||||
|
&h.iterations,
|
||||||
|
&h.parallelism,
|
||||||
|
); err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to parse parameters: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
salt, err := base64.RawStdEncoding.DecodeString(parts[4])
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to decode salt: %w", err)
|
||||||
|
}
|
||||||
|
h.salt = salt
|
||||||
|
|
||||||
|
hash, err := base64.RawStdEncoding.DecodeString(parts[5])
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to decode hash: %w", err)
|
||||||
|
}
|
||||||
|
h.hash = hash
|
||||||
|
|
||||||
|
return h, nil
|
||||||
|
}
|
||||||
9
apps/backend/internal/upload/err.go
Normal file
9
apps/backend/internal/upload/err.go
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
package upload
|
||||||
|
|
||||||
|
import "errors"
|
||||||
|
|
||||||
|
// Sentinel errors returned by the upload service.
var (
	ErrNotFound           = errors.New("not found")
	ErrParentNotDirectory = errors.New("parent is not a directory")
	ErrConflict           = errors.New("node conflict")
)
|
||||||
99
apps/backend/internal/upload/http.go
Normal file
99
apps/backend/internal/upload/http.go
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
package upload
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
|
||||||
|
"github.com/get-drexa/drexa/internal/auth"
|
||||||
|
"github.com/gofiber/fiber/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
type createUploadRequest struct {
|
||||||
|
ParentID string `json:"parentId"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type updateUploadRequest struct {
|
||||||
|
Status Status `json:"status"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type HTTPHandler struct {
|
||||||
|
service *Service
|
||||||
|
authMiddleware fiber.Handler
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewHTTPHandler(s *Service, authMiddleware fiber.Handler) *HTTPHandler {
|
||||||
|
return &HTTPHandler{service: s, authMiddleware: authMiddleware}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *HTTPHandler) RegisterRoutes(api fiber.Router, authMiddleware fiber.Handler) {
|
||||||
|
upload := api.Group("/uploads")
|
||||||
|
upload.Use(authMiddleware)
|
||||||
|
|
||||||
|
upload.Post("/", h.Create)
|
||||||
|
upload.Put("/:uploadID/content", h.ReceiveContent)
|
||||||
|
upload.Patch("/:uploadID", h.Update)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *HTTPHandler) Create(c *fiber.Ctx) error {
|
||||||
|
u, err := auth.AuthenticatedUser(c)
|
||||||
|
if err != nil {
|
||||||
|
return c.Status(fiber.StatusUnauthorized).JSON(fiber.Map{"error": "Unauthorized"})
|
||||||
|
}
|
||||||
|
|
||||||
|
req := new(createUploadRequest)
|
||||||
|
if err := c.BodyParser(req); err != nil {
|
||||||
|
return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid request"})
|
||||||
|
}
|
||||||
|
|
||||||
|
upload, err := h.service.CreateUpload(c.Context(), u.ID, CreateUploadOptions{
|
||||||
|
ParentID: req.ParentID,
|
||||||
|
Name: req.Name,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Internal server error"})
|
||||||
|
}
|
||||||
|
|
||||||
|
return c.JSON(upload)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *HTTPHandler) ReceiveContent(c *fiber.Ctx) error {
|
||||||
|
u, err := auth.AuthenticatedUser(c)
|
||||||
|
if err != nil {
|
||||||
|
return c.Status(fiber.StatusUnauthorized).JSON(fiber.Map{"error": "Unauthorized"})
|
||||||
|
}
|
||||||
|
|
||||||
|
uploadID := c.Params("uploadID")
|
||||||
|
|
||||||
|
err = h.service.ReceiveUpload(c.Context(), u.ID, uploadID, c.Request().BodyStream())
|
||||||
|
defer c.Request().CloseBodyStream()
|
||||||
|
if err != nil {
|
||||||
|
return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Internal server error"})
|
||||||
|
}
|
||||||
|
|
||||||
|
return c.SendStatus(fiber.StatusNoContent)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *HTTPHandler) Update(c *fiber.Ctx) error {
|
||||||
|
u, err := auth.AuthenticatedUser(c)
|
||||||
|
if err != nil {
|
||||||
|
return c.Status(fiber.StatusUnauthorized).JSON(fiber.Map{"error": "Unauthorized"})
|
||||||
|
}
|
||||||
|
|
||||||
|
req := new(updateUploadRequest)
|
||||||
|
if err := c.BodyParser(req); err != nil {
|
||||||
|
return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid request"})
|
||||||
|
}
|
||||||
|
|
||||||
|
if req.Status == StatusCompleted {
|
||||||
|
upload, err := h.service.CompleteUpload(c.Context(), u.ID, c.Params("uploadID"))
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, ErrNotFound) {
|
||||||
|
return c.SendStatus(fiber.StatusNotFound)
|
||||||
|
}
|
||||||
|
return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Internal server error"})
|
||||||
|
}
|
||||||
|
return c.JSON(upload)
|
||||||
|
}
|
||||||
|
|
||||||
|
return c.SendStatus(fiber.StatusBadRequest)
|
||||||
|
}
|
||||||
133
apps/backend/internal/upload/service.go
Normal file
133
apps/backend/internal/upload/service.go
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
package upload
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"io"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/get-drexa/drexa/internal/blob"
|
||||||
|
"github.com/get-drexa/drexa/internal/virtualfs"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Service struct {
|
||||||
|
vfs *virtualfs.VirtualFS
|
||||||
|
blobStore blob.Store
|
||||||
|
|
||||||
|
pendingUploads sync.Map
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewService(vfs *virtualfs.VirtualFS, blobStore blob.Store) *Service {
|
||||||
|
return &Service{
|
||||||
|
vfs: vfs,
|
||||||
|
blobStore: blobStore,
|
||||||
|
|
||||||
|
pendingUploads: sync.Map{},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type CreateUploadOptions struct {
|
||||||
|
ParentID string
|
||||||
|
Name string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Service) CreateUpload(ctx context.Context, userID uuid.UUID, opts CreateUploadOptions) (*Upload, error) {
|
||||||
|
parentNode, err := s.vfs.FindNodeByPublicID(ctx, userID, opts.ParentID)
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, virtualfs.ErrNodeNotFound) {
|
||||||
|
return nil, ErrNotFound
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if parentNode.Kind != virtualfs.NodeKindDirectory {
|
||||||
|
return nil, ErrParentNotDirectory
|
||||||
|
}
|
||||||
|
|
||||||
|
node, err := s.vfs.CreateFile(ctx, userID, virtualfs.CreateFileOptions{
|
||||||
|
ParentID: parentNode.ID,
|
||||||
|
Name: opts.Name,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, virtualfs.ErrNodeConflict) {
|
||||||
|
return nil, ErrConflict
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
uploadURL, err := s.blobStore.GenerateUploadURL(ctx, node.BlobKey, blob.UploadURLOptions{
|
||||||
|
Duration: 1 * time.Hour,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
_ = s.vfs.PermanentlyDeleteNode(ctx, node)
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
upload := &Upload{
|
||||||
|
ID: node.PublicID,
|
||||||
|
Status: StatusPending,
|
||||||
|
TargetNode: node,
|
||||||
|
UploadURL: uploadURL,
|
||||||
|
}
|
||||||
|
|
||||||
|
s.pendingUploads.Store(upload.ID, upload)
|
||||||
|
|
||||||
|
return upload, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Service) ReceiveUpload(ctx context.Context, userID uuid.UUID, uploadID string, reader io.Reader) error {
|
||||||
|
n, ok := s.pendingUploads.Load(uploadID)
|
||||||
|
if !ok {
|
||||||
|
return ErrNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
upload, ok := n.(*Upload)
|
||||||
|
if !ok {
|
||||||
|
return ErrNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
if upload.TargetNode.UserID != userID {
|
||||||
|
return ErrNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
err := s.vfs.WriteFile(ctx, upload.TargetNode, virtualfs.FileContentFromReader(reader))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
upload.Status = StatusCompleted
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Service) CompleteUpload(ctx context.Context, userID uuid.UUID, uploadID string) (*Upload, error) {
|
||||||
|
n, ok := s.pendingUploads.Load(uploadID)
|
||||||
|
if !ok {
|
||||||
|
return nil, ErrNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
upload, ok := n.(*Upload)
|
||||||
|
if !ok {
|
||||||
|
return nil, ErrNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
if upload.TargetNode.UserID != userID {
|
||||||
|
return nil, ErrNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
if upload.TargetNode.Status == virtualfs.NodeStatusReady && upload.Status == StatusCompleted {
|
||||||
|
return upload, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
err := s.vfs.WriteFile(ctx, upload.TargetNode, virtualfs.FileContentFromBlobKey(upload.TargetNode.BlobKey))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
upload.Status = StatusCompleted
|
||||||
|
s.pendingUploads.Delete(uploadID)
|
||||||
|
|
||||||
|
return upload, nil
|
||||||
|
}
|
||||||
18
apps/backend/internal/upload/upload.go
Normal file
18
apps/backend/internal/upload/upload.go
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
package upload
|
||||||
|
|
||||||
|
import "github.com/get-drexa/drexa/internal/virtualfs"
|
||||||
|
|
||||||
|
type Status string
|
||||||
|
|
||||||
|
const (
|
||||||
|
StatusPending Status = "pending"
|
||||||
|
StatusCompleted Status = "completed"
|
||||||
|
StatusFailed Status = "failed"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Upload struct {
|
||||||
|
ID string `json:"id"`
|
||||||
|
Status Status `json:"status"`
|
||||||
|
TargetNode *virtualfs.Node `json:"-"`
|
||||||
|
UploadURL string `json:"uploadUrl"`
|
||||||
|
}
|
||||||
36
apps/backend/internal/user/err.go
Normal file
36
apps/backend/internal/user/err.go
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
package user
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
type NotFoundError struct {
|
||||||
|
// ID is the ID that was used to try to find the user.
|
||||||
|
// Not set if not tried.
|
||||||
|
id uuid.UUID
|
||||||
|
|
||||||
|
// Email is the email that was used to try to find the user.
|
||||||
|
// Not set if not tried.
|
||||||
|
email string
|
||||||
|
}
|
||||||
|
|
||||||
|
func newNotFoundError(id uuid.UUID, email string) *NotFoundError {
|
||||||
|
return &NotFoundError{id, email}
|
||||||
|
}
|
||||||
|
func (e *NotFoundError) Error() string {
|
||||||
|
return fmt.Sprintf("user not found: %v", e.id)
|
||||||
|
}
|
||||||
|
|
||||||
|
type AlreadyExistsError struct {
|
||||||
|
// Email is the email that was used to try to create the user.
|
||||||
|
Email string
|
||||||
|
}
|
||||||
|
|
||||||
|
func newAlreadyExistsError(email string) *AlreadyExistsError {
|
||||||
|
return &AlreadyExistsError{email}
|
||||||
|
}
|
||||||
|
func (e *AlreadyExistsError) Error() string {
|
||||||
|
return fmt.Sprintf("user with email %s already exists", e.Email)
|
||||||
|
}
|
||||||
76
apps/backend/internal/user/service.go
Normal file
76
apps/backend/internal/user/service.go
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
package user
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"errors"
|
||||||
|
|
||||||
|
"github.com/get-drexa/drexa/internal/database"
|
||||||
|
"github.com/get-drexa/drexa/internal/password"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Service struct{}
|
||||||
|
|
||||||
|
type UserRegistrationOptions struct {
|
||||||
|
Email string
|
||||||
|
DisplayName string
|
||||||
|
Password password.Hashed
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewService() *Service {
|
||||||
|
return &Service{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Service) RegisterUser(ctx context.Context, db bun.IDB, opts UserRegistrationOptions) (*User, error) {
|
||||||
|
uid, err := newUserID()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
u := User{
|
||||||
|
ID: uid,
|
||||||
|
Email: opts.Email,
|
||||||
|
DisplayName: opts.DisplayName,
|
||||||
|
Password: opts.Password,
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = db.NewInsert().Model(&u).Returning("*").Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if database.IsUniqueViolation(err) {
|
||||||
|
return nil, newAlreadyExistsError(u.Email)
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &u, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Service) UserByID(ctx context.Context, db bun.IDB, id uuid.UUID) (*User, error) {
|
||||||
|
var user User
|
||||||
|
err := db.NewSelect().Model(&user).Where("id = ?", id).Scan(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, sql.ErrNoRows) {
|
||||||
|
return nil, newNotFoundError(id, "")
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &user, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Service) UserByEmail(ctx context.Context, db bun.IDB, email string) (*User, error) {
|
||||||
|
var user User
|
||||||
|
err := db.NewSelect().Model(&user).Where("email = ?", email).Scan(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, sql.ErrNoRows) {
|
||||||
|
return nil, newNotFoundError(uuid.Nil, email)
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &user, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Service) UserExistsByEmail(ctx context.Context, db bun.IDB, email string) (bool, error) {
|
||||||
|
return db.NewSelect().Model(&User{}).Where("email = ?", email).Exists(ctx)
|
||||||
|
}
|
||||||
26
apps/backend/internal/user/user.go
Normal file
26
apps/backend/internal/user/user.go
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
package user
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/get-drexa/drexa/internal/password"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type User struct {
|
||||||
|
bun.BaseModel `bun:"users"`
|
||||||
|
|
||||||
|
ID uuid.UUID `bun:",pk,type:uuid" json:"id"`
|
||||||
|
DisplayName string `bun:"display_name" json:"displayName"`
|
||||||
|
Email string `bun:"email,unique,notnull" json:"email"`
|
||||||
|
Password password.Hashed `bun:"password,notnull" json:"-"`
|
||||||
|
StorageUsageBytes int64 `bun:"storage_usage_bytes,notnull" json:"storageUsageBytes"`
|
||||||
|
StorageQuotaBytes int64 `bun:"storage_quota_bytes,notnull" json:"storageQuotaBytes"`
|
||||||
|
CreatedAt time.Time `bun:"created_at,notnull" json:"createdAt"`
|
||||||
|
UpdatedAt time.Time `bun:"updated_at,notnull" json:"updatedAt"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func newUserID() (uuid.UUID, error) {
|
||||||
|
return uuid.NewV7()
|
||||||
|
}
|
||||||
9
apps/backend/internal/virtualfs/err.go
Normal file
9
apps/backend/internal/virtualfs/err.go
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
package virtualfs
|
||||||
|
|
||||||
|
import "errors"
|
||||||
|
|
||||||
|
// Sentinel errors returned by the virtual filesystem.
var (
	ErrNodeNotFound         = errors.New("node not found")
	ErrNodeConflict         = errors.New("node conflict")
	ErrUnsupportedOperation = errors.New("unsupported operation")
)
|
||||||
35
apps/backend/internal/virtualfs/flat_key_resolver.go
Normal file
35
apps/backend/internal/virtualfs/flat_key_resolver.go
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
package virtualfs
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/get-drexa/drexa/internal/blob"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
type FlatKeyResolver struct{}
|
||||||
|
|
||||||
|
var _ BlobKeyResolver = &FlatKeyResolver{}
|
||||||
|
|
||||||
|
func NewFlatKeyResolver() *FlatKeyResolver {
|
||||||
|
return &FlatKeyResolver{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *FlatKeyResolver) KeyMode() blob.KeyMode {
|
||||||
|
return blob.KeyModeStable
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *FlatKeyResolver) Resolve(ctx context.Context, node *Node) (blob.Key, error) {
|
||||||
|
if node.BlobKey == "" {
|
||||||
|
id, err := uuid.NewV7()
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return blob.Key(id.String()), nil
|
||||||
|
}
|
||||||
|
return node.BlobKey, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *FlatKeyResolver) ResolveDeletionKeys(ctx context.Context, node *Node, allKeys []blob.Key) (*DeletionPlan, error) {
|
||||||
|
return &DeletionPlan{Keys: allKeys}, nil
|
||||||
|
}
|
||||||
39
apps/backend/internal/virtualfs/hierarchical_key_resolver.go
Normal file
39
apps/backend/internal/virtualfs/hierarchical_key_resolver.go
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
package virtualfs
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/get-drexa/drexa/internal/blob"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type HierarchicalKeyResolver struct {
|
||||||
|
db *bun.DB
|
||||||
|
}
|
||||||
|
|
||||||
|
var _ BlobKeyResolver = &HierarchicalKeyResolver{}
|
||||||
|
|
||||||
|
func NewHierarchicalKeyResolver(db *bun.DB) *HierarchicalKeyResolver {
|
||||||
|
return &HierarchicalKeyResolver{db: db}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *HierarchicalKeyResolver) KeyMode() blob.KeyMode {
|
||||||
|
return blob.KeyModeDerived
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *HierarchicalKeyResolver) Resolve(ctx context.Context, node *Node) (blob.Key, error) {
|
||||||
|
path, err := buildNodeAbsolutePath(ctx, r.db, node.ID)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
return blob.Key(path), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *HierarchicalKeyResolver) ResolveDeletionKeys(ctx context.Context, node *Node, allKeys []blob.Key) (*DeletionPlan, error) {
|
||||||
|
path, err := buildNodeAbsolutePath(ctx, r.db, node.ID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &DeletionPlan{Prefix: blob.Key(path)}, nil
|
||||||
|
}
|
||||||
18
apps/backend/internal/virtualfs/key_resolver.go
Normal file
18
apps/backend/internal/virtualfs/key_resolver.go
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
package virtualfs
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/get-drexa/drexa/internal/blob"
|
||||||
|
)
|
||||||
|
|
||||||
|
type BlobKeyResolver interface {
|
||||||
|
KeyMode() blob.KeyMode
|
||||||
|
Resolve(ctx context.Context, node *Node) (blob.Key, error)
|
||||||
|
ResolveDeletionKeys(ctx context.Context, node *Node, allKeys []blob.Key) (*DeletionPlan, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
type DeletionPlan struct {
|
||||||
|
Prefix blob.Key
|
||||||
|
Keys []blob.Key
|
||||||
|
}
|
||||||
49
apps/backend/internal/virtualfs/node.go
Normal file
49
apps/backend/internal/virtualfs/node.go
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
package virtualfs
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/get-drexa/drexa/internal/blob"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type NodeKind string
|
||||||
|
|
||||||
|
const (
|
||||||
|
NodeKindFile NodeKind = "file"
|
||||||
|
NodeKindDirectory NodeKind = "directory"
|
||||||
|
)
|
||||||
|
|
||||||
|
type NodeStatus string
|
||||||
|
|
||||||
|
const (
|
||||||
|
NodeStatusPending NodeStatus = "pending"
|
||||||
|
NodeStatusReady NodeStatus = "ready"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Node struct {
|
||||||
|
bun.BaseModel `bun:"vfs_nodes"`
|
||||||
|
|
||||||
|
ID uuid.UUID `bun:",pk,type:uuid"`
|
||||||
|
PublicID string `bun:"public_id,notnull"`
|
||||||
|
UserID uuid.UUID `bun:"user_id,notnull"`
|
||||||
|
ParentID uuid.UUID `bun:"parent_id,notnull"`
|
||||||
|
Kind NodeKind `bun:"kind,notnull"`
|
||||||
|
Status NodeStatus `bun:"status,notnull"`
|
||||||
|
Name string `bun:"name,notnull"`
|
||||||
|
|
||||||
|
BlobKey blob.Key `bun:"blob_key"`
|
||||||
|
Size int64 `bun:"size"`
|
||||||
|
MimeType string `bun:"mime_type"`
|
||||||
|
|
||||||
|
CreatedAt time.Time `bun:"created_at,notnull"`
|
||||||
|
UpdatedAt time.Time `bun:"updated_at,notnull"`
|
||||||
|
DeletedAt time.Time `bun:"deleted_at"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsAccessible returns true if the node can be accessed.
|
||||||
|
// If the node is not ready or if it is soft deleted, it cannot be accessed.
|
||||||
|
func (n *Node) IsAccessible() bool {
|
||||||
|
return n.DeletedAt.IsZero() && n.Status == NodeStatusReady
|
||||||
|
}
|
||||||
42
apps/backend/internal/virtualfs/path.go
Normal file
42
apps/backend/internal/virtualfs/path.go
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
package virtualfs
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"errors"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
const absolutePathQuery = `WITH RECURSIVE path AS (
|
||||||
|
SELECT id, parent_id, name, 1 as depth
|
||||||
|
FROM vfs_nodes WHERE id = $1 AND deleted_at IS NULL
|
||||||
|
|
||||||
|
UNION ALL
|
||||||
|
|
||||||
|
SELECT n.id, n.parent_id, n.name, p.depth + 1
|
||||||
|
FROM vfs_nodes n
|
||||||
|
JOIN path p ON n.id = p.parent_id
|
||||||
|
WHERE n.deleted_at IS NULL
|
||||||
|
)
|
||||||
|
SELECT name FROM path
|
||||||
|
WHERE EXISTS (SELECT 1 FROM path WHERE parent_id IS NULL)
|
||||||
|
ORDER BY depth DESC;`
|
||||||
|
|
||||||
|
// JoinPath joins path segments with "/" separators.
func JoinPath(parts ...string) string {
	return strings.Join(parts, "/")
}
|
||||||
|
|
||||||
|
func buildNodeAbsolutePath(ctx context.Context, db *bun.DB, nodeID uuid.UUID) (string, error) {
|
||||||
|
var path []string
|
||||||
|
err := db.NewRaw(absolutePathQuery, nodeID).Scan(ctx, &path)
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, sql.ErrNoRows) {
|
||||||
|
return "", ErrNodeNotFound
|
||||||
|
}
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return JoinPath(path...), nil
|
||||||
|
}
|
||||||
486
apps/backend/internal/virtualfs/vfs.go
Normal file
486
apps/backend/internal/virtualfs/vfs.go
Normal file
@@ -0,0 +1,486 @@
|
|||||||
|
package virtualfs
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"crypto/rand"
|
||||||
|
"database/sql"
|
||||||
|
"encoding/binary"
|
||||||
|
"errors"
|
||||||
|
"io"
|
||||||
|
|
||||||
|
"github.com/gabriel-vasile/mimetype"
|
||||||
|
"github.com/get-drexa/drexa/internal/blob"
|
||||||
|
"github.com/get-drexa/drexa/internal/database"
|
||||||
|
"github.com/get-drexa/drexa/internal/ioext"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/sqids/sqids-go"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type VirtualFS struct {
|
||||||
|
db *bun.DB
|
||||||
|
blobStore blob.Store
|
||||||
|
keyResolver BlobKeyResolver
|
||||||
|
|
||||||
|
sqid *sqids.Sqids
|
||||||
|
}
|
||||||
|
|
||||||
|
type CreateNodeOptions struct {
|
||||||
|
ParentID uuid.UUID
|
||||||
|
Kind NodeKind
|
||||||
|
Name string
|
||||||
|
}
|
||||||
|
|
||||||
|
type CreateFileOptions struct {
|
||||||
|
ParentID uuid.UUID
|
||||||
|
Name string
|
||||||
|
}
|
||||||
|
|
||||||
|
type FileContent struct {
|
||||||
|
reader io.Reader
|
||||||
|
blobKey blob.Key
|
||||||
|
}
|
||||||
|
|
||||||
|
func FileContentFromReader(reader io.Reader) FileContent {
|
||||||
|
return FileContent{reader: reader}
|
||||||
|
}
|
||||||
|
|
||||||
|
func FileContentFromBlobKey(blobKey blob.Key) FileContent {
|
||||||
|
return FileContent{blobKey: blobKey}
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewVirtualFS(db *bun.DB, blobStore blob.Store, keyResolver BlobKeyResolver) (*VirtualFS, error) {
|
||||||
|
sqid, err := sqids.New()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &VirtualFS{
|
||||||
|
db: db,
|
||||||
|
blobStore: blobStore,
|
||||||
|
keyResolver: keyResolver,
|
||||||
|
sqid: sqid,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vfs *VirtualFS) FindNode(ctx context.Context, userID, fileID string) (*Node, error) {
|
||||||
|
var node Node
|
||||||
|
err := vfs.db.NewSelect().Model(&node).
|
||||||
|
Where("user_id = ?", userID).
|
||||||
|
Where("id = ?", fileID).
|
||||||
|
Where("status = ?", NodeStatusReady).
|
||||||
|
Where("deleted_at IS NULL").
|
||||||
|
Scan(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, sql.ErrNoRows) {
|
||||||
|
return nil, ErrNodeNotFound
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &node, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vfs *VirtualFS) FindNodeByPublicID(ctx context.Context, userID uuid.UUID, publicID string) (*Node, error) {
|
||||||
|
var node Node
|
||||||
|
err := vfs.db.NewSelect().Model(&node).
|
||||||
|
Where("user_id = ?", userID).
|
||||||
|
Where("public_id = ?", publicID).
|
||||||
|
Where("status = ?", NodeStatusReady).
|
||||||
|
Where("deleted_at IS NULL").
|
||||||
|
Scan(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, sql.ErrNoRows) {
|
||||||
|
return nil, ErrNodeNotFound
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &node, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vfs *VirtualFS) ListChildren(ctx context.Context, node *Node) ([]*Node, error) {
|
||||||
|
if !node.IsAccessible() {
|
||||||
|
return nil, ErrNodeNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
var nodes []*Node
|
||||||
|
err := vfs.db.NewSelect().Model(&nodes).
|
||||||
|
Where("user_id = ?", node.UserID).
|
||||||
|
Where("parent_id = ?", node.ID).
|
||||||
|
Where("status = ?", NodeStatusReady).
|
||||||
|
Where("deleted_at IS NULL").
|
||||||
|
Scan(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, sql.ErrNoRows) {
|
||||||
|
return make([]*Node, 0), nil
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nodes, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vfs *VirtualFS) CreateFile(ctx context.Context, userID uuid.UUID, opts CreateFileOptions) (*Node, error) {
|
||||||
|
pid, err := vfs.generatePublicID()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
node := Node{
|
||||||
|
PublicID: pid,
|
||||||
|
UserID: userID,
|
||||||
|
ParentID: opts.ParentID,
|
||||||
|
Kind: NodeKindFile,
|
||||||
|
Status: NodeStatusPending,
|
||||||
|
Name: opts.Name,
|
||||||
|
}
|
||||||
|
|
||||||
|
if vfs.keyResolver.KeyMode() == blob.KeyModeStable {
|
||||||
|
node.BlobKey, err = vfs.keyResolver.Resolve(ctx, &node)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = vfs.db.NewInsert().Model(&node).Returning("*").Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if database.IsUniqueViolation(err) {
|
||||||
|
return nil, ErrNodeConflict
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &node, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vfs *VirtualFS) WriteFile(ctx context.Context, node *Node, content FileContent) error {
|
||||||
|
if content.reader == nil && content.blobKey.IsNil() {
|
||||||
|
return blob.ErrInvalidFileContent
|
||||||
|
}
|
||||||
|
|
||||||
|
if !node.DeletedAt.IsZero() {
|
||||||
|
return ErrNodeNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
setCols := make([]string, 0, 4)
|
||||||
|
|
||||||
|
if content.reader != nil {
|
||||||
|
key, err := vfs.keyResolver.Resolve(ctx, node)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
buf := make([]byte, 3072)
|
||||||
|
n, err := io.ReadFull(content.reader, buf)
|
||||||
|
if err != nil && err != io.EOF && err != io.ErrUnexpectedEOF {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
buf = buf[:n]
|
||||||
|
|
||||||
|
mt := mimetype.Detect(buf)
|
||||||
|
cr := ioext.NewCountingReader(io.MultiReader(bytes.NewReader(buf), content.reader))
|
||||||
|
|
||||||
|
err = vfs.blobStore.Put(ctx, key, cr)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if vfs.keyResolver.KeyMode() == blob.KeyModeStable {
|
||||||
|
node.BlobKey = key
|
||||||
|
setCols = append(setCols, "blob_key")
|
||||||
|
}
|
||||||
|
|
||||||
|
node.MimeType = mt.String()
|
||||||
|
node.Size = cr.Count()
|
||||||
|
node.Status = NodeStatusReady
|
||||||
|
|
||||||
|
setCols = append(setCols, "mime_type", "size", "status")
|
||||||
|
} else {
|
||||||
|
node.BlobKey = content.blobKey
|
||||||
|
|
||||||
|
b, err := vfs.blobStore.ReadRange(ctx, content.blobKey, 0, 3072)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer b.Close()
|
||||||
|
|
||||||
|
buf := make([]byte, 3072)
|
||||||
|
n, err := io.ReadFull(b, buf)
|
||||||
|
if err != nil && err != io.EOF && err != io.ErrUnexpectedEOF {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
buf = buf[:n]
|
||||||
|
mt := mimetype.Detect(buf)
|
||||||
|
node.MimeType = mt.String()
|
||||||
|
node.Status = NodeStatusReady
|
||||||
|
|
||||||
|
s, err := vfs.blobStore.ReadSize(ctx, content.blobKey)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
node.Size = s
|
||||||
|
|
||||||
|
setCols = append(setCols, "mime_type", "blob_key", "size", "status")
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := vfs.db.NewUpdate().Model(&node).
|
||||||
|
Column(setCols...).
|
||||||
|
WherePK().
|
||||||
|
Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vfs *VirtualFS) CreateDirectory(ctx context.Context, userID uuid.UUID, parentID uuid.UUID, name string) (*Node, error) {
|
||||||
|
pid, err := vfs.generatePublicID()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
node := Node{
|
||||||
|
PublicID: pid,
|
||||||
|
UserID: userID,
|
||||||
|
ParentID: parentID,
|
||||||
|
Kind: NodeKindDirectory,
|
||||||
|
Status: NodeStatusReady,
|
||||||
|
Name: name,
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = vfs.db.NewInsert().Model(&node).Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if database.IsUniqueViolation(err) {
|
||||||
|
return nil, ErrNodeConflict
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &node, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vfs *VirtualFS) SoftDeleteNode(ctx context.Context, node *Node) error {
|
||||||
|
if !node.IsAccessible() {
|
||||||
|
return ErrNodeNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := vfs.db.NewUpdate().Model(node).
|
||||||
|
WherePK().
|
||||||
|
Where("deleted_at IS NULL").
|
||||||
|
Where("status = ?", NodeStatusReady).
|
||||||
|
Set("deleted_at = NOW()").
|
||||||
|
Returning("deleted_at").
|
||||||
|
Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, sql.ErrNoRows) {
|
||||||
|
return ErrNodeNotFound
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vfs *VirtualFS) RestoreNode(ctx context.Context, node *Node) error {
|
||||||
|
if node.Status != NodeStatusReady {
|
||||||
|
return ErrNodeNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := vfs.db.NewUpdate().Model(node).
|
||||||
|
WherePK().
|
||||||
|
Where("deleted_at IS NOT NULL").
|
||||||
|
Set("deleted_at = NULL").
|
||||||
|
Returning("deleted_at").
|
||||||
|
Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, sql.ErrNoRows) {
|
||||||
|
return ErrNodeNotFound
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vfs *VirtualFS) RenameNode(ctx context.Context, node *Node, name string) error {
|
||||||
|
if !node.IsAccessible() {
|
||||||
|
return ErrNodeNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := vfs.db.NewUpdate().Model(node).
|
||||||
|
WherePK().
|
||||||
|
Where("status = ?", NodeStatusReady).
|
||||||
|
Where("deleted_at IS NULL").
|
||||||
|
Set("name = ?", name).
|
||||||
|
Returning("name, updated_at").
|
||||||
|
Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, sql.ErrNoRows) {
|
||||||
|
return ErrNodeNotFound
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vfs *VirtualFS) MoveNode(ctx context.Context, node *Node, parentID uuid.UUID) error {
|
||||||
|
if !node.IsAccessible() {
|
||||||
|
return ErrNodeNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
oldKey, err := vfs.keyResolver.Resolve(ctx, node)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = vfs.db.NewUpdate().Model(node).
|
||||||
|
WherePK().
|
||||||
|
Where("status = ?", NodeStatusReady).
|
||||||
|
Where("deleted_at IS NULL").
|
||||||
|
Set("parent_id = ?", parentID).
|
||||||
|
Returning("parent_id, updated_at").
|
||||||
|
Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, sql.ErrNoRows) {
|
||||||
|
return ErrNodeNotFound
|
||||||
|
}
|
||||||
|
if database.IsUniqueViolation(err) {
|
||||||
|
return ErrNodeConflict
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
newKey, err := vfs.keyResolver.Resolve(ctx, node)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = vfs.blobStore.Move(ctx, oldKey, newKey)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if vfs.keyResolver.KeyMode() == blob.KeyModeStable {
|
||||||
|
node.BlobKey = newKey
|
||||||
|
_, err = vfs.db.NewUpdate().Model(node).
|
||||||
|
WherePK().
|
||||||
|
Set("blob_key = ?", newKey).
|
||||||
|
Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vfs *VirtualFS) AbsolutePath(ctx context.Context, node *Node) (string, error) {
|
||||||
|
if !node.IsAccessible() {
|
||||||
|
return "", ErrNodeNotFound
|
||||||
|
}
|
||||||
|
return buildNodeAbsolutePath(ctx, vfs.db, node.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vfs *VirtualFS) PermanentlyDeleteNode(ctx context.Context, node *Node) error {
|
||||||
|
if !node.IsAccessible() {
|
||||||
|
return ErrNodeNotFound
|
||||||
|
}
|
||||||
|
switch node.Kind {
|
||||||
|
case NodeKindFile:
|
||||||
|
return vfs.permanentlyDeleteFileNode(ctx, node)
|
||||||
|
case NodeKindDirectory:
|
||||||
|
return vfs.permanentlyDeleteDirectoryNode(ctx, node)
|
||||||
|
default:
|
||||||
|
return ErrUnsupportedOperation
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vfs *VirtualFS) permanentlyDeleteFileNode(ctx context.Context, node *Node) error {
|
||||||
|
err := vfs.blobStore.Delete(ctx, node.BlobKey)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = vfs.db.NewDelete().Model(node).WherePK().Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vfs *VirtualFS) permanentlyDeleteDirectoryNode(ctx context.Context, node *Node) error {
|
||||||
|
const descendantsQuery = `WITH RECURSIVE descendants AS (
|
||||||
|
SELECT id, blob_key FROM vfs_nodes WHERE id = ?
|
||||||
|
UNION ALL
|
||||||
|
SELECT n.id, n.blob_key FROM vfs_nodes n
|
||||||
|
JOIN descendants d ON n.parent_id = d.id
|
||||||
|
)
|
||||||
|
SELECT id, blob_key FROM descendants`
|
||||||
|
|
||||||
|
type nodeRecord struct {
|
||||||
|
ID uuid.UUID `bun:"id"`
|
||||||
|
BlobKey blob.Key `bun:"blob_key"`
|
||||||
|
}
|
||||||
|
|
||||||
|
tx, err := vfs.db.BeginTx(ctx, nil)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer tx.Rollback()
|
||||||
|
|
||||||
|
var records []nodeRecord
|
||||||
|
err = tx.NewRaw(descendantsQuery, node.ID).Scan(ctx, &records)
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, sql.ErrNoRows) {
|
||||||
|
return ErrNodeNotFound
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(records) == 0 {
|
||||||
|
return ErrNodeNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
nodeIDs := make([]uuid.UUID, 0, len(records))
|
||||||
|
blobKeys := make([]blob.Key, 0, len(records))
|
||||||
|
for _, r := range records {
|
||||||
|
nodeIDs = append(nodeIDs, r.ID)
|
||||||
|
if !r.BlobKey.IsNil() {
|
||||||
|
blobKeys = append(blobKeys, r.BlobKey)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
plan, err := vfs.keyResolver.ResolveDeletionKeys(ctx, node, blobKeys)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = tx.NewDelete().
|
||||||
|
Model((*Node)(nil)).
|
||||||
|
Where("id IN (?)", bun.In(nodeIDs)).
|
||||||
|
Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if !plan.Prefix.IsNil() {
|
||||||
|
_ = vfs.blobStore.DeletePrefix(ctx, plan.Prefix)
|
||||||
|
} else {
|
||||||
|
for _, key := range plan.Keys {
|
||||||
|
_ = vfs.blobStore.Delete(ctx, key)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return tx.Commit()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vfs *VirtualFS) generatePublicID() (string, error) {
|
||||||
|
var b [8]byte
|
||||||
|
_, err := rand.Read(b[:])
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
n := binary.BigEndian.Uint64(b[:])
|
||||||
|
return vfs.sqid.Encode([]uint64{n})
|
||||||
|
}
|
||||||
60
apps/cli/README.md
Normal file
60
apps/cli/README.md
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
# @drexa/cli
|
||||||
|
|
||||||
|
Admin CLI tool for managing Drexa resources.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
From the project root:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bun drexa <command> [subcommand] [options]
|
||||||
|
```
|
||||||
|
|
||||||
|
## Commands
|
||||||
|
|
||||||
|
### `generate apikey`
|
||||||
|
|
||||||
|
Generate a new API key for authentication.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bun drexa generate apikey
|
||||||
|
```
|
||||||
|
|
||||||
|
The command will interactively prompt you for (using Node.js readline):
|
||||||
|
- **Prefix**: A short identifier for the key (e.g., 'proxy', 'admin'). Cannot contain dashes.
|
||||||
|
- **Key byte length**: Length of the key in bytes (default: 32)
|
||||||
|
- **Description**: A description of what this key is for
|
||||||
|
- **Expiration date**: Optional expiration date in YYYY-MM-DD format
|
||||||
|
|
||||||
|
The command will output:
|
||||||
|
- **Unhashed key**: Save this securely - it won't be shown again
|
||||||
|
- **Hashed key**: Store this in your database
|
||||||
|
- **Description**: The description you provided
|
||||||
|
- **Expiration date**: When the key expires (if set)
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
Run the CLI directly:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bun run apps/cli/index.ts <command>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Project Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
apps/cli/
|
||||||
|
├── index.ts # Main entry point
|
||||||
|
├── prompts.ts # Interactive prompt utilities
|
||||||
|
└── commands/ # Command structure mirrors CLI structure
|
||||||
|
└── generate/
|
||||||
|
├── index.ts # Generate command group
|
||||||
|
└── apikey.ts # API key generation command
|
||||||
|
```
|
||||||
|
|
||||||
|
## Adding New Commands
|
||||||
|
|
||||||
|
1. Create a new directory under `commands/` for command groups
|
||||||
|
2. Create command files following the pattern in `commands/generate/apikey.ts`
|
||||||
|
3. Export commands from an `index.ts` in the command group directory
|
||||||
|
4. Register the command group in the main `index.ts`
|
||||||
68
apps/cli/commands/generate/apikey.ts
Normal file
68
apps/cli/commands/generate/apikey.ts
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
import { generateApiKey, newPrefix } from "@drexa/auth"
|
||||||
|
import chalk from "chalk"
|
||||||
|
import { Command } from "commander"
|
||||||
|
import { promptNumber, promptOptionalDate, promptText } from "../../prompts.ts"
|
||||||
|
|
||||||
|
export const apikeyCommand = new Command("apikey")
|
||||||
|
.description("Generate a new API key")
|
||||||
|
.action(async () => {
|
||||||
|
console.log(chalk.bold.blue("\n🔑 Generate API Key\n"))
|
||||||
|
|
||||||
|
// Prompt for all required information
|
||||||
|
const prefixInput = await promptText(
|
||||||
|
"Enter API key prefix (e.g., 'proxy', 'admin'):",
|
||||||
|
)
|
||||||
|
const prefix = newPrefix(prefixInput)
|
||||||
|
|
||||||
|
if (!prefix) {
|
||||||
|
console.error(
|
||||||
|
chalk.red(
|
||||||
|
'✗ Invalid prefix: cannot contain "-" character. Please use alphanumeric characters only.',
|
||||||
|
),
|
||||||
|
)
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
const keyByteLength = await promptNumber("Enter key byte length:", 32)
|
||||||
|
const description = await promptText("Enter description:")
|
||||||
|
const expiresAt = await promptOptionalDate("Enter expiration date")
|
||||||
|
|
||||||
|
console.log(chalk.dim("\n⏳ Generating API key...\n"))
|
||||||
|
|
||||||
|
// Generate the API key
|
||||||
|
const result = await generateApiKey({
|
||||||
|
prefix,
|
||||||
|
keyByteLength,
|
||||||
|
description,
|
||||||
|
expiresAt,
|
||||||
|
})
|
||||||
|
|
||||||
|
// Display results
|
||||||
|
console.log(chalk.green.bold("✓ API Key Generated Successfully!\n"))
|
||||||
|
console.log(chalk.gray("─".repeat(60)))
|
||||||
|
console.log(
|
||||||
|
chalk.yellow.bold(
|
||||||
|
"\n⚠️ IMPORTANT: Save the unhashed key now. It won't be shown again!\n",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
console.log(chalk.bold("Unhashed Key ") + chalk.dim("(save this):"))
|
||||||
|
console.log(chalk.green(` ${result.unhashedKey}\n`))
|
||||||
|
console.log(chalk.gray("─".repeat(60)))
|
||||||
|
console.log(
|
||||||
|
chalk.bold("\nHashed Key ") +
|
||||||
|
chalk.dim("(store this in your database):"),
|
||||||
|
)
|
||||||
|
console.log(chalk.dim(` ${result.hashedKey}\n`))
|
||||||
|
console.log(chalk.bold("Description:"))
|
||||||
|
console.log(chalk.white(` ${result.description}\n`))
|
||||||
|
|
||||||
|
if (result.expiresAt) {
|
||||||
|
console.log(chalk.bold("Expires At:"))
|
||||||
|
console.log(chalk.yellow(` ${result.expiresAt.toISOString()}\n`))
|
||||||
|
} else {
|
||||||
|
console.log(chalk.bold("Expires At:"))
|
||||||
|
console.log(chalk.dim(" Never\n"))
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(chalk.gray("─".repeat(60)) + "\n")
|
||||||
|
})
|
||||||
6
apps/cli/commands/generate/index.ts
Normal file
6
apps/cli/commands/generate/index.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
import { Command } from "commander"
|
||||||
|
import { apikeyCommand } from "./apikey.ts"
|
||||||
|
|
||||||
|
export const generateCommand = new Command("generate")
|
||||||
|
.description("Generate various resources")
|
||||||
|
.addCommand(apikeyCommand)
|
||||||
17
apps/cli/index.ts
Executable file
17
apps/cli/index.ts
Executable file
@@ -0,0 +1,17 @@
|
|||||||
|
#!/usr/bin/env bun
|
||||||
|
|
||||||
|
import { Command } from "commander"
|
||||||
|
import { generateCommand } from "./commands/generate/index.ts"
|
||||||
|
|
||||||
|
const program = new Command()
|
||||||
|
|
||||||
|
program
|
||||||
|
.name("drexa")
|
||||||
|
.description("Drexa CLI - Admin tools for managing Drexa resources")
|
||||||
|
.version("0.1.0")
|
||||||
|
|
||||||
|
// Register command groups
|
||||||
|
program.addCommand(generateCommand)
|
||||||
|
|
||||||
|
// Parse command line arguments
|
||||||
|
program.parse()
|
||||||
23
apps/cli/package.json
Normal file
23
apps/cli/package.json
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
"name": "@drexa/cli",
|
||||||
|
"version": "0.1.0",
|
||||||
|
"private": true,
|
||||||
|
"type": "module",
|
||||||
|
"bin": {
|
||||||
|
"drexa": "./index.ts"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"cli": "bun run index.ts"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@drexa/auth": "workspace:*",
|
||||||
|
"chalk": "^5.3.0",
|
||||||
|
"commander": "^12.1.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/bun": "latest"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"typescript": "^5"
|
||||||
|
}
|
||||||
|
}
|
||||||
111
apps/cli/prompts.ts
Normal file
111
apps/cli/prompts.ts
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
import * as readline from "node:readline/promises"
|
||||||
|
import chalk from "chalk"
|
||||||
|
|
||||||
|
function createReadlineInterface() {
|
||||||
|
return readline.createInterface({
|
||||||
|
input: process.stdin,
|
||||||
|
output: process.stdout,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function promptText(message: string): Promise<string> {
|
||||||
|
const rl = createReadlineInterface()
|
||||||
|
try {
|
||||||
|
const input = await rl.question(chalk.cyan(`${message} `))
|
||||||
|
|
||||||
|
if (!input || input.trim() === "") {
|
||||||
|
console.error(chalk.red("✗ Input is required"))
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
|
return input.trim()
|
||||||
|
} finally {
|
||||||
|
rl.close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function promptNumber(
|
||||||
|
message: string,
|
||||||
|
defaultValue?: number,
|
||||||
|
): Promise<number> {
|
||||||
|
const rl = createReadlineInterface()
|
||||||
|
try {
|
||||||
|
const defaultStr = defaultValue
|
||||||
|
? chalk.dim(` (default: ${defaultValue})`)
|
||||||
|
: ""
|
||||||
|
const input = await rl.question(chalk.cyan(`${message}${defaultStr} `))
|
||||||
|
|
||||||
|
if ((!input || input.trim() === "") && defaultValue !== undefined) {
|
||||||
|
return defaultValue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!input || input.trim() === "") {
|
||||||
|
console.error(chalk.red("✗ Input is required"))
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
const num = Number.parseInt(input.trim(), 10)
|
||||||
|
if (Number.isNaN(num) || num <= 0) {
|
||||||
|
console.error(chalk.red("✗ Please enter a valid positive number"))
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
return num
|
||||||
|
} finally {
|
||||||
|
rl.close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function promptOptionalDate(
|
||||||
|
message: string,
|
||||||
|
): Promise<Date | undefined> {
|
||||||
|
const rl = createReadlineInterface()
|
||||||
|
try {
|
||||||
|
const input = await rl.question(
|
||||||
|
chalk.cyan(`${message} `) +
|
||||||
|
chalk.dim("(optional, format: YYYY-MM-DD) "),
|
||||||
|
)
|
||||||
|
|
||||||
|
if (!input || input.trim() === "") {
|
||||||
|
return undefined
|
||||||
|
}
|
||||||
|
|
||||||
|
const date = new Date(input.trim())
|
||||||
|
if (Number.isNaN(date.getTime())) {
|
||||||
|
console.error(
|
||||||
|
chalk.red("✗ Invalid date format. Please use YYYY-MM-DD"),
|
||||||
|
)
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (date < new Date()) {
|
||||||
|
console.error(chalk.red("✗ Expiration date must be in the future"))
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
return date
|
||||||
|
} finally {
|
||||||
|
rl.close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function promptConfirm(
|
||||||
|
message: string,
|
||||||
|
defaultValue = false,
|
||||||
|
): Promise<boolean> {
|
||||||
|
const rl = createReadlineInterface()
|
||||||
|
try {
|
||||||
|
const defaultStr = defaultValue
|
||||||
|
? chalk.dim(" (Y/n)")
|
||||||
|
: chalk.dim(" (y/N)")
|
||||||
|
const input = await rl.question(chalk.cyan(`${message}${defaultStr} `))
|
||||||
|
|
||||||
|
if (!input || input.trim() === "") {
|
||||||
|
return defaultValue
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalized = input.toLowerCase().trim()
|
||||||
|
return normalized === "y" || normalized === "yes"
|
||||||
|
} finally {
|
||||||
|
rl.close()
|
||||||
|
}
|
||||||
|
}
|
||||||
83
apps/cli/test-example.md
Normal file
83
apps/cli/test-example.md
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
# Testing the CLI
|
||||||
|
|
||||||
|
To test the API key generation interactively, run:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bun drexa generate apikey
|
||||||
|
```
|
||||||
|
|
||||||
|
## Example Session
|
||||||
|
|
||||||
|
The CLI now uses **chalk** for beautiful colored output!
|
||||||
|
|
||||||
|
```
|
||||||
|
$ bun drexa generate apikey
|
||||||
|
|
||||||
|
🔑 Generate API Key
|
||||||
|
|
||||||
|
Enter API key prefix (e.g., 'proxy', 'admin'): testkey
|
||||||
|
Enter key byte length: (default: 32)
|
||||||
|
Enter description: Test API Key for development
|
||||||
|
Enter expiration date (optional, format: YYYY-MM-DD):
|
||||||
|
|
||||||
|
⏳ Generating API key...
|
||||||
|
|
||||||
|
✓ API Key Generated Successfully!
|
||||||
|
|
||||||
|
────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
⚠️ IMPORTANT: Save the unhashed key now. It won't be shown again!
|
||||||
|
|
||||||
|
Unhashed Key (save this):
|
||||||
|
sk-testkey-AbCdEfGhIjKlMnOpQrStUvWxYz0123456789
|
||||||
|
|
||||||
|
────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
Hashed Key (store this in your database):
|
||||||
|
$argon2id$v=19$m=4,t=3,p=1$...
|
||||||
|
|
||||||
|
Description:
|
||||||
|
Test API Key for development
|
||||||
|
|
||||||
|
Expires At:
|
||||||
|
Never
|
||||||
|
|
||||||
|
────────────────────────────────────────────────────────────
|
||||||
|
```
|
||||||
|
|
||||||
|
### Color Scheme
|
||||||
|
- **Prompts**: Cyan text with dimmed hints
|
||||||
|
- **Success messages**: Green with checkmark
|
||||||
|
- **Warnings**: Yellow with warning icon
|
||||||
|
- **Errors**: Red with X mark
|
||||||
|
- **Important data**: Green (unhashed key), dimmed (hashed key)
|
||||||
|
- **Separators**: Gray lines
|
||||||
|
|
||||||
|
## Testing with Invalid Input
|
||||||
|
|
||||||
|
### Invalid prefix (contains dash)
|
||||||
|
```bash
|
||||||
|
$ bun drexa generate apikey
|
||||||
|
Enter API key prefix (e.g., 'proxy', 'admin'): test-key
|
||||||
|
✗ Invalid prefix: cannot contain "-" character. Please use alphanumeric characters only.
|
||||||
|
```
|
||||||
|
|
||||||
|
### Invalid key byte length
|
||||||
|
```bash
|
||||||
|
$ bun drexa generate apikey
|
||||||
|
Enter API key prefix (e.g., 'proxy', 'admin'): testkey
|
||||||
|
Enter key byte length: (default: 32) -5
|
||||||
|
✗ Please enter a valid positive number
|
||||||
|
```
|
||||||
|
|
||||||
|
### Invalid date format
|
||||||
|
```bash
|
||||||
|
$ bun drexa generate apikey
|
||||||
|
Enter API key prefix (e.g., 'proxy', 'admin'): testkey
|
||||||
|
Enter key byte length: (default: 32)
|
||||||
|
Enter description: Test
|
||||||
|
Enter expiration date (optional, format: YYYY-MM-DD): invalid-date
|
||||||
|
✗ Invalid date format. Please use YYYY-MM-DD
|
||||||
|
```
|
||||||
|
|
||||||
|
All error messages are displayed in red for better visibility.
|
||||||
28
apps/cli/tsconfig.json
Normal file
28
apps/cli/tsconfig.json
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
// Environment setup & latest features
|
||||||
|
"lib": ["ESNext"],
|
||||||
|
"target": "ESNext",
|
||||||
|
"module": "Preserve",
|
||||||
|
"moduleDetection": "force",
|
||||||
|
"allowJs": true,
|
||||||
|
|
||||||
|
// Bundler mode
|
||||||
|
"moduleResolution": "bundler",
|
||||||
|
"allowImportingTsExtensions": true,
|
||||||
|
"verbatimModuleSyntax": true,
|
||||||
|
"noEmit": true,
|
||||||
|
|
||||||
|
// Best practices
|
||||||
|
"strict": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"noFallthroughCasesInSwitch": true,
|
||||||
|
"noUncheckedIndexedAccess": true,
|
||||||
|
"noImplicitOverride": true,
|
||||||
|
|
||||||
|
// Some stricter flags (disabled by default)
|
||||||
|
"noUnusedLocals": false,
|
||||||
|
"noUnusedParameters": false,
|
||||||
|
"noPropertyAccessFromIndexSignature": false
|
||||||
|
}
|
||||||
|
}
|
||||||
6
apps/drive-web/.env.sample
Normal file
6
apps/drive-web/.env.sample
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
# this is the url to the convex instance (NOT THE DASHBOARD)
|
||||||
|
VITE_CONVEX_URL=
|
||||||
|
# this is the convex url for invoking http actions
|
||||||
|
VITE_CONVEX_SITE_URL=
|
||||||
|
# this is the url to the file proxy
|
||||||
|
FILE_PROXY_URL=
|
||||||
@@ -3,10 +3,10 @@
|
|||||||
<head>
|
<head>
|
||||||
<meta charset="UTF-8" />
|
<meta charset="UTF-8" />
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
<title>Bun + React</title>
|
<title>Drive</title>
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<div id="root"></div>
|
<div id="root"></div>
|
||||||
<script type="module" src="./entry.tsx"></script>
|
<script type="module" src="/src/entry.tsx"></script>
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
@@ -4,18 +4,20 @@
|
|||||||
"private": true,
|
"private": true,
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "bun --hot src/server.tsx",
|
"dev": "vite",
|
||||||
"build": "bun build ./src/index.html --outdir=dist --sourcemap --target=browser --minify --define:process.env.NODE_ENV='\"production\"' --env='BUN_PUBLIC_*'",
|
"build": "vite build",
|
||||||
"start": "NODE_ENV=production bun src/index.tsx",
|
"preview": "vite preview",
|
||||||
"format": "biome format --write"
|
"format": "biome format --write"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@convex-dev/workos": "^0.0.1",
|
"@convex-dev/better-auth": "^0.8.9",
|
||||||
"@fileone/convex": "workspace:*",
|
"@fileone/convex": "workspace:*",
|
||||||
"@radix-ui/react-checkbox": "^1.3.3",
|
"@radix-ui/react-checkbox": "^1.3.3",
|
||||||
"@radix-ui/react-context-menu": "^2.2.16",
|
"@radix-ui/react-context-menu": "^2.2.16",
|
||||||
"@radix-ui/react-dialog": "^1.1.15",
|
"@radix-ui/react-dialog": "^1.1.15",
|
||||||
"@radix-ui/react-dropdown-menu": "^2.1.16",
|
"@radix-ui/react-dropdown-menu": "^2.1.16",
|
||||||
|
"@radix-ui/react-label": "^2.1.7",
|
||||||
|
"@radix-ui/react-progress": "^1.1.7",
|
||||||
"@radix-ui/react-separator": "^1.1.7",
|
"@radix-ui/react-separator": "^1.1.7",
|
||||||
"@radix-ui/react-slot": "^1.2.3",
|
"@radix-ui/react-slot": "^1.2.3",
|
||||||
"@radix-ui/react-tooltip": "^1.2.8",
|
"@radix-ui/react-tooltip": "^1.2.8",
|
||||||
@@ -23,15 +25,18 @@
|
|||||||
"@tanstack/react-router": "^1.131.41",
|
"@tanstack/react-router": "^1.131.41",
|
||||||
"@tanstack/react-table": "^8.21.3",
|
"@tanstack/react-table": "^8.21.3",
|
||||||
"@tanstack/router-devtools": "^1.131.42",
|
"@tanstack/router-devtools": "^1.131.42",
|
||||||
"@workos-inc/authkit-react": "^0.12.0",
|
"better-auth": "1.3.8",
|
||||||
"bun-plugin-tailwind": "latest",
|
|
||||||
"class-variance-authority": "^0.7.1",
|
"class-variance-authority": "^0.7.1",
|
||||||
"clsx": "^2.1.1",
|
"clsx": "^2.1.1",
|
||||||
"convex": "^1.27.0",
|
"convex": "^1.27.0",
|
||||||
"convex-helpers": "^0.1.104",
|
"convex-helpers": "^0.1.104",
|
||||||
"jotai": "^2.14.0",
|
"jotai": "^2.14.0",
|
||||||
|
"jotai-effect": "^2.1.3",
|
||||||
|
"jotai-scope": "^0.9.5",
|
||||||
|
"jotai-tanstack-query": "^0.11.0",
|
||||||
"lucide-react": "^0.544.0",
|
"lucide-react": "^0.544.0",
|
||||||
"motion": "^12.23.16",
|
"motion": "^12.23.16",
|
||||||
|
"nanoid": "^5.1.6",
|
||||||
"next-themes": "^0.4.6",
|
"next-themes": "^0.4.6",
|
||||||
"react": "^19",
|
"react": "^19",
|
||||||
"react-dom": "^19",
|
"react-dom": "^19",
|
||||||
@@ -42,7 +47,11 @@
|
|||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@tanstack/router-cli": "^1.131.41",
|
"@tanstack/router-cli": "^1.131.41",
|
||||||
|
"@tanstack/router-plugin": "^1.133.13",
|
||||||
|
"@types/node": "^22.10.5",
|
||||||
"@types/react": "^19",
|
"@types/react": "^19",
|
||||||
"@types/react-dom": "^19"
|
"@types/react-dom": "^19",
|
||||||
|
"@vitejs/plugin-react": "^5.0.4",
|
||||||
|
"vite": "^7.1.10"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
50
apps/drive-web/src/APITester.tsx
Normal file
50
apps/drive-web/src/APITester.tsx
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
import { type FormEvent, useRef } from "react"
|
||||||
|
|
||||||
|
export function APITester() {
|
||||||
|
const responseInputRef = useRef<HTMLTextAreaElement>(null)
|
||||||
|
|
||||||
|
const testEndpoint = async (e: FormEvent<HTMLFormElement>) => {
|
||||||
|
e.preventDefault()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const form = e.currentTarget
|
||||||
|
const formData = new FormData(form)
|
||||||
|
const endpoint = formData.get("endpoint") as string
|
||||||
|
const url = new URL(endpoint, location.href)
|
||||||
|
const method = formData.get("method") as string
|
||||||
|
const res = await fetch(url, { method })
|
||||||
|
|
||||||
|
const data = await res.json()
|
||||||
|
responseInputRef.current!.value = JSON.stringify(data, null, 2)
|
||||||
|
} catch (error) {
|
||||||
|
responseInputRef.current!.value = String(error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="api-tester">
|
||||||
|
<form onSubmit={testEndpoint} className="endpoint-row">
|
||||||
|
<select name="method" className="method">
|
||||||
|
<option value="GET">GET</option>
|
||||||
|
<option value="PUT">PUT</option>
|
||||||
|
</select>
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
name="endpoint"
|
||||||
|
defaultValue="/api/hello"
|
||||||
|
className="url-input"
|
||||||
|
placeholder="/api/hello"
|
||||||
|
/>
|
||||||
|
<button type="submit" className="send-button">
|
||||||
|
Send
|
||||||
|
</button>
|
||||||
|
</form>
|
||||||
|
<textarea
|
||||||
|
ref={responseInputRef}
|
||||||
|
readOnly
|
||||||
|
placeholder="Response will appear here..."
|
||||||
|
className="response-area"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
33
apps/drive-web/src/auth.ts
Normal file
33
apps/drive-web/src/auth.ts
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
import {
|
||||||
|
convexClient,
|
||||||
|
crossDomainClient,
|
||||||
|
} from "@convex-dev/better-auth/client/plugins"
|
||||||
|
import { createAuthClient } from "better-auth/react"
|
||||||
|
import { createContext, useContext } from "react"
|
||||||
|
|
||||||
|
export type AuthErrorCode = keyof typeof authClient.$ERROR_CODES
|
||||||
|
|
||||||
|
export class BetterAuthError extends Error {
|
||||||
|
constructor(public readonly errorCode: AuthErrorCode) {
|
||||||
|
super(`better-auth error: ${errorCode}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const authClient = createAuthClient({
|
||||||
|
baseURL: import.meta.env.VITE_CONVEX_SITE_URL,
|
||||||
|
plugins: [convexClient(), crossDomainClient()],
|
||||||
|
})
|
||||||
|
|
||||||
|
export type Session = NonNullable<
|
||||||
|
Awaited<ReturnType<typeof authClient.useSession>>["data"]
|
||||||
|
>
|
||||||
|
|
||||||
|
export const SessionContext = createContext<Session | null>(null)
|
||||||
|
|
||||||
|
export function useSession() {
|
||||||
|
const context = useContext(SessionContext)
|
||||||
|
if (!context) {
|
||||||
|
throw new Error("useSession must be used within a SessionProvider")
|
||||||
|
}
|
||||||
|
return context
|
||||||
|
}
|
||||||
109
apps/drive-web/src/components/ui/breadcrumb.tsx
Normal file
109
apps/drive-web/src/components/ui/breadcrumb.tsx
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
import { Slot } from "@radix-ui/react-slot"
|
||||||
|
import { ChevronRight, MoreHorizontal } from "lucide-react"
|
||||||
|
import type * as React from "react"
|
||||||
|
|
||||||
|
import { cn } from "@/lib/utils"
|
||||||
|
|
||||||
|
function Breadcrumb({ ...props }: React.ComponentProps<"nav">) {
|
||||||
|
return <nav aria-label="breadcrumb" data-slot="breadcrumb" {...props} />
|
||||||
|
}
|
||||||
|
|
||||||
|
function BreadcrumbList({ className, ...props }: React.ComponentProps<"ol">) {
|
||||||
|
return (
|
||||||
|
<ol
|
||||||
|
data-slot="breadcrumb-list"
|
||||||
|
className={cn(
|
||||||
|
"text-muted-foreground flex flex-wrap items-center gap-1.5 text-sm break-words sm:gap-2.5",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function BreadcrumbItem({ className, ...props }: React.ComponentProps<"li">) {
|
||||||
|
return (
|
||||||
|
<li
|
||||||
|
data-slot="breadcrumb-item"
|
||||||
|
className={cn("inline-flex items-center gap-1.5", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function BreadcrumbLink({
|
||||||
|
asChild,
|
||||||
|
className,
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<"a"> & {
|
||||||
|
asChild?: boolean
|
||||||
|
}) {
|
||||||
|
const Comp = asChild ? Slot : "a"
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Comp
|
||||||
|
data-slot="breadcrumb-link"
|
||||||
|
className={cn("hover:text-foreground transition-colors", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function BreadcrumbPage({ className, ...props }: React.ComponentProps<"span">) {
|
||||||
|
return (
|
||||||
|
<span
|
||||||
|
data-slot="breadcrumb-page"
|
||||||
|
role="link"
|
||||||
|
aria-disabled="true"
|
||||||
|
aria-current="page"
|
||||||
|
className={cn("text-foreground font-normal", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function BreadcrumbSeparator({
|
||||||
|
children,
|
||||||
|
className,
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<"li">) {
|
||||||
|
return (
|
||||||
|
<li
|
||||||
|
data-slot="breadcrumb-separator"
|
||||||
|
role="presentation"
|
||||||
|
aria-hidden="true"
|
||||||
|
className={cn("[&>svg]:size-3.5", className)}
|
||||||
|
{...props}
|
||||||
|
>
|
||||||
|
{children ?? <ChevronRight />}
|
||||||
|
</li>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function BreadcrumbEllipsis({
|
||||||
|
className,
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<"span">) {
|
||||||
|
return (
|
||||||
|
<span
|
||||||
|
data-slot="breadcrumb-ellipsis"
|
||||||
|
role="presentation"
|
||||||
|
aria-hidden="true"
|
||||||
|
className={cn("flex size-9 items-center justify-center", className)}
|
||||||
|
{...props}
|
||||||
|
>
|
||||||
|
<MoreHorizontal className="size-4" />
|
||||||
|
<span className="sr-only">More</span>
|
||||||
|
</span>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export {
|
||||||
|
Breadcrumb,
|
||||||
|
BreadcrumbList,
|
||||||
|
BreadcrumbItem,
|
||||||
|
BreadcrumbLink,
|
||||||
|
BreadcrumbPage,
|
||||||
|
BreadcrumbSeparator,
|
||||||
|
BreadcrumbEllipsis,
|
||||||
|
}
|
||||||
@@ -54,7 +54,7 @@ function Button({
|
|||||||
<Comp
|
<Comp
|
||||||
data-slot="button"
|
data-slot="button"
|
||||||
className={cn(buttonVariants({ variant, size, className }))}
|
className={cn(buttonVariants({ variant, size, className }))}
|
||||||
disabled={loading || props.disabled}
|
disabled={props.disabled}
|
||||||
{...props}
|
{...props}
|
||||||
>
|
>
|
||||||
{asChild ? (
|
{asChild ? (
|
||||||
92
apps/drive-web/src/components/ui/card.tsx
Normal file
92
apps/drive-web/src/components/ui/card.tsx
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
import type * as React from "react"
|
||||||
|
|
||||||
|
import { cn } from "@/lib/utils"
|
||||||
|
|
||||||
|
function Card({ className, ...props }: React.ComponentProps<"div">) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
data-slot="card"
|
||||||
|
className={cn(
|
||||||
|
"bg-card text-card-foreground flex flex-col gap-6 rounded-xl border py-6 shadow-sm",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function CardHeader({ className, ...props }: React.ComponentProps<"div">) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
data-slot="card-header"
|
||||||
|
className={cn(
|
||||||
|
"@container/card-header grid auto-rows-min grid-rows-[auto_auto] items-start gap-2 px-6 has-data-[slot=card-action]:grid-cols-[1fr_auto] [.border-b]:pb-6",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function CardTitle({ className, ...props }: React.ComponentProps<"div">) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
data-slot="card-title"
|
||||||
|
className={cn("leading-none font-semibold", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function CardDescription({ className, ...props }: React.ComponentProps<"div">) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
data-slot="card-description"
|
||||||
|
className={cn("text-muted-foreground text-sm", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function CardAction({ className, ...props }: React.ComponentProps<"div">) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
data-slot="card-action"
|
||||||
|
className={cn(
|
||||||
|
"col-start-2 row-span-2 row-start-1 self-start justify-self-end",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function CardContent({ className, ...props }: React.ComponentProps<"div">) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
data-slot="card-content"
|
||||||
|
className={cn("px-6", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function CardFooter({ className, ...props }: React.ComponentProps<"div">) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
data-slot="card-footer"
|
||||||
|
className={cn("flex items-center px-6 [.border-t]:pt-6", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export {
|
||||||
|
Card,
|
||||||
|
CardHeader,
|
||||||
|
CardFooter,
|
||||||
|
CardTitle,
|
||||||
|
CardAction,
|
||||||
|
CardDescription,
|
||||||
|
CardContent,
|
||||||
|
}
|
||||||
30
apps/drive-web/src/components/ui/checkbox.tsx
Normal file
30
apps/drive-web/src/components/ui/checkbox.tsx
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
import * as CheckboxPrimitive from "@radix-ui/react-checkbox"
|
||||||
|
import { CheckIcon } from "lucide-react"
|
||||||
|
import type * as React from "react"
|
||||||
|
|
||||||
|
import { cn } from "@/lib/utils"
|
||||||
|
|
||||||
|
function Checkbox({
|
||||||
|
className,
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<typeof CheckboxPrimitive.Root>) {
|
||||||
|
return (
|
||||||
|
<CheckboxPrimitive.Root
|
||||||
|
data-slot="checkbox"
|
||||||
|
className={cn(
|
||||||
|
"peer border-input dark:bg-input/30 data-[state=checked]:bg-primary data-[state=checked]:text-primary-foreground dark:data-[state=checked]:bg-primary data-[state=checked]:border-primary focus-visible:border-ring focus-visible:ring-ring/50 aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive size-4 shrink-0 rounded-[4px] border shadow-xs transition-shadow outline-none focus-visible:ring-[3px] disabled:cursor-not-allowed disabled:opacity-50",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
>
|
||||||
|
<CheckboxPrimitive.Indicator
|
||||||
|
data-slot="checkbox-indicator"
|
||||||
|
className="flex items-center justify-center text-current transition-none"
|
||||||
|
>
|
||||||
|
<CheckIcon className="size-3.5" />
|
||||||
|
</CheckboxPrimitive.Indicator>
|
||||||
|
</CheckboxPrimitive.Root>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export { Checkbox }
|
||||||
@@ -36,7 +36,7 @@ function DialogOverlay({
|
|||||||
<DialogPrimitive.Overlay
|
<DialogPrimitive.Overlay
|
||||||
data-slot="dialog-overlay"
|
data-slot="dialog-overlay"
|
||||||
className={cn(
|
className={cn(
|
||||||
"data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-black/50 backdrop-blur-xs",
|
"data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-black/20",
|
||||||
className,
|
className,
|
||||||
)}
|
)}
|
||||||
{...props}
|
{...props}
|
||||||
241
apps/drive-web/src/components/ui/field.tsx
Normal file
241
apps/drive-web/src/components/ui/field.tsx
Normal file
@@ -0,0 +1,241 @@
|
|||||||
|
import { cva, type VariantProps } from "class-variance-authority"
|
||||||
|
import { useMemo } from "react"
|
||||||
|
import { Label } from "@/components/ui/label"
|
||||||
|
import { Separator } from "@/components/ui/separator"
|
||||||
|
import { cn } from "@/lib/utils"
|
||||||
|
|
||||||
|
function FieldSet({ className, ...props }: React.ComponentProps<"fieldset">) {
|
||||||
|
return (
|
||||||
|
<fieldset
|
||||||
|
data-slot="field-set"
|
||||||
|
className={cn(
|
||||||
|
"flex flex-col gap-6",
|
||||||
|
"has-[>[data-slot=checkbox-group]]:gap-3 has-[>[data-slot=radio-group]]:gap-3",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function FieldLegend({
|
||||||
|
className,
|
||||||
|
variant = "legend",
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<"legend"> & { variant?: "legend" | "label" }) {
|
||||||
|
return (
|
||||||
|
<legend
|
||||||
|
data-slot="field-legend"
|
||||||
|
data-variant={variant}
|
||||||
|
className={cn(
|
||||||
|
"mb-3 font-medium",
|
||||||
|
"data-[variant=legend]:text-base",
|
||||||
|
"data-[variant=label]:text-sm",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function FieldGroup({ className, ...props }: React.ComponentProps<"div">) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
data-slot="field-group"
|
||||||
|
className={cn(
|
||||||
|
"group/field-group @container/field-group flex w-full flex-col gap-7 data-[slot=checkbox-group]:gap-3 [&>[data-slot=field-group]]:gap-4",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const fieldVariants = cva(
|
||||||
|
"group/field flex w-full gap-3 data-[invalid=true]:text-destructive",
|
||||||
|
{
|
||||||
|
variants: {
|
||||||
|
orientation: {
|
||||||
|
vertical: ["flex-col [&>*]:w-full [&>.sr-only]:w-auto"],
|
||||||
|
horizontal: [
|
||||||
|
"flex-row items-center",
|
||||||
|
"[&>[data-slot=field-label]]:flex-auto",
|
||||||
|
"has-[>[data-slot=field-content]]:items-start has-[>[data-slot=field-content]]:[&>[role=checkbox],[role=radio]]:mt-px",
|
||||||
|
],
|
||||||
|
responsive: [
|
||||||
|
"flex-col [&>*]:w-full [&>.sr-only]:w-auto @md/field-group:flex-row @md/field-group:items-center @md/field-group:[&>*]:w-auto",
|
||||||
|
"@md/field-group:[&>[data-slot=field-label]]:flex-auto",
|
||||||
|
"@md/field-group:has-[>[data-slot=field-content]]:items-start @md/field-group:has-[>[data-slot=field-content]]:[&>[role=checkbox],[role=radio]]:mt-px",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
defaultVariants: {
|
||||||
|
orientation: "vertical",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
function Field({
|
||||||
|
className,
|
||||||
|
orientation = "vertical",
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<"div"> & VariantProps<typeof fieldVariants>) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
role="group"
|
||||||
|
data-slot="field"
|
||||||
|
data-orientation={orientation}
|
||||||
|
className={cn(fieldVariants({ orientation }), className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function FieldContent({ className, ...props }: React.ComponentProps<"div">) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
data-slot="field-content"
|
||||||
|
className={cn(
|
||||||
|
"group/field-content flex flex-1 flex-col gap-1.5 leading-snug",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function FieldLabel({
|
||||||
|
className,
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<typeof Label>) {
|
||||||
|
return (
|
||||||
|
<Label
|
||||||
|
data-slot="field-label"
|
||||||
|
className={cn(
|
||||||
|
"group/field-label peer/field-label flex w-fit gap-2 leading-snug group-data-[disabled=true]/field:opacity-50",
|
||||||
|
"has-[>[data-slot=field]]:w-full has-[>[data-slot=field]]:flex-col has-[>[data-slot=field]]:rounded-md has-[>[data-slot=field]]:border [&>*]:data-[slot=field]:p-4",
|
||||||
|
"has-data-[state=checked]:bg-primary/5 has-data-[state=checked]:border-primary dark:has-data-[state=checked]:bg-primary/10",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function FieldTitle({ className, ...props }: React.ComponentProps<"div">) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
data-slot="field-label"
|
||||||
|
className={cn(
|
||||||
|
"flex w-fit items-center gap-2 text-sm leading-snug font-medium group-data-[disabled=true]/field:opacity-50",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function FieldDescription({ className, ...props }: React.ComponentProps<"p">) {
|
||||||
|
return (
|
||||||
|
<p
|
||||||
|
data-slot="field-description"
|
||||||
|
className={cn(
|
||||||
|
"text-muted-foreground text-sm leading-normal font-normal group-has-[[data-orientation=horizontal]]/field:text-balance",
|
||||||
|
"last:mt-0 nth-last-2:-mt-1 [[data-variant=legend]+&]:-mt-1.5",
|
||||||
|
"[&>a:hover]:text-primary [&>a]:underline [&>a]:underline-offset-4",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function FieldSeparator({
|
||||||
|
children,
|
||||||
|
className,
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<"div"> & {
|
||||||
|
children?: React.ReactNode
|
||||||
|
}) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
data-slot="field-separator"
|
||||||
|
data-content={!!children}
|
||||||
|
className={cn(
|
||||||
|
"relative -my-2 h-5 text-sm group-data-[variant=outline]/field-group:-mb-2",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
>
|
||||||
|
<Separator className="absolute inset-0 top-1/2" />
|
||||||
|
{children && (
|
||||||
|
<span
|
||||||
|
className="bg-background text-muted-foreground relative mx-auto block w-fit px-2"
|
||||||
|
data-slot="field-separator-content"
|
||||||
|
>
|
||||||
|
{children}
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function FieldError({
|
||||||
|
className,
|
||||||
|
children,
|
||||||
|
errors,
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<"div"> & {
|
||||||
|
errors?: Array<{ message?: string } | undefined>
|
||||||
|
}) {
|
||||||
|
const content = useMemo(() => {
|
||||||
|
if (children) {
|
||||||
|
return children
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!errors) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (errors?.length === 1 && errors[0]?.message) {
|
||||||
|
return errors[0].message
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<ul className="ml-4 flex list-disc flex-col gap-1">
|
||||||
|
{errors.map(
|
||||||
|
(error, index) =>
|
||||||
|
error?.message && <li key={index}>{error.message}</li>,
|
||||||
|
)}
|
||||||
|
</ul>
|
||||||
|
)
|
||||||
|
}, [children, errors])
|
||||||
|
|
||||||
|
if (!content) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
role="alert"
|
||||||
|
data-slot="field-error"
|
||||||
|
className={cn("text-destructive text-sm font-normal", className)}
|
||||||
|
{...props}
|
||||||
|
>
|
||||||
|
{content}
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export {
|
||||||
|
Field,
|
||||||
|
FieldLabel,
|
||||||
|
FieldDescription,
|
||||||
|
FieldError,
|
||||||
|
FieldGroup,
|
||||||
|
FieldLegend,
|
||||||
|
FieldSeparator,
|
||||||
|
FieldSet,
|
||||||
|
FieldContent,
|
||||||
|
FieldTitle,
|
||||||
|
}
|
||||||
21
apps/drive-web/src/components/ui/input.tsx
Normal file
21
apps/drive-web/src/components/ui/input.tsx
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
import type * as React from "react"
|
||||||
|
|
||||||
|
import { cn } from "@/lib/utils"
|
||||||
|
|
||||||
|
function Input({ className, type, ...props }: React.ComponentProps<"input">) {
|
||||||
|
return (
|
||||||
|
<input
|
||||||
|
type={type}
|
||||||
|
data-slot="input"
|
||||||
|
className={cn(
|
||||||
|
"file:text-foreground placeholder:text-muted-foreground selection:bg-primary selection:text-primary-foreground dark:bg-input/30 border-input flex h-9 w-full min-w-0 rounded-md border bg-transparent px-3 py-1 text-base shadow-xs transition-[color,box-shadow] outline-none file:inline-flex file:h-7 file:border-0 file:bg-transparent file:text-sm file:font-medium disabled:pointer-events-none disabled:cursor-not-allowed disabled:opacity-50 md:text-sm",
|
||||||
|
"focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px]",
|
||||||
|
"aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export { Input }
|
||||||
22
apps/drive-web/src/components/ui/label.tsx
Normal file
22
apps/drive-web/src/components/ui/label.tsx
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
import * as LabelPrimitive from "@radix-ui/react-label"
|
||||||
|
import type * as React from "react"
|
||||||
|
|
||||||
|
import { cn } from "@/lib/utils"
|
||||||
|
|
||||||
|
function Label({
|
||||||
|
className,
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<typeof LabelPrimitive.Root>) {
|
||||||
|
return (
|
||||||
|
<LabelPrimitive.Root
|
||||||
|
data-slot="label"
|
||||||
|
className={cn(
|
||||||
|
"flex items-center gap-2 text-sm leading-none font-medium select-none group-data-[disabled=true]:pointer-events-none group-data-[disabled=true]:opacity-50 peer-disabled:cursor-not-allowed peer-disabled:opacity-50",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export { Label }
|
||||||
21
apps/drive-web/src/components/ui/middle-truncated-text.tsx
Normal file
21
apps/drive-web/src/components/ui/middle-truncated-text.tsx
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
import { cn } from "@/lib/utils"
|
||||||
|
|
||||||
|
function MiddleTruncatedText({
|
||||||
|
children,
|
||||||
|
className,
|
||||||
|
}: {
|
||||||
|
children: string
|
||||||
|
className?: string
|
||||||
|
}) {
|
||||||
|
const LAST_PART_LENGTH = 3
|
||||||
|
const lastPart = children.slice(children.length - LAST_PART_LENGTH)
|
||||||
|
const firstPart = children.slice(0, children.length - LAST_PART_LENGTH)
|
||||||
|
return (
|
||||||
|
<p className={cn("max-w-full flex", className)}>
|
||||||
|
<span className="flex-1 truncate">{firstPart}</span>
|
||||||
|
<span className="w-min">{lastPart}</span>
|
||||||
|
</p>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export { MiddleTruncatedText }
|
||||||
29
apps/drive-web/src/components/ui/progress.tsx
Normal file
29
apps/drive-web/src/components/ui/progress.tsx
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
import * as ProgressPrimitive from "@radix-ui/react-progress"
|
||||||
|
import type * as React from "react"
|
||||||
|
|
||||||
|
import { cn } from "@/lib/utils"
|
||||||
|
|
||||||
|
function Progress({
|
||||||
|
className,
|
||||||
|
value,
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<typeof ProgressPrimitive.Root>) {
|
||||||
|
return (
|
||||||
|
<ProgressPrimitive.Root
|
||||||
|
data-slot="progress"
|
||||||
|
className={cn(
|
||||||
|
"bg-primary/20 relative h-2 w-full overflow-hidden rounded-full",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
>
|
||||||
|
<ProgressPrimitive.Indicator
|
||||||
|
data-slot="progress-indicator"
|
||||||
|
className="bg-primary h-full w-full flex-1 transition-all"
|
||||||
|
style={{ transform: `translateX(-${100 - (value || 0)}%)` }}
|
||||||
|
/>
|
||||||
|
</ProgressPrimitive.Root>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export { Progress }
|
||||||
28
apps/drive-web/src/components/ui/separator.tsx
Normal file
28
apps/drive-web/src/components/ui/separator.tsx
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
"use client"
|
||||||
|
|
||||||
|
import * as SeparatorPrimitive from "@radix-ui/react-separator"
|
||||||
|
import type * as React from "react"
|
||||||
|
|
||||||
|
import { cn } from "@/lib/utils"
|
||||||
|
|
||||||
|
function Separator({
|
||||||
|
className,
|
||||||
|
orientation = "horizontal",
|
||||||
|
decorative = true,
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<typeof SeparatorPrimitive.Root>) {
|
||||||
|
return (
|
||||||
|
<SeparatorPrimitive.Root
|
||||||
|
data-slot="separator"
|
||||||
|
decorative={decorative}
|
||||||
|
orientation={orientation}
|
||||||
|
className={cn(
|
||||||
|
"bg-border shrink-0 data-[orientation=horizontal]:h-px data-[orientation=horizontal]:w-full data-[orientation=vertical]:h-full data-[orientation=vertical]:w-px",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export { Separator }
|
||||||
139
apps/drive-web/src/components/ui/sheet.tsx
Normal file
139
apps/drive-web/src/components/ui/sheet.tsx
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
"use client"
|
||||||
|
|
||||||
|
import * as SheetPrimitive from "@radix-ui/react-dialog"
|
||||||
|
import { XIcon } from "lucide-react"
|
||||||
|
import type * as React from "react"
|
||||||
|
|
||||||
|
import { cn } from "@/lib/utils"
|
||||||
|
|
||||||
|
function Sheet({ ...props }: React.ComponentProps<typeof SheetPrimitive.Root>) {
|
||||||
|
return <SheetPrimitive.Root data-slot="sheet" {...props} />
|
||||||
|
}
|
||||||
|
|
||||||
|
function SheetTrigger({
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<typeof SheetPrimitive.Trigger>) {
|
||||||
|
return <SheetPrimitive.Trigger data-slot="sheet-trigger" {...props} />
|
||||||
|
}
|
||||||
|
|
||||||
|
function SheetClose({
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<typeof SheetPrimitive.Close>) {
|
||||||
|
return <SheetPrimitive.Close data-slot="sheet-close" {...props} />
|
||||||
|
}
|
||||||
|
|
||||||
|
function SheetPortal({
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<typeof SheetPrimitive.Portal>) {
|
||||||
|
return <SheetPrimitive.Portal data-slot="sheet-portal" {...props} />
|
||||||
|
}
|
||||||
|
|
||||||
|
function SheetOverlay({
|
||||||
|
className,
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<typeof SheetPrimitive.Overlay>) {
|
||||||
|
return (
|
||||||
|
<SheetPrimitive.Overlay
|
||||||
|
data-slot="sheet-overlay"
|
||||||
|
className={cn(
|
||||||
|
"data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-black/50",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function SheetContent({
|
||||||
|
className,
|
||||||
|
children,
|
||||||
|
side = "right",
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<typeof SheetPrimitive.Content> & {
|
||||||
|
side?: "top" | "right" | "bottom" | "left"
|
||||||
|
}) {
|
||||||
|
return (
|
||||||
|
<SheetPortal>
|
||||||
|
<SheetOverlay />
|
||||||
|
<SheetPrimitive.Content
|
||||||
|
data-slot="sheet-content"
|
||||||
|
className={cn(
|
||||||
|
"bg-background data-[state=open]:animate-in data-[state=closed]:animate-out fixed z-50 flex flex-col gap-4 shadow-lg transition ease-in-out data-[state=closed]:duration-300 data-[state=open]:duration-500",
|
||||||
|
side === "right" &&
|
||||||
|
"data-[state=closed]:slide-out-to-right data-[state=open]:slide-in-from-right inset-y-0 right-0 h-full w-3/4 border-l sm:max-w-sm",
|
||||||
|
side === "left" &&
|
||||||
|
"data-[state=closed]:slide-out-to-left data-[state=open]:slide-in-from-left inset-y-0 left-0 h-full w-3/4 border-r sm:max-w-sm",
|
||||||
|
side === "top" &&
|
||||||
|
"data-[state=closed]:slide-out-to-top data-[state=open]:slide-in-from-top inset-x-0 top-0 h-auto border-b",
|
||||||
|
side === "bottom" &&
|
||||||
|
"data-[state=closed]:slide-out-to-bottom data-[state=open]:slide-in-from-bottom inset-x-0 bottom-0 h-auto border-t",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
>
|
||||||
|
{children}
|
||||||
|
<SheetPrimitive.Close className="ring-offset-background focus:ring-ring data-[state=open]:bg-secondary absolute top-4 right-4 rounded-xs opacity-70 transition-opacity hover:opacity-100 focus:ring-2 focus:ring-offset-2 focus:outline-hidden disabled:pointer-events-none">
|
||||||
|
<XIcon className="size-4" />
|
||||||
|
<span className="sr-only">Close</span>
|
||||||
|
</SheetPrimitive.Close>
|
||||||
|
</SheetPrimitive.Content>
|
||||||
|
</SheetPortal>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function SheetHeader({ className, ...props }: React.ComponentProps<"div">) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
data-slot="sheet-header"
|
||||||
|
className={cn("flex flex-col gap-1.5 p-4", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function SheetFooter({ className, ...props }: React.ComponentProps<"div">) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
data-slot="sheet-footer"
|
||||||
|
className={cn("mt-auto flex flex-col gap-2 p-4", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function SheetTitle({
|
||||||
|
className,
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<typeof SheetPrimitive.Title>) {
|
||||||
|
return (
|
||||||
|
<SheetPrimitive.Title
|
||||||
|
data-slot="sheet-title"
|
||||||
|
className={cn("text-foreground font-semibold", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function SheetDescription({
|
||||||
|
className,
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<typeof SheetPrimitive.Description>) {
|
||||||
|
return (
|
||||||
|
<SheetPrimitive.Description
|
||||||
|
data-slot="sheet-description"
|
||||||
|
className={cn("text-muted-foreground text-sm", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export {
|
||||||
|
Sheet,
|
||||||
|
SheetTrigger,
|
||||||
|
SheetClose,
|
||||||
|
SheetContent,
|
||||||
|
SheetHeader,
|
||||||
|
SheetFooter,
|
||||||
|
SheetTitle,
|
||||||
|
SheetDescription,
|
||||||
|
}
|
||||||
@@ -92,7 +92,7 @@ function SidebarProvider({
|
|||||||
return isMobile
|
return isMobile
|
||||||
? setOpenMobile((open) => !open)
|
? setOpenMobile((open) => !open)
|
||||||
: setOpen((open) => !open)
|
: setOpen((open) => !open)
|
||||||
}, [isMobile, setOpen, setOpenMobile])
|
}, [isMobile, setOpen])
|
||||||
|
|
||||||
// Adds a keyboard shortcut to toggle the sidebar.
|
// Adds a keyboard shortcut to toggle the sidebar.
|
||||||
React.useEffect(() => {
|
React.useEffect(() => {
|
||||||
@@ -124,15 +124,7 @@ function SidebarProvider({
|
|||||||
setOpenMobile,
|
setOpenMobile,
|
||||||
toggleSidebar,
|
toggleSidebar,
|
||||||
}),
|
}),
|
||||||
[
|
[state, open, setOpen, isMobile, openMobile, toggleSidebar],
|
||||||
state,
|
|
||||||
open,
|
|
||||||
setOpen,
|
|
||||||
isMobile,
|
|
||||||
openMobile,
|
|
||||||
setOpenMobile,
|
|
||||||
toggleSidebar,
|
|
||||||
],
|
|
||||||
)
|
)
|
||||||
|
|
||||||
return (
|
return (
|
||||||
13
apps/drive-web/src/components/ui/skeleton.tsx
Normal file
13
apps/drive-web/src/components/ui/skeleton.tsx
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
import { cn } from "@/lib/utils"
|
||||||
|
|
||||||
|
function Skeleton({ className, ...props }: React.ComponentProps<"div">) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
data-slot="skeleton"
|
||||||
|
className={cn("bg-accent animate-pulse rounded-md", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export { Skeleton }
|
||||||
59
apps/drive-web/src/components/ui/tooltip.tsx
Normal file
59
apps/drive-web/src/components/ui/tooltip.tsx
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
import * as TooltipPrimitive from "@radix-ui/react-tooltip"
|
||||||
|
import type * as React from "react"
|
||||||
|
|
||||||
|
import { cn } from "@/lib/utils"
|
||||||
|
|
||||||
|
function TooltipProvider({
|
||||||
|
delayDuration = 0,
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<typeof TooltipPrimitive.Provider>) {
|
||||||
|
return (
|
||||||
|
<TooltipPrimitive.Provider
|
||||||
|
data-slot="tooltip-provider"
|
||||||
|
delayDuration={delayDuration}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function Tooltip({
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<typeof TooltipPrimitive.Root>) {
|
||||||
|
return (
|
||||||
|
<TooltipProvider>
|
||||||
|
<TooltipPrimitive.Root data-slot="tooltip" {...props} />
|
||||||
|
</TooltipProvider>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function TooltipTrigger({
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<typeof TooltipPrimitive.Trigger>) {
|
||||||
|
return <TooltipPrimitive.Trigger data-slot="tooltip-trigger" {...props} />
|
||||||
|
}
|
||||||
|
|
||||||
|
function TooltipContent({
|
||||||
|
className,
|
||||||
|
sideOffset = 0,
|
||||||
|
children,
|
||||||
|
...props
|
||||||
|
}: React.ComponentProps<typeof TooltipPrimitive.Content>) {
|
||||||
|
return (
|
||||||
|
<TooltipPrimitive.Portal>
|
||||||
|
<TooltipPrimitive.Content
|
||||||
|
data-slot="tooltip-content"
|
||||||
|
sideOffset={sideOffset}
|
||||||
|
className={cn(
|
||||||
|
"bg-primary text-primary-foreground animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-50 w-fit origin-(--radix-tooltip-content-transform-origin) rounded-md px-3 py-1.5 text-xs text-balance",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
>
|
||||||
|
{children}
|
||||||
|
<TooltipPrimitive.Arrow className="bg-primary fill-primary z-50 size-2.5 translate-y-[calc(-50%_-_2px)] rotate-45 rounded-[2px]" />
|
||||||
|
</TooltipPrimitive.Content>
|
||||||
|
</TooltipPrimitive.Portal>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export { Tooltip, TooltipTrigger, TooltipContent, TooltipProvider }
|
||||||
64
apps/drive-web/src/components/with-atom.tsx
Normal file
64
apps/drive-web/src/components/with-atom.tsx
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
import {
|
||||||
|
type Atom,
|
||||||
|
type ExtractAtomArgs,
|
||||||
|
type ExtractAtomResult,
|
||||||
|
type ExtractAtomValue,
|
||||||
|
type PrimitiveAtom,
|
||||||
|
type SetStateAction,
|
||||||
|
useAtom,
|
||||||
|
type WritableAtom,
|
||||||
|
} from "jotai"
|
||||||
|
import type * as React from "react"
|
||||||
|
|
||||||
|
type SetAtom<Args extends unknown[], Result> = (...args: Args) => Result
|
||||||
|
|
||||||
|
export function WithAtom<Value, Args extends unknown[], Result>(props: {
|
||||||
|
atom: WritableAtom<Value, Args, Result>
|
||||||
|
children: (
|
||||||
|
value: Awaited<Value>,
|
||||||
|
setAtom: SetAtom<Args, Result>,
|
||||||
|
) => React.ReactNode
|
||||||
|
}): React.ReactNode
|
||||||
|
export function WithAtom<Value>(props: {
|
||||||
|
atom: PrimitiveAtom<Value>
|
||||||
|
children: (
|
||||||
|
value: Awaited<Value>,
|
||||||
|
setAtom: SetAtom<[SetStateAction<Value>], void>,
|
||||||
|
) => React.ReactNode
|
||||||
|
}): React.ReactNode
|
||||||
|
export function WithAtom<Value>(props: {
|
||||||
|
atom: Atom<Value>
|
||||||
|
children: (value: Awaited<Value>, setAtom: never) => React.ReactNode
|
||||||
|
}): React.ReactNode
|
||||||
|
export function WithAtom<
|
||||||
|
AtomType extends WritableAtom<unknown, never[], unknown>,
|
||||||
|
>(props: {
|
||||||
|
atom: AtomType
|
||||||
|
children: (
|
||||||
|
value: Awaited<ExtractAtomValue<AtomType>>,
|
||||||
|
setAtom: SetAtom<
|
||||||
|
ExtractAtomArgs<AtomType>,
|
||||||
|
ExtractAtomResult<AtomType>
|
||||||
|
>,
|
||||||
|
) => React.ReactNode
|
||||||
|
}): React.ReactNode
|
||||||
|
export function WithAtom<AtomType extends Atom<unknown>>(props: {
|
||||||
|
atom: AtomType
|
||||||
|
children: (
|
||||||
|
value: Awaited<ExtractAtomValue<AtomType>>,
|
||||||
|
setAtom: never,
|
||||||
|
) => React.ReactNode
|
||||||
|
}): React.ReactNode
|
||||||
|
export function WithAtom<Value, Args extends unknown[], Result>({
|
||||||
|
atom,
|
||||||
|
children,
|
||||||
|
}: {
|
||||||
|
atom: Atom<Value> | WritableAtom<Value, Args, Result>
|
||||||
|
children: (
|
||||||
|
value: Awaited<Value>,
|
||||||
|
setAtom: SetAtom<Args, Result> | never,
|
||||||
|
) => React.ReactNode
|
||||||
|
}) {
|
||||||
|
const [value, setAtom] = useAtom(atom as WritableAtom<Value, Args, Result>)
|
||||||
|
return children(value, setAtom)
|
||||||
|
}
|
||||||
270
apps/drive-web/src/dashboard/dashboard-sidebar.tsx
Normal file
270
apps/drive-web/src/dashboard/dashboard-sidebar.tsx
Normal file
@@ -0,0 +1,270 @@
|
|||||||
|
import { api } from "@fileone/convex/api"
|
||||||
|
import { newDirectoryHandle } from "@fileone/convex/filesystem"
|
||||||
|
import { useMutation } from "@tanstack/react-query"
|
||||||
|
import { Link, useLocation, useParams } from "@tanstack/react-router"
|
||||||
|
import {
|
||||||
|
useMutation as useConvexMutation,
|
||||||
|
useQuery as useConvexQuery,
|
||||||
|
} from "convex/react"
|
||||||
|
import { useAtomValue, useSetAtom, useStore } from "jotai"
|
||||||
|
import {
|
||||||
|
CircleXIcon,
|
||||||
|
ClockIcon,
|
||||||
|
FilesIcon,
|
||||||
|
FolderInputIcon,
|
||||||
|
LogOutIcon,
|
||||||
|
ScissorsIcon,
|
||||||
|
SettingsIcon,
|
||||||
|
TrashIcon,
|
||||||
|
User2Icon,
|
||||||
|
} from "lucide-react"
|
||||||
|
import { toast } from "sonner"
|
||||||
|
import { Card, CardFooter, CardHeader, CardTitle } from "@/components/ui/card"
|
||||||
|
import {
|
||||||
|
DropdownMenu,
|
||||||
|
DropdownMenuContent,
|
||||||
|
DropdownMenuItem,
|
||||||
|
DropdownMenuTrigger,
|
||||||
|
} from "@/components/ui/dropdown-menu"
|
||||||
|
import {
|
||||||
|
Sidebar,
|
||||||
|
SidebarContent,
|
||||||
|
SidebarFooter,
|
||||||
|
SidebarGroup,
|
||||||
|
SidebarHeader,
|
||||||
|
SidebarMenu,
|
||||||
|
SidebarMenuButton,
|
||||||
|
SidebarMenuItem,
|
||||||
|
} from "@/components/ui/sidebar"
|
||||||
|
import { formatError } from "@/lib/error"
|
||||||
|
import { Button } from "../components/ui/button"
|
||||||
|
import { LoadingSpinner } from "../components/ui/loading-spinner"
|
||||||
|
import { clearCutItemsAtom, cutHandlesAtom } from "../files/store"
|
||||||
|
import { backgroundTaskProgressAtom } from "./state"
|
||||||
|
|
||||||
|
export function DashboardSidebar() {
|
||||||
|
return (
|
||||||
|
<Sidebar variant="inset" collapsible="icon">
|
||||||
|
<SidebarHeader>
|
||||||
|
<SidebarMenu>
|
||||||
|
<SidebarMenuItem>
|
||||||
|
<UserMenu />
|
||||||
|
</SidebarMenuItem>
|
||||||
|
</SidebarMenu>
|
||||||
|
</SidebarHeader>
|
||||||
|
<SidebarContent>
|
||||||
|
<SidebarGroup>
|
||||||
|
<MainSidebarMenu />
|
||||||
|
</SidebarGroup>
|
||||||
|
</SidebarContent>
|
||||||
|
<SidebarFooter>
|
||||||
|
<SidebarMenu>
|
||||||
|
<CutItemsCard />
|
||||||
|
<BackgroundTaskProgressItem />
|
||||||
|
</SidebarMenu>
|
||||||
|
</SidebarFooter>
|
||||||
|
</Sidebar>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function MainSidebarMenu() {
|
||||||
|
const location = useLocation()
|
||||||
|
|
||||||
|
const isActive = (path: string) => {
|
||||||
|
if (path === "/") {
|
||||||
|
return location.pathname === "/"
|
||||||
|
}
|
||||||
|
return location.pathname.startsWith(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<SidebarMenu>
|
||||||
|
<SidebarMenuItem>
|
||||||
|
<SidebarMenuButton asChild isActive={isActive("/recent")}>
|
||||||
|
<Link to="/recent">
|
||||||
|
<ClockIcon />
|
||||||
|
<span>Recent</span>
|
||||||
|
</Link>
|
||||||
|
</SidebarMenuButton>
|
||||||
|
</SidebarMenuItem>
|
||||||
|
<AllFilesItem />
|
||||||
|
<TrashItem />
|
||||||
|
</SidebarMenu>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function AllFilesItem() {
|
||||||
|
const location = useLocation()
|
||||||
|
const rootDirectory = useConvexQuery(api.files.fetchRootDirectory)
|
||||||
|
|
||||||
|
if (!rootDirectory) return null
|
||||||
|
|
||||||
|
return (
|
||||||
|
<SidebarMenuItem>
|
||||||
|
<SidebarMenuButton
|
||||||
|
asChild
|
||||||
|
isActive={location.pathname.startsWith("/directories")}
|
||||||
|
>
|
||||||
|
<Link to={`/directories/${rootDirectory._id}`}>
|
||||||
|
<FilesIcon />
|
||||||
|
<span>All Files</span>
|
||||||
|
</Link>
|
||||||
|
</SidebarMenuButton>
|
||||||
|
</SidebarMenuItem>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function TrashItem() {
|
||||||
|
const location = useLocation()
|
||||||
|
const rootDirectory = useConvexQuery(api.files.fetchRootDirectory)
|
||||||
|
|
||||||
|
if (!rootDirectory) return null
|
||||||
|
|
||||||
|
return (
|
||||||
|
<SidebarMenuItem>
|
||||||
|
<SidebarMenuButton
|
||||||
|
asChild
|
||||||
|
isActive={location.pathname.startsWith("/trash/directories")}
|
||||||
|
>
|
||||||
|
<Link to={`/trash/directories/${rootDirectory._id}`}>
|
||||||
|
<TrashIcon />
|
||||||
|
<span>Trash</span>
|
||||||
|
</Link>
|
||||||
|
</SidebarMenuButton>
|
||||||
|
</SidebarMenuItem>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function BackgroundTaskProgressItem() {
|
||||||
|
const backgroundTaskProgress = useAtomValue(backgroundTaskProgressAtom)
|
||||||
|
|
||||||
|
if (!backgroundTaskProgress) return null
|
||||||
|
|
||||||
|
return (
|
||||||
|
<SidebarMenuItem className="flex items-center gap-2 opacity-80 text-sm">
|
||||||
|
<LoadingSpinner />
|
||||||
|
{backgroundTaskProgress.label}
|
||||||
|
</SidebarMenuItem>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Displays the number of cut items and allows the user to perform actions on them, such as moving them to a target directory.
|
||||||
|
* Visible when there are cut items.
|
||||||
|
*/
|
||||||
|
function CutItemsCard() {
|
||||||
|
const { directoryId } = useParams({ strict: false })
|
||||||
|
const cutHandles = useAtomValue(cutHandlesAtom)
|
||||||
|
const clearCutItems = useSetAtom(clearCutItemsAtom)
|
||||||
|
const setCutHandles = useSetAtom(cutHandlesAtom)
|
||||||
|
const setBackgroundTaskProgress = useSetAtom(backgroundTaskProgressAtom)
|
||||||
|
const store = useStore()
|
||||||
|
|
||||||
|
const _moveItems = useConvexMutation(api.filesystem.moveItems)
|
||||||
|
const { mutate: moveItems } = useMutation({
|
||||||
|
mutationFn: _moveItems,
|
||||||
|
onMutate: () => {
|
||||||
|
setBackgroundTaskProgress({
|
||||||
|
label: "Moving items…",
|
||||||
|
})
|
||||||
|
const cutHandles = store.get(cutHandlesAtom)
|
||||||
|
clearCutItems()
|
||||||
|
return { cutHandles }
|
||||||
|
},
|
||||||
|
onError: (error, _variables, context) => {
|
||||||
|
if (context?.cutHandles) {
|
||||||
|
setCutHandles(context.cutHandles)
|
||||||
|
}
|
||||||
|
toast.error("Failed to move items", {
|
||||||
|
description: formatError(error),
|
||||||
|
})
|
||||||
|
},
|
||||||
|
onSuccess: () => {
|
||||||
|
toast.success("Items moved")
|
||||||
|
},
|
||||||
|
onSettled: () => {
|
||||||
|
setBackgroundTaskProgress(null)
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if (cutHandles.length === 0) return null
|
||||||
|
|
||||||
|
const moveCutItems = () => {
|
||||||
|
if (directoryId) {
|
||||||
|
moveItems({
|
||||||
|
targetDirectory: newDirectoryHandle(directoryId),
|
||||||
|
items: cutHandles,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<SidebarMenuItem>
|
||||||
|
<Card className="p-0 gap-0 rounded-md overflow-clip">
|
||||||
|
<CardHeader className="px-3.5 py-1.5! gap-0 border-b border-b-primary-foreground/10 bg-primary text-primary-foreground">
|
||||||
|
<CardTitle className="p-0 m-0 text-xs uppercase">
|
||||||
|
<div className="flex items-center gap-1.5">
|
||||||
|
<ScissorsIcon size={16} /> {cutHandles.length} Cut
|
||||||
|
Items
|
||||||
|
</div>
|
||||||
|
</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardFooter className="p-1 flex flex-col">
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
variant="ghost"
|
||||||
|
className="w-full justify-start transition-none"
|
||||||
|
disabled={!directoryId}
|
||||||
|
onClick={moveCutItems}
|
||||||
|
>
|
||||||
|
<FolderInputIcon size={16} />
|
||||||
|
Move items here
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
variant="ghost"
|
||||||
|
className="w-full justify-start transition-none"
|
||||||
|
onClick={() => clearCutItems()}
|
||||||
|
>
|
||||||
|
<CircleXIcon size={16} />
|
||||||
|
Clear
|
||||||
|
</Button>
|
||||||
|
</CardFooter>
|
||||||
|
</Card>
|
||||||
|
</SidebarMenuItem>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function UserMenu() {
|
||||||
|
function handleSignOut() {}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<DropdownMenu>
|
||||||
|
<DropdownMenuTrigger asChild>
|
||||||
|
<SidebarMenuButton size="lg" asChild>
|
||||||
|
<a href="/">
|
||||||
|
<div className="bg-sidebar-primary text-sidebar-primary-foreground flex aspect-square size-8 items-center justify-center rounded-lg">
|
||||||
|
<User2Icon className="size-4" />
|
||||||
|
</div>
|
||||||
|
<div className="grid flex-1 text-left text-sm leading-tight">
|
||||||
|
<span className="truncate font-medium">
|
||||||
|
Acme Inc
|
||||||
|
</span>
|
||||||
|
<span className="truncate text-xs">Enterprise</span>
|
||||||
|
</div>
|
||||||
|
</a>
|
||||||
|
</SidebarMenuButton>
|
||||||
|
</DropdownMenuTrigger>
|
||||||
|
<DropdownMenuContent className="w-64" align="start" side="bottom">
|
||||||
|
<DropdownMenuItem>
|
||||||
|
<SettingsIcon />
|
||||||
|
Settings
|
||||||
|
</DropdownMenuItem>
|
||||||
|
<DropdownMenuItem onClick={handleSignOut}>
|
||||||
|
<LogOutIcon />
|
||||||
|
Log out
|
||||||
|
</DropdownMenuItem>
|
||||||
|
</DropdownMenuContent>
|
||||||
|
</DropdownMenu>
|
||||||
|
)
|
||||||
|
}
|
||||||
9
apps/drive-web/src/dashboard/state.ts
Normal file
9
apps/drive-web/src/dashboard/state.ts
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
import { atom } from "jotai"
|
||||||
|
|
||||||
|
type BackgroundTaskProgress = {
|
||||||
|
label: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export const backgroundTaskProgressAtom = atom<BackgroundTaskProgress | null>(
|
||||||
|
null,
|
||||||
|
)
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
import type { Doc } from "@fileone/convex/_generated/dataModel"
|
import type { Doc } from "@fileone/convex/dataModel"
|
||||||
import type { DirectoryInfo } from "@fileone/convex/model/directories"
|
import type { FileSystemItem } from "@fileone/convex/filesystem"
|
||||||
import type { FileSystemItem } from "@fileone/convex/model/filesystem"
|
import type { DirectoryInfo } from "@fileone/convex/types"
|
||||||
import { createContext } from "react"
|
import { createContext } from "react"
|
||||||
|
|
||||||
type DirectoryPageContextType = {
|
type DirectoryPageContextType = {
|
||||||
@@ -0,0 +1,116 @@
|
|||||||
|
import { api } from "@fileone/convex/api"
|
||||||
|
import { newFileSystemHandle } from "@fileone/convex/filesystem"
|
||||||
|
import { useMutation } from "@tanstack/react-query"
|
||||||
|
import { useMutation as useContextMutation } from "convex/react"
|
||||||
|
import { useAtom, useAtomValue, useSetAtom, useStore } from "jotai"
|
||||||
|
import { TextCursorInputIcon, TrashIcon } from "lucide-react"
|
||||||
|
import { toast } from "sonner"
|
||||||
|
import {
|
||||||
|
ContextMenu,
|
||||||
|
ContextMenuContent,
|
||||||
|
ContextMenuItem,
|
||||||
|
ContextMenuTrigger,
|
||||||
|
} from "@/components/ui/context-menu"
|
||||||
|
import {
|
||||||
|
contextMenuTargeItemsAtom,
|
||||||
|
itemBeingRenamedAtom,
|
||||||
|
optimisticDeletedItemsAtom,
|
||||||
|
} from "./state"
|
||||||
|
|
||||||
|
export function DirectoryContentContextMenu({
|
||||||
|
children,
|
||||||
|
}: {
|
||||||
|
children: React.ReactNode
|
||||||
|
}) {
|
||||||
|
const store = useStore()
|
||||||
|
const [target, setTarget] = useAtom(contextMenuTargeItemsAtom)
|
||||||
|
const setOptimisticDeletedItems = useSetAtom(optimisticDeletedItemsAtom)
|
||||||
|
const moveToTrashMutation = useContextMutation(api.filesystem.moveToTrash)
|
||||||
|
const { mutate: moveToTrash } = useMutation({
|
||||||
|
mutationFn: moveToTrashMutation,
|
||||||
|
onMutate: ({ handles }) => {
|
||||||
|
setOptimisticDeletedItems(
|
||||||
|
(prev) =>
|
||||||
|
new Set([...prev, ...handles.map((handle) => handle.id)]),
|
||||||
|
)
|
||||||
|
},
|
||||||
|
onSuccess: ({ deleted, errors }, { handles }) => {
|
||||||
|
setOptimisticDeletedItems((prev) => {
|
||||||
|
const newSet = new Set(prev)
|
||||||
|
for (const handle of handles) {
|
||||||
|
newSet.delete(handle.id)
|
||||||
|
}
|
||||||
|
return newSet
|
||||||
|
})
|
||||||
|
if (errors.length === 0 && deleted.length === handles.length) {
|
||||||
|
toast.success(`Moved ${handles.length} items to trash`)
|
||||||
|
} else if (errors.length === handles.length) {
|
||||||
|
toast.error("Failed to move to trash")
|
||||||
|
} else {
|
||||||
|
toast.info(
|
||||||
|
`Moved ${deleted.length} items to trash; failed to move ${errors.length} items`,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
const handleDelete = () => {
|
||||||
|
const selectedItems = store.get(contextMenuTargeItemsAtom)
|
||||||
|
if (selectedItems.length > 0) {
|
||||||
|
moveToTrash({
|
||||||
|
handles: selectedItems.map(newFileSystemHandle),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<ContextMenu
|
||||||
|
onOpenChange={(open) => {
|
||||||
|
if (!open) {
|
||||||
|
setTarget([])
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<ContextMenuTrigger asChild>{children}</ContextMenuTrigger>
|
||||||
|
{target && (
|
||||||
|
<ContextMenuContent>
|
||||||
|
<RenameMenuItem />
|
||||||
|
<ContextMenuItem onClick={handleDelete}>
|
||||||
|
<TrashIcon />
|
||||||
|
Move to trash
|
||||||
|
</ContextMenuItem>
|
||||||
|
</ContextMenuContent>
|
||||||
|
)}
|
||||||
|
</ContextMenu>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function RenameMenuItem() {
|
||||||
|
const store = useStore()
|
||||||
|
const target = useAtomValue(contextMenuTargeItemsAtom)
|
||||||
|
const setItemBeingRenamed = useSetAtom(itemBeingRenamedAtom)
|
||||||
|
|
||||||
|
const handleRename = () => {
|
||||||
|
const selectedItems = store.get(contextMenuTargeItemsAtom)
|
||||||
|
if (selectedItems.length === 1) {
|
||||||
|
// biome-ignore lint/style/noNonNullAssertion: length is checked
|
||||||
|
const selectedItem = selectedItems[0]!
|
||||||
|
setItemBeingRenamed({
|
||||||
|
originalItem: selectedItem,
|
||||||
|
name: selectedItem.doc.name,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only render if exactly one item is selected
|
||||||
|
if (target.length !== 1) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<ContextMenuItem onClick={handleRename}>
|
||||||
|
<TextCursorInputIcon />
|
||||||
|
Rename
|
||||||
|
</ContextMenuItem>
|
||||||
|
)
|
||||||
|
}
|
||||||
@@ -1,5 +1,4 @@
|
|||||||
import { api } from "@fileone/convex/_generated/api"
|
import type { Doc } from "@fileone/convex/dataModel"
|
||||||
import type { Doc } from "@fileone/convex/_generated/dataModel"
|
|
||||||
import {
|
import {
|
||||||
type DirectoryHandle,
|
type DirectoryHandle,
|
||||||
type FileHandle,
|
type FileHandle,
|
||||||
@@ -10,30 +9,21 @@ import {
|
|||||||
newDirectoryHandle,
|
newDirectoryHandle,
|
||||||
newFileHandle,
|
newFileHandle,
|
||||||
newFileSystemHandle,
|
newFileSystemHandle,
|
||||||
} from "@fileone/convex/model/filesystem"
|
} from "@fileone/convex/filesystem"
|
||||||
import { useMutation } from "@tanstack/react-query"
|
|
||||||
import { Link, useNavigate } from "@tanstack/react-router"
|
import { Link, useNavigate } from "@tanstack/react-router"
|
||||||
import {
|
import {
|
||||||
type ColumnDef,
|
type ColumnDef,
|
||||||
flexRender,
|
flexRender,
|
||||||
getCoreRowModel,
|
getCoreRowModel,
|
||||||
|
getFilteredRowModel,
|
||||||
type Row,
|
type Row,
|
||||||
type Table as TableType,
|
type Table as TableType,
|
||||||
useReactTable,
|
useReactTable,
|
||||||
} from "@tanstack/react-table"
|
} from "@tanstack/react-table"
|
||||||
import { useMutation as useContextMutation } from "convex/react"
|
import { type PrimitiveAtom, useSetAtom, useStore } from "jotai"
|
||||||
import { useAtom, useAtomValue, useSetAtom, useStore } from "jotai"
|
import { useContext, useEffect, useMemo, useRef } from "react"
|
||||||
import { TextCursorInputIcon, TrashIcon } from "lucide-react"
|
|
||||||
import { useContext, useEffect, useRef } from "react"
|
|
||||||
import { toast } from "sonner"
|
|
||||||
import { DirectoryIcon } from "@/components/icons/directory-icon"
|
import { DirectoryIcon } from "@/components/icons/directory-icon"
|
||||||
import { Checkbox } from "@/components/ui/checkbox"
|
import { Checkbox } from "@/components/ui/checkbox"
|
||||||
import {
|
|
||||||
ContextMenu,
|
|
||||||
ContextMenuContent,
|
|
||||||
ContextMenuItem,
|
|
||||||
ContextMenuTrigger,
|
|
||||||
} from "@/components/ui/context-menu"
|
|
||||||
import {
|
import {
|
||||||
Table,
|
Table,
|
||||||
TableBody,
|
TableBody,
|
||||||
@@ -47,16 +37,22 @@ import {
|
|||||||
keyboardModifierAtom,
|
keyboardModifierAtom,
|
||||||
} from "@/lib/keyboard"
|
} from "@/lib/keyboard"
|
||||||
import { TextFileIcon } from "../../components/icons/text-file-icon"
|
import { TextFileIcon } from "../../components/icons/text-file-icon"
|
||||||
import { useFileDrop } from "../../files/use-file-drop"
|
import { type FileDragInfo, useFileDrop } from "../../files/use-file-drop"
|
||||||
import { cn } from "../../lib/utils"
|
import { cn } from "../../lib/utils"
|
||||||
import { DirectoryPageContext } from "./context"
|
import { DirectoryPageContext } from "./context"
|
||||||
import {
|
|
||||||
contextMenuTargeItemsAtom,
|
type DirectoryContentTableItemIdFilter = Set<FileSystemItem["doc"]["_id"]>
|
||||||
dragInfoAtom,
|
|
||||||
itemBeingRenamedAtom,
|
type DirectoryContentTableProps = {
|
||||||
openedFileAtom,
|
hiddenItems: DirectoryContentTableItemIdFilter
|
||||||
optimisticDeletedItemsAtom,
|
directoryUrlFn: (directory: Doc<"directories">) => string
|
||||||
} from "./state"
|
fileDragInfoAtom: PrimitiveAtom<FileDragInfo | null>
|
||||||
|
onContextMenu: (
|
||||||
|
row: Row<FileSystemItem>,
|
||||||
|
table: TableType<FileSystemItem>,
|
||||||
|
) => void
|
||||||
|
onOpenFile: (file: Doc<"files">) => void
|
||||||
|
}
|
||||||
|
|
||||||
function formatFileSize(bytes: number): string {
|
function formatFileSize(bytes: number): string {
|
||||||
if (bytes === 0) return "0 B"
|
if (bytes === 0) return "0 B"
|
||||||
@@ -68,180 +64,121 @@ function formatFileSize(bytes: number): string {
|
|||||||
return `${parseFloat((bytes / k ** i).toFixed(2))} ${sizes[i]}`
|
return `${parseFloat((bytes / k ** i).toFixed(2))} ${sizes[i]}`
|
||||||
}
|
}
|
||||||
|
|
||||||
const columns: ColumnDef<FileSystemItem>[] = [
|
function useTableColumns(
|
||||||
{
|
onOpenFile: (file: Doc<"files">) => void,
|
||||||
id: "select",
|
directoryUrlFn: (directory: Doc<"directories">) => string,
|
||||||
header: ({ table }) => (
|
): ColumnDef<FileSystemItem>[] {
|
||||||
<Checkbox
|
return useMemo(
|
||||||
checked={table.getIsAllPageRowsSelected()}
|
() => [
|
||||||
onCheckedChange={(value) => {
|
{
|
||||||
table.toggleAllPageRowsSelected(!!value)
|
id: "select",
|
||||||
}}
|
header: ({ table }) => (
|
||||||
aria-label="Select all"
|
<Checkbox
|
||||||
/>
|
checked={table.getIsAllPageRowsSelected()}
|
||||||
),
|
onCheckedChange={(value) => {
|
||||||
cell: ({ row }) => (
|
table.toggleAllPageRowsSelected(!!value)
|
||||||
<Checkbox
|
}}
|
||||||
checked={row.getIsSelected()}
|
aria-label="Select all"
|
||||||
onClick={(e) => {
|
/>
|
||||||
e.stopPropagation()
|
),
|
||||||
}}
|
cell: ({ row }) => (
|
||||||
onCheckedChange={row.getToggleSelectedHandler()}
|
<Checkbox
|
||||||
aria-label="Select row"
|
checked={row.getIsSelected()}
|
||||||
/>
|
onClick={(e) => {
|
||||||
),
|
e.stopPropagation()
|
||||||
enableSorting: false,
|
}}
|
||||||
enableHiding: false,
|
onCheckedChange={row.getToggleSelectedHandler()}
|
||||||
size: 24,
|
aria-label="Select row"
|
||||||
},
|
/>
|
||||||
{
|
),
|
||||||
header: "Name",
|
enableSorting: false,
|
||||||
accessorKey: "doc.name",
|
enableHiding: false,
|
||||||
cell: ({ row }) => {
|
size: 24,
|
||||||
switch (row.original.kind) {
|
},
|
||||||
case FileType.File:
|
{
|
||||||
return <FileNameCell file={row.original.doc} />
|
header: "Name",
|
||||||
case FileType.Directory:
|
accessorKey: "doc.name",
|
||||||
return <DirectoryNameCell directory={row.original.doc} />
|
cell: ({ row }) => {
|
||||||
}
|
switch (row.original.kind) {
|
||||||
},
|
case FileType.File:
|
||||||
size: 1000,
|
return (
|
||||||
},
|
<FileNameCell
|
||||||
{
|
file={row.original.doc}
|
||||||
header: "Size",
|
onOpenFile={onOpenFile}
|
||||||
accessorKey: "size",
|
/>
|
||||||
cell: ({ row }) => {
|
)
|
||||||
switch (row.original.kind) {
|
case FileType.Directory:
|
||||||
case FileType.File:
|
return (
|
||||||
return <div>{formatFileSize(row.original.doc.size)}</div>
|
<DirectoryNameCell
|
||||||
case FileType.Directory:
|
directory={row.original.doc}
|
||||||
return <div className="font-mono">-</div>
|
directoryUrlFn={directoryUrlFn}
|
||||||
}
|
/>
|
||||||
},
|
)
|
||||||
},
|
}
|
||||||
{
|
},
|
||||||
header: "Created At",
|
size: 1000,
|
||||||
accessorKey: "createdAt",
|
},
|
||||||
cell: ({ row }) => {
|
{
|
||||||
return (
|
header: "Size",
|
||||||
<div>
|
accessorKey: "size",
|
||||||
{new Date(row.original.doc.createdAt).toLocaleString()}
|
cell: ({ row }) => {
|
||||||
</div>
|
switch (row.original.kind) {
|
||||||
)
|
case FileType.File:
|
||||||
},
|
return (
|
||||||
},
|
<div>
|
||||||
]
|
{formatFileSize(row.original.doc.size)}
|
||||||
|
</div>
|
||||||
export function DirectoryContentTable() {
|
)
|
||||||
return (
|
case FileType.Directory:
|
||||||
<DirectoryContentTableContextMenu>
|
return <div className="font-mono">-</div>
|
||||||
<div className="w-full">
|
}
|
||||||
<DirectoryContentTableContent />
|
},
|
||||||
</div>
|
},
|
||||||
</DirectoryContentTableContextMenu>
|
{
|
||||||
|
header: "Created At",
|
||||||
|
accessorKey: "createdAt",
|
||||||
|
cell: ({ row }) => {
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
{new Date(
|
||||||
|
row.original.doc.createdAt,
|
||||||
|
).toLocaleString()}
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
[onOpenFile, directoryUrlFn],
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
export function DirectoryContentTableContextMenu({
|
export function DirectoryContentTable({
|
||||||
children,
|
hiddenItems,
|
||||||
}: {
|
directoryUrlFn,
|
||||||
children: React.ReactNode
|
onContextMenu,
|
||||||
}) {
|
fileDragInfoAtom,
|
||||||
const store = useStore()
|
onOpenFile,
|
||||||
const [target, setTarget] = useAtom(contextMenuTargeItemsAtom)
|
}: DirectoryContentTableProps) {
|
||||||
const setOptimisticDeletedItems = useSetAtom(optimisticDeletedItemsAtom)
|
|
||||||
const moveToTrashMutation = useContextMutation(api.filesystem.moveToTrash)
|
|
||||||
const setItemBeingRenamed = useSetAtom(itemBeingRenamedAtom)
|
|
||||||
const { mutate: moveToTrash } = useMutation({
|
|
||||||
mutationFn: moveToTrashMutation,
|
|
||||||
onMutate: ({ handles }) => {
|
|
||||||
setOptimisticDeletedItems(
|
|
||||||
(prev) =>
|
|
||||||
new Set([...prev, ...handles.map((handle) => handle.id)]),
|
|
||||||
)
|
|
||||||
},
|
|
||||||
onSuccess: ({ deleted, errors }, { handles }) => {
|
|
||||||
setOptimisticDeletedItems((prev) => {
|
|
||||||
const newSet = new Set(prev)
|
|
||||||
for (const handle of handles) {
|
|
||||||
newSet.delete(handle.id)
|
|
||||||
}
|
|
||||||
return newSet
|
|
||||||
})
|
|
||||||
if (errors.length === 0 && deleted.length === handles.length) {
|
|
||||||
toast.success(`Moved ${handles.length} items to trash`)
|
|
||||||
} else if (errors.length === handles.length) {
|
|
||||||
toast.error("Failed to move to trash")
|
|
||||||
} else {
|
|
||||||
toast.info(
|
|
||||||
`Moved ${deleted.length} items to trash; failed to move ${errors.length} items`,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
const handleRename = () => {
|
|
||||||
const selectedItems = store.get(contextMenuTargeItemsAtom)
|
|
||||||
if (selectedItems.length === 1) {
|
|
||||||
// biome-ignore lint/style/noNonNullAssertion: length is checked
|
|
||||||
const selectedItem = selectedItems[0]!
|
|
||||||
setItemBeingRenamed({
|
|
||||||
originalItem: selectedItem,
|
|
||||||
name: selectedItem.doc.name,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const handleDelete = () => {
|
|
||||||
const selectedItems = store.get(contextMenuTargeItemsAtom)
|
|
||||||
if (selectedItems.length > 0) {
|
|
||||||
moveToTrash({
|
|
||||||
handles: selectedItems.map(newFileSystemHandle),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
|
||||||
<ContextMenu
|
|
||||||
onOpenChange={(open) => {
|
|
||||||
if (!open) {
|
|
||||||
setTarget([])
|
|
||||||
}
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
<ContextMenuTrigger asChild>{children}</ContextMenuTrigger>
|
|
||||||
{target && (
|
|
||||||
<ContextMenuContent>
|
|
||||||
<ContextMenuItem onClick={handleRename}>
|
|
||||||
<TextCursorInputIcon />
|
|
||||||
Rename
|
|
||||||
</ContextMenuItem>
|
|
||||||
<ContextMenuItem onClick={handleDelete}>
|
|
||||||
<TrashIcon />
|
|
||||||
Move to trash
|
|
||||||
</ContextMenuItem>
|
|
||||||
</ContextMenuContent>
|
|
||||||
)}
|
|
||||||
</ContextMenu>
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
export function DirectoryContentTableContent() {
|
|
||||||
const { directoryContent } = useContext(DirectoryPageContext)
|
const { directoryContent } = useContext(DirectoryPageContext)
|
||||||
const optimisticDeletedItems = useAtomValue(optimisticDeletedItemsAtom)
|
|
||||||
const setContextMenuTargetItem = useSetAtom(contextMenuTargeItemsAtom)
|
|
||||||
const store = useStore()
|
const store = useStore()
|
||||||
const navigate = useNavigate()
|
const navigate = useNavigate()
|
||||||
|
|
||||||
const table = useReactTable({
|
const table = useReactTable({
|
||||||
data: directoryContent || [],
|
data: directoryContent || [],
|
||||||
columns,
|
columns: useTableColumns(onOpenFile, directoryUrlFn),
|
||||||
getCoreRowModel: getCoreRowModel(),
|
getCoreRowModel: getCoreRowModel(),
|
||||||
|
getFilteredRowModel: getFilteredRowModel(),
|
||||||
enableRowSelection: true,
|
enableRowSelection: true,
|
||||||
enableGlobalFilter: true,
|
enableGlobalFilter: true,
|
||||||
globalFilterFn: (row, _columnId, _filterValue, _addMeta) => {
|
state: {
|
||||||
return !optimisticDeletedItems.has(row.original.doc._id)
|
globalFilter: hiddenItems,
|
||||||
},
|
},
|
||||||
|
globalFilterFn: (
|
||||||
|
row,
|
||||||
|
_columnId,
|
||||||
|
filterValue: DirectoryContentTableItemIdFilter,
|
||||||
|
_addMeta,
|
||||||
|
) => !filterValue.has(row.original.doc._id),
|
||||||
getRowId: (row) => row.doc._id,
|
getRowId: (row) => row.doc._id,
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -262,17 +199,10 @@ export function DirectoryContentTableContent() {
|
|||||||
row: Row<FileSystemItem>,
|
row: Row<FileSystemItem>,
|
||||||
_event: React.MouseEvent,
|
_event: React.MouseEvent,
|
||||||
) => {
|
) => {
|
||||||
const target = store.get(contextMenuTargeItemsAtom)
|
if (!row.getIsSelected()) {
|
||||||
if (target.length > 0) {
|
|
||||||
setContextMenuTargetItem([])
|
|
||||||
} else if (row.getIsSelected()) {
|
|
||||||
setContextMenuTargetItem(
|
|
||||||
table.getSelectedRowModel().rows.map((row) => row.original),
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
selectRow(row)
|
selectRow(row)
|
||||||
setContextMenuTargetItem([row.original])
|
|
||||||
}
|
}
|
||||||
|
onContextMenu(row, table)
|
||||||
}
|
}
|
||||||
|
|
||||||
const selectRow = (row: Row<FileSystemItem>) => {
|
const selectRow = (row: Row<FileSystemItem>) => {
|
||||||
@@ -336,6 +266,7 @@ export function DirectoryContentTableContent() {
|
|||||||
table={table}
|
table={table}
|
||||||
row={row}
|
row={row}
|
||||||
onClick={() => selectRow(row)}
|
onClick={() => selectRow(row)}
|
||||||
|
fileDragInfoAtom={fileDragInfoAtom}
|
||||||
onContextMenu={(e) =>
|
onContextMenu={(e) =>
|
||||||
handleRowContextMenu(row, e)
|
handleRowContextMenu(row, e)
|
||||||
}
|
}
|
||||||
@@ -355,8 +286,8 @@ export function DirectoryContentTableContent() {
|
|||||||
|
|
||||||
function NoResultsRow() {
|
function NoResultsRow() {
|
||||||
return (
|
return (
|
||||||
<TableRow>
|
<TableRow className="hover:bg-transparent">
|
||||||
<TableCell colSpan={columns.length} className="text-center">
|
<TableCell colSpan={4} className="text-center">
|
||||||
No results.
|
No results.
|
||||||
</TableCell>
|
</TableCell>
|
||||||
</TableRow>
|
</TableRow>
|
||||||
@@ -369,22 +300,24 @@ function FileItemRow({
|
|||||||
onClick,
|
onClick,
|
||||||
onContextMenu,
|
onContextMenu,
|
||||||
onDoubleClick,
|
onDoubleClick,
|
||||||
|
fileDragInfoAtom,
|
||||||
}: {
|
}: {
|
||||||
table: TableType<FileSystemItem>
|
table: TableType<FileSystemItem>
|
||||||
row: Row<FileSystemItem>
|
row: Row<FileSystemItem>
|
||||||
onClick: () => void
|
onClick: () => void
|
||||||
onContextMenu: (e: React.MouseEvent) => void
|
onContextMenu: (e: React.MouseEvent) => void
|
||||||
onDoubleClick: () => void
|
onDoubleClick: () => void
|
||||||
|
fileDragInfoAtom: PrimitiveAtom<FileDragInfo | null>
|
||||||
}) {
|
}) {
|
||||||
const ref = useRef<HTMLTableRowElement>(null)
|
const ref = useRef<HTMLTableRowElement>(null)
|
||||||
const setDragInfo = useSetAtom(dragInfoAtom)
|
const setFileDragInfo = useSetAtom(fileDragInfoAtom)
|
||||||
|
|
||||||
const { isDraggedOver, dropHandlers } = useFileDrop({
|
const { isDraggedOver, dropHandlers } = useFileDrop({
|
||||||
destItem:
|
destItem:
|
||||||
row.original.kind === FileType.Directory
|
row.original.kind === FileType.Directory
|
||||||
? newDirectoryHandle(row.original.doc._id)
|
? newDirectoryHandle(row.original.doc._id)
|
||||||
: null,
|
: null,
|
||||||
dragInfoAtom,
|
dragInfoAtom: fileDragInfoAtom,
|
||||||
})
|
})
|
||||||
|
|
||||||
const handleDragStart = (_e: React.DragEvent) => {
|
const handleDragStart = (_e: React.DragEvent) => {
|
||||||
@@ -411,14 +344,14 @@ function FileItemRow({
|
|||||||
draggedItems = [source]
|
draggedItems = [source]
|
||||||
}
|
}
|
||||||
|
|
||||||
setDragInfo({
|
setFileDragInfo({
|
||||||
source,
|
source,
|
||||||
items: draggedItems,
|
items: draggedItems,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
const handleDragEnd = () => {
|
const handleDragEnd = () => {
|
||||||
setDragInfo(null)
|
setFileDragInfo(null)
|
||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@@ -448,23 +381,30 @@ function FileItemRow({
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
function DirectoryNameCell({ directory }: { directory: Doc<"directories"> }) {
|
function DirectoryNameCell({
|
||||||
|
directory,
|
||||||
|
directoryUrlFn,
|
||||||
|
}: {
|
||||||
|
directory: Doc<"directories">
|
||||||
|
directoryUrlFn: (directory: Doc<"directories">) => string
|
||||||
|
}) {
|
||||||
return (
|
return (
|
||||||
<div className="flex w-full items-center gap-2">
|
<div className="flex w-full items-center gap-2">
|
||||||
<DirectoryIcon className="size-4" />
|
<DirectoryIcon className="size-4" />
|
||||||
<Link
|
<Link className="hover:underline" to={directoryUrlFn(directory)}>
|
||||||
className="hover:underline"
|
|
||||||
to={`/directories/${directory._id}`}
|
|
||||||
>
|
|
||||||
{directory.name}
|
{directory.name}
|
||||||
</Link>
|
</Link>
|
||||||
</div>
|
</div>
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
function FileNameCell({ file }: { file: Doc<"files"> }) {
|
function FileNameCell({
|
||||||
const setOpenedFile = useSetAtom(openedFileAtom)
|
file,
|
||||||
|
onOpenFile,
|
||||||
|
}: {
|
||||||
|
file: Doc<"files">
|
||||||
|
onOpenFile: (file: Doc<"files">) => void
|
||||||
|
}) {
|
||||||
return (
|
return (
|
||||||
<div className="flex w-full items-center gap-2">
|
<div className="flex w-full items-center gap-2">
|
||||||
<TextFileIcon className="size-4" />
|
<TextFileIcon className="size-4" />
|
||||||
@@ -472,7 +412,7 @@ function FileNameCell({ file }: { file: Doc<"files"> }) {
|
|||||||
type="button"
|
type="button"
|
||||||
className="hover:underline cursor-pointer"
|
className="hover:underline cursor-pointer"
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
setOpenedFile(file)
|
onOpenFile(file)
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
{file.name}
|
{file.name}
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
import { api } from "@fileone/convex/_generated/api"
|
import { api } from "@fileone/convex/api"
|
||||||
import type { Id } from "@fileone/convex/_generated/dataModel"
|
import type { Id } from "@fileone/convex/dataModel"
|
||||||
import { useMutation } from "@tanstack/react-query"
|
import { useMutation } from "@tanstack/react-query"
|
||||||
import { useMutation as useContextMutation } from "convex/react"
|
import { useMutation as useContextMutation } from "convex/react"
|
||||||
import { useId } from "react"
|
import { useId } from "react"
|
||||||
@@ -0,0 +1,93 @@
|
|||||||
|
import { api } from "@fileone/convex/api"
|
||||||
|
import { type FileSystemItem, FileType } from "@fileone/convex/filesystem"
|
||||||
|
import { useMutation } from "@tanstack/react-query"
|
||||||
|
import { useMutation as useContextMutation } from "convex/react"
|
||||||
|
import { useId } from "react"
|
||||||
|
import { Button } from "@/components/ui/button"
|
||||||
|
import {
|
||||||
|
Dialog,
|
||||||
|
DialogClose,
|
||||||
|
DialogContent,
|
||||||
|
DialogFooter,
|
||||||
|
DialogHeader,
|
||||||
|
DialogTitle,
|
||||||
|
} from "@/components/ui/dialog"
|
||||||
|
import { Input } from "@/components/ui/input"
|
||||||
|
|
||||||
|
type RenameFileDialogProps = {
|
||||||
|
item: FileSystemItem
|
||||||
|
onRenameSuccess: () => void
|
||||||
|
onClose: () => void
|
||||||
|
}
|
||||||
|
|
||||||
|
export function RenameFileDialog({
|
||||||
|
item,
|
||||||
|
onRenameSuccess,
|
||||||
|
onClose,
|
||||||
|
}: RenameFileDialogProps) {
|
||||||
|
const formId = useId()
|
||||||
|
|
||||||
|
const { mutate: renameFile, isPending: isRenaming } = useMutation({
|
||||||
|
mutationFn: useContextMutation(api.files.renameFile),
|
||||||
|
onSuccess: () => {
|
||||||
|
onRenameSuccess()
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
const onSubmit = (event: React.FormEvent<HTMLFormElement>) => {
|
||||||
|
event.preventDefault()
|
||||||
|
|
||||||
|
const formData = new FormData(event.currentTarget)
|
||||||
|
const newName = formData.get("itemName") as string
|
||||||
|
|
||||||
|
if (newName) {
|
||||||
|
switch (item.kind) {
|
||||||
|
case FileType.File:
|
||||||
|
renameFile({
|
||||||
|
directoryId: item.doc.directoryId,
|
||||||
|
itemId: item.doc._id,
|
||||||
|
newName,
|
||||||
|
})
|
||||||
|
break
|
||||||
|
default:
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Dialog
|
||||||
|
open
|
||||||
|
onOpenChange={(open) => {
|
||||||
|
if (!open) {
|
||||||
|
onClose()
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<DialogContent className="sm:max-w-md">
|
||||||
|
<DialogHeader>
|
||||||
|
<DialogTitle>Rename File</DialogTitle>
|
||||||
|
</DialogHeader>
|
||||||
|
|
||||||
|
<form id={formId} onSubmit={onSubmit}>
|
||||||
|
<RenameFileInput initialValue={item.doc.name} />
|
||||||
|
</form>
|
||||||
|
|
||||||
|
<DialogFooter>
|
||||||
|
<DialogClose asChild>
|
||||||
|
<Button loading={isRenaming} variant="outline">
|
||||||
|
<span>Cancel</span>
|
||||||
|
</Button>
|
||||||
|
</DialogClose>
|
||||||
|
<Button loading={isRenaming} type="submit" form={formId}>
|
||||||
|
<span>Rename</span>
|
||||||
|
</Button>
|
||||||
|
</DialogFooter>
|
||||||
|
</DialogContent>
|
||||||
|
</Dialog>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function RenameFileInput({ initialValue }: { initialValue: string }) {
|
||||||
|
return <Input defaultValue={initialValue} name="itemName" />
|
||||||
|
}
|
||||||
@@ -26,14 +26,18 @@ export function SkeletonDemo() {
|
|||||||
|
|
||||||
{showPageSkeleton && (
|
{showPageSkeleton && (
|
||||||
<div className="border rounded-lg p-4">
|
<div className="border rounded-lg p-4">
|
||||||
<h3 className="text-lg font-semibold mb-4">Directory Page Skeleton</h3>
|
<h3 className="text-lg font-semibold mb-4">
|
||||||
|
Directory Page Skeleton
|
||||||
|
</h3>
|
||||||
<DirectoryPageSkeleton />
|
<DirectoryPageSkeleton />
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{showTableSkeleton && (
|
{showTableSkeleton && (
|
||||||
<div className="border rounded-lg p-4">
|
<div className="border rounded-lg p-4">
|
||||||
<h3 className="text-lg font-semibold mb-4">Directory Content Table Skeleton</h3>
|
<h3 className="text-lg font-semibold mb-4">
|
||||||
|
Directory Content Table Skeleton
|
||||||
|
</h3>
|
||||||
<DirectoryContentTableSkeleton rows={5} />
|
<DirectoryContentTableSkeleton rows={5} />
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
@@ -1,6 +1,5 @@
|
|||||||
import type { Doc, Id } from "@fileone/convex/_generated/dataModel"
|
import type { Doc, Id } from "@fileone/convex/dataModel"
|
||||||
import type { DirectoryItemKind } from "@fileone/convex/model/directories"
|
import type { FileSystemItem } from "@fileone/convex/filesystem"
|
||||||
import type { FileSystemItem, FileType } from "@fileone/convex/model/filesystem"
|
|
||||||
import type { RowSelectionState } from "@tanstack/react-table"
|
import type { RowSelectionState } from "@tanstack/react-table"
|
||||||
import { atom } from "jotai"
|
import { atom } from "jotai"
|
||||||
import type { FileDragInfo } from "../../files/use-file-drop"
|
import type { FileDragInfo } from "../../files/use-file-drop"
|
||||||
@@ -12,8 +11,6 @@ export const optimisticDeletedItemsAtom = atom(
|
|||||||
|
|
||||||
export const selectedFileRowsAtom = atom<RowSelectionState>({})
|
export const selectedFileRowsAtom = atom<RowSelectionState>({})
|
||||||
|
|
||||||
export const newFileTypeAtom = atom<FileType | null>(null)
|
|
||||||
|
|
||||||
export const itemBeingRenamedAtom = atom<{
|
export const itemBeingRenamedAtom = atom<{
|
||||||
originalItem: FileSystemItem
|
originalItem: FileSystemItem
|
||||||
name: string
|
name: string
|
||||||
115
apps/drive-web/src/directories/directory-path-breadcrumb.tsx
Normal file
115
apps/drive-web/src/directories/directory-path-breadcrumb.tsx
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
import type { Id } from "@fileone/convex/dataModel"
|
||||||
|
import type {
|
||||||
|
DirectoryHandle,
|
||||||
|
DirectoryPathComponent,
|
||||||
|
} from "@fileone/convex/filesystem"
|
||||||
|
import type { DirectoryInfo } from "@fileone/convex/types"
|
||||||
|
import { Link } from "@tanstack/react-router"
|
||||||
|
import type { PrimitiveAtom } from "jotai"
|
||||||
|
import { atom } from "jotai"
|
||||||
|
import { Fragment } from "react"
|
||||||
|
import {
|
||||||
|
Breadcrumb,
|
||||||
|
BreadcrumbItem,
|
||||||
|
BreadcrumbLink,
|
||||||
|
BreadcrumbList,
|
||||||
|
BreadcrumbPage,
|
||||||
|
BreadcrumbSeparator,
|
||||||
|
} from "@/components/ui/breadcrumb"
|
||||||
|
import {
|
||||||
|
Tooltip,
|
||||||
|
TooltipContent,
|
||||||
|
TooltipTrigger,
|
||||||
|
} from "@/components/ui/tooltip"
|
||||||
|
import type { FileDragInfo } from "@/files/use-file-drop"
|
||||||
|
import { useFileDrop } from "@/files/use-file-drop"
|
||||||
|
import { cn } from "@/lib/utils"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This is a placeholder file drag info atom that always stores null and is never mutated.
|
||||||
|
*/
|
||||||
|
const nullFileDragInfoAtom = atom<FileDragInfo | null>(null)
|
||||||
|
|
||||||
|
export function DirectoryPathBreadcrumb({
|
||||||
|
directory,
|
||||||
|
rootLabel,
|
||||||
|
directoryUrlFn,
|
||||||
|
fileDragInfoAtom = nullFileDragInfoAtom,
|
||||||
|
}: {
|
||||||
|
directory: DirectoryInfo
|
||||||
|
rootLabel: string
|
||||||
|
directoryUrlFn: (directory: Id<"directories">) => string
|
||||||
|
fileDragInfoAtom?: PrimitiveAtom<FileDragInfo | null>
|
||||||
|
}) {
|
||||||
|
const breadcrumbItems: React.ReactNode[] = [
|
||||||
|
<FilePathBreadcrumbItem
|
||||||
|
key={directory.path[0].handle.id}
|
||||||
|
component={directory.path[0]}
|
||||||
|
rootLabel={rootLabel}
|
||||||
|
directoryUrlFn={directoryUrlFn}
|
||||||
|
fileDragInfoAtom={fileDragInfoAtom}
|
||||||
|
/>,
|
||||||
|
]
|
||||||
|
for (let i = 1; i < directory.path.length - 1; i++) {
|
||||||
|
breadcrumbItems.push(
|
||||||
|
<Fragment key={directory.path[i]?.handle.id}>
|
||||||
|
<BreadcrumbSeparator />
|
||||||
|
<FilePathBreadcrumbItem
|
||||||
|
component={directory.path[i]!}
|
||||||
|
rootLabel={rootLabel}
|
||||||
|
directoryUrlFn={directoryUrlFn}
|
||||||
|
fileDragInfoAtom={fileDragInfoAtom}
|
||||||
|
/>
|
||||||
|
</Fragment>,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Breadcrumb>
|
||||||
|
<BreadcrumbList>
|
||||||
|
{breadcrumbItems}
|
||||||
|
<BreadcrumbSeparator />
|
||||||
|
<BreadcrumbItem>
|
||||||
|
<BreadcrumbPage>{directory.name}</BreadcrumbPage>{" "}
|
||||||
|
</BreadcrumbItem>
|
||||||
|
</BreadcrumbList>
|
||||||
|
</Breadcrumb>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function FilePathBreadcrumbItem({
|
||||||
|
component,
|
||||||
|
rootLabel,
|
||||||
|
directoryUrlFn,
|
||||||
|
fileDragInfoAtom,
|
||||||
|
}: {
|
||||||
|
component: DirectoryPathComponent
|
||||||
|
rootLabel: string
|
||||||
|
directoryUrlFn: (directory: Id<"directories">) => string
|
||||||
|
fileDragInfoAtom: PrimitiveAtom<FileDragInfo | null>
|
||||||
|
}) {
|
||||||
|
const { isDraggedOver, dropHandlers } = useFileDrop({
|
||||||
|
destItem: component.handle as DirectoryHandle,
|
||||||
|
dragInfoAtom: fileDragInfoAtom,
|
||||||
|
})
|
||||||
|
|
||||||
|
const dirName = component.name || rootLabel
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Tooltip open={isDraggedOver}>
|
||||||
|
<TooltipTrigger asChild>
|
||||||
|
<BreadcrumbItem
|
||||||
|
className={cn({ "bg-muted": isDraggedOver })}
|
||||||
|
{...dropHandlers}
|
||||||
|
>
|
||||||
|
<BreadcrumbLink asChild>
|
||||||
|
<Link to={directoryUrlFn(component.handle.id)}>
|
||||||
|
{dirName}
|
||||||
|
</Link>
|
||||||
|
</BreadcrumbLink>
|
||||||
|
</BreadcrumbItem>
|
||||||
|
</TooltipTrigger>
|
||||||
|
<TooltipContent>Move to {dirName}</TooltipContent>
|
||||||
|
</Tooltip>
|
||||||
|
)
|
||||||
|
}
|
||||||
71
apps/drive-web/src/files/PickedFileItem.tsx
Normal file
71
apps/drive-web/src/files/PickedFileItem.tsx
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
import { useAtomValue } from "jotai"
|
||||||
|
import { CircleAlertIcon, XIcon } from "lucide-react"
|
||||||
|
import type React from "react"
|
||||||
|
import { Button } from "@/components/ui/button"
|
||||||
|
import { Progress } from "@/components/ui/progress"
|
||||||
|
import { Tooltip } from "@/components/ui/tooltip"
|
||||||
|
import { FileUploadStatusKind, fileUploadStatusAtomFamily } from "./store"
|
||||||
|
import type { PickedFile } from "./upload-file-dialog"
|
||||||
|
|
||||||
|
export function PickedFileItem({
|
||||||
|
file: pickedFile,
|
||||||
|
onRemove,
|
||||||
|
}: {
|
||||||
|
file: PickedFile
|
||||||
|
onRemove: (file: PickedFile) => void
|
||||||
|
}) {
|
||||||
|
const fileUploadAtom = fileUploadStatusAtomFamily(pickedFile.id)
|
||||||
|
const fileUpload = useAtomValue(fileUploadAtom)
|
||||||
|
console.log("fileUpload", fileUpload)
|
||||||
|
const { file, id } = pickedFile
|
||||||
|
|
||||||
|
let statusIndicator: React.ReactNode
|
||||||
|
if (!fileUpload) {
|
||||||
|
statusIndicator = (
|
||||||
|
<Button
|
||||||
|
variant="ghost"
|
||||||
|
size="icon"
|
||||||
|
onClick={() => onRemove(pickedFile)}
|
||||||
|
>
|
||||||
|
<XIcon className="size-4" />
|
||||||
|
</Button>
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
switch (fileUpload.kind) {
|
||||||
|
case FileUploadStatusKind.InProgress:
|
||||||
|
statusIndicator = <Progress value={fileUpload.progress * 100} />
|
||||||
|
break
|
||||||
|
case FileUploadStatusKind.Error:
|
||||||
|
statusIndicator = (
|
||||||
|
<Tooltip>
|
||||||
|
<TooltipTrigger>
|
||||||
|
<CircleAlertIcon />
|
||||||
|
</TooltipTrigger>
|
||||||
|
</Tooltip>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<li
|
||||||
|
className="pl-3 pr-1 py-0.5 h-8 hover:bg-muted flex justify-between items-center"
|
||||||
|
key={id}
|
||||||
|
>
|
||||||
|
<span>{file.name}</span>
|
||||||
|
{fileUpload ? (
|
||||||
|
<Progress
|
||||||
|
className="max-w-20"
|
||||||
|
value={fileUpload.progress * 100}
|
||||||
|
/>
|
||||||
|
) : (
|
||||||
|
<Button
|
||||||
|
variant="ghost"
|
||||||
|
size="icon"
|
||||||
|
onClick={() => onRemove(pickedFile)}
|
||||||
|
>
|
||||||
|
<XIcon className="size-4" />
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</li>
|
||||||
|
)
|
||||||
|
}
|
||||||
84
apps/drive-web/src/files/file-grid.tsx
Normal file
84
apps/drive-web/src/files/file-grid.tsx
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
import type { Doc, Id } from "@fileone/convex/dataModel"
|
||||||
|
import { memo, useCallback } from "react"
|
||||||
|
import { TextFileIcon } from "@/components/icons/text-file-icon"
|
||||||
|
import { MiddleTruncatedText } from "@/components/ui/middle-truncated-text"
|
||||||
|
import { cn } from "@/lib/utils"
|
||||||
|
|
||||||
|
export type FileGridSelection = Set<Id<"files">>
|
||||||
|
|
||||||
|
export function FileGrid({
|
||||||
|
files,
|
||||||
|
selectedFiles = new Set(),
|
||||||
|
onSelectionChange,
|
||||||
|
onContextMenu,
|
||||||
|
}: {
|
||||||
|
files: Doc<"files">[]
|
||||||
|
selectedFiles?: FileGridSelection
|
||||||
|
onSelectionChange?: (selection: FileGridSelection) => void
|
||||||
|
onContextMenu?: (file: Doc<"files">, event: React.MouseEvent) => void
|
||||||
|
}) {
|
||||||
|
const onItemSelect = useCallback(
|
||||||
|
(file: Doc<"files">) => {
|
||||||
|
onSelectionChange?.(new Set([file._id]))
|
||||||
|
},
|
||||||
|
[onSelectionChange],
|
||||||
|
)
|
||||||
|
|
||||||
|
const onItemContextMenu = useCallback(
|
||||||
|
(file: Doc<"files">, event: React.MouseEvent) => {
|
||||||
|
onContextMenu?.(file, event)
|
||||||
|
onSelectionChange?.(new Set([file._id]))
|
||||||
|
},
|
||||||
|
[onContextMenu, onSelectionChange],
|
||||||
|
)
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="grid auto-cols-max grid-flow-col gap-3">
|
||||||
|
{files.map((file) => (
|
||||||
|
<FileGridItem
|
||||||
|
selected={selectedFiles.has(file._id)}
|
||||||
|
key={file._id}
|
||||||
|
file={file}
|
||||||
|
onSelect={onItemSelect}
|
||||||
|
onContextMenu={onItemContextMenu}
|
||||||
|
/>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const FileGridItem = memo(function FileGridItem({
|
||||||
|
selected,
|
||||||
|
file,
|
||||||
|
onSelect,
|
||||||
|
onContextMenu,
|
||||||
|
}: {
|
||||||
|
selected: boolean
|
||||||
|
file: Doc<"files">
|
||||||
|
onSelect?: (file: Doc<"files">) => void
|
||||||
|
onContextMenu?: (file: Doc<"files">, event: React.MouseEvent) => void
|
||||||
|
}) {
|
||||||
|
return (
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
key={file._id}
|
||||||
|
className={cn(
|
||||||
|
"flex flex-col gap-2 items-center justify-center w-24 p-[calc(var(--spacing)*1+1px)] rounded-md",
|
||||||
|
{ "bg-muted border border-border p-1": selected },
|
||||||
|
)}
|
||||||
|
onClick={() => {
|
||||||
|
onSelect?.(file)
|
||||||
|
}}
|
||||||
|
onContextMenu={(event) => {
|
||||||
|
onContextMenu?.(file, event)
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<TextFileIcon className="size-10" />
|
||||||
|
<MiddleTruncatedText className="text-sm">
|
||||||
|
{file.name}
|
||||||
|
</MiddleTruncatedText>
|
||||||
|
</button>
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
export { FileGridItem }
|
||||||
21
apps/drive-web/src/files/file-preview-dialog.tsx
Normal file
21
apps/drive-web/src/files/file-preview-dialog.tsx
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
import type { OpenedFile } from "@fileone/convex/filesystem"
|
||||||
|
import { ImagePreviewDialog } from "./image-preview-dialog"
|
||||||
|
|
||||||
|
export function FilePreviewDialog({
|
||||||
|
openedFile,
|
||||||
|
onClose,
|
||||||
|
}: {
|
||||||
|
openedFile: OpenedFile
|
||||||
|
onClose: () => void
|
||||||
|
}) {
|
||||||
|
switch (openedFile.file.mimeType) {
|
||||||
|
case "image/jpeg":
|
||||||
|
case "image/png":
|
||||||
|
case "image/gif":
|
||||||
|
return (
|
||||||
|
<ImagePreviewDialog openedFile={openedFile} onClose={onClose} />
|
||||||
|
)
|
||||||
|
default:
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
3
apps/drive-web/src/files/file-share.ts
Normal file
3
apps/drive-web/src/files/file-share.ts
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
export function fileShareUrl(shareToken: string) {
|
||||||
|
return `${import.meta.env.VITE_FILE_PROXY_URL}/files/${shareToken}`
|
||||||
|
}
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
import { api } from "@fileone/convex/_generated/api"
|
import { api } from "@fileone/convex/api"
|
||||||
import type { Doc } from "@fileone/convex/_generated/dataModel"
|
import type { Doc } from "@fileone/convex/dataModel"
|
||||||
import type { DirectoryItem } from "@fileone/convex/model/directories"
|
import type { DirectoryItem } from "@fileone/convex/types"
|
||||||
import { useMutation } from "@tanstack/react-query"
|
import { useMutation } from "@tanstack/react-query"
|
||||||
import { Link } from "@tanstack/react-router"
|
import { Link } from "@tanstack/react-router"
|
||||||
import {
|
import {
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user