Mirror of https://github.com/get-drexa/drive.git (synced 2025-11-30 21:41:39 +00:00)

Compare commits: 56 commits, e58caa6b16...main

.devcontainer/devcontainer.json (changed)
@@ -1,5 +1,4 @@
{
  "name": "React + Bun + Convex Development",
  "build": {
    "context": ".",
    "dockerfile": "Dockerfile"
@@ -7,11 +6,12 @@
  "features": {
    "ghcr.io/devcontainers/features/git:1": {},
    "ghcr.io/devcontainers/features/github-cli:1": {},
    "ghcr.io/devcontainers/features/docker-in-docker:2": {
      "moby": false
    },
    "ghcr.io/tailscale/codespace/tailscale": {
      "version": "latest"
    },
    "ghcr.io/devcontainers/features/go:1": {
      "version": "1.25.4",
      "golangciLintVersion": "2.6.1"
    }
  },
  "postCreateCommand": "./scripts/setup-git.sh",
@@ -20,21 +20,11 @@
      "extensions": [
        "biomejs.biome",
        "bradlc.vscode-tailwindcss",
        "ms-vscode.vscode-typescript-next",
        "esbenp.prettier-vscode",
        "ms-vscode.vscode-json",
        "formulahendry.auto-rename-tag",
        "christian-kohler.path-intellisense",
        "ms-vscode.vscode-eslint",
        "convex.convex-vscode"
        "golang.go"
      ],
      "settings": {
        "editor.defaultFormatter": "biomejs.biome",
        "editor.formatOnSave": true,
        "editor.codeActionsOnSave": {
          "source.organizeImports.biome": "explicit",
          "source.fixAll.biome": "explicit"
        },
        "typescript.preferences.importModuleSpecifier": "relative",
        "typescript.suggest.autoImports": true,
        "emmet.includeLanguages": {
@@ -44,7 +34,63 @@
        "tailwindCSS.experimental.classRegex": [
          ["cva\\(([^)]*)\\)", "[\"'`]([^\"'`]*).*?[\"'`]"],
          ["cx\\(([^)]*)\\)", "(?:'|\"|`)([^']*)(?:'|\"|`)"]
        ]
        ],
        "[javascript]": {
          "editor.formatOnSave": true,
          "editor.defaultFormatter": "biomejs.biome",
          "editor.codeActionsOnSave": {
            "source.organizeImports.biome": "explicit",
            "source.fixAll.biome": "explicit"
          }
        },
        "[javascriptreact]": {
          "editor.formatOnSave": true,
          "editor.defaultFormatter": "biomejs.biome",
          "editor.codeActionsOnSave": {
            "source.organizeImports.biome": "explicit",
            "source.fixAll.biome": "explicit"
          }
        },
        "[typescript]": {
          "editor.formatOnSave": true,
          "editor.defaultFormatter": "biomejs.biome",
          "editor.codeActionsOnSave": {
            "source.organizeImports.biome": "explicit",
            "source.fixAll.biome": "explicit"
          }
        },
        "[typescriptreact]": {
          "editor.formatOnSave": true,
          "editor.defaultFormatter": "biomejs.biome",
          "editor.codeActionsOnSave": {
            "source.organizeImports.biome": "explicit",
            "source.fixAll.biome": "explicit"
          }
        },
        "[json]": {
          "editor.formatOnSave": true,
          "editor.defaultFormatter": "biomejs.biome",
          "editor.codeActionsOnSave": {
            "source.fixAll.biome": "explicit"
          }
        },
        "[jsonc]": {
          "editor.formatOnSave": true,
          "editor.defaultFormatter": "biomejs.biome",
          "editor.codeActionsOnSave": {
            "source.fixAll.biome": "explicit"
          }
        },
        "[go]": {
          "editor.formatOnSave": true,
          "editor.defaultFormatter": "golang.go",
          "editor.codeActionsOnSave": {
            "source.organizeImports": "explicit"
          }
        },
        "go.formatTool": "goimports",
        "go.lintTool": "golangci-lint",
        "go.useLanguageServer": true
      }
    }
  },
@@ -6,6 +6,7 @@ backend: convex
This project uses npm workspaces.
- `packages/convex` - convex functions and models
- `apps/drive-web` - frontend dashboard
- `apps/file-proxy` - proxies uploaded files via opaque share tokens
- `packages/path` - path utils

# General Guidelines
apps/backend/cmd/drexa/main.go (new file, 34 lines)
package main

import (
    "flag"
    "fmt"
    "log"
    "os"

    "github.com/get-drexa/drexa/internal/drexa"
)

func main() {
    configPath := flag.String("config", "", "path to config file (required)")
    flag.Parse()

    if *configPath == "" {
        fmt.Fprintln(os.Stderr, "error: --config is required")
        flag.Usage()
        os.Exit(1)
    }

    config, err := drexa.ConfigFromFile(*configPath)
    if err != nil {
        log.Fatalf("failed to load config: %v", err)
    }

    server, err := drexa.NewServer(*config)
    if err != nil {
        log.Fatal(err)
    }

    log.Printf("starting server on :%d", config.Server.Port)
    log.Fatal(server.Listen(fmt.Sprintf(":%d", config.Server.Port)))
}
apps/backend/cmd/migration/main.go (new file, 37 lines)
package main

import (
    "context"
    "flag"
    "fmt"
    "log"
    "os"

    "github.com/get-drexa/drexa/internal/database"
    "github.com/get-drexa/drexa/internal/drexa"
)

func main() {
    configPath := flag.String("config", "", "path to config file (required)")
    flag.Parse()

    if *configPath == "" {
        fmt.Fprintln(os.Stderr, "error: --config is required")
        flag.Usage()
        os.Exit(1)
    }

    config, err := drexa.ConfigFromFile(*configPath)
    if err != nil {
        log.Fatalf("failed to load config: %v", err)
    }

    db := database.NewFromPostgres(config.Database.PostgresURL)
    defer db.Close()

    log.Println("running migrations...")
    if err := database.RunMigrations(context.Background(), db); err != nil {
        log.Fatalf("failed to run migrations: %v", err)
    }
    log.Println("migrations completed successfully")
}
apps/backend/config.example.yaml (new file, 30 lines)
# Drexa Backend Configuration
# Copy this file to config.yaml and adjust values for your environment.

server:
  port: 8080

database:
  postgres_url: postgres://user:password@localhost:5432/drexa?sslmode=disable

jwt:
  issuer: drexa
  audience: drexa-api
  # Secret key can be provided via (in order of precedence):
  #   1. JWT_SECRET_KEY environment variable (base64 encoded)
  #   2. secret_key_base64 below (base64 encoded)
  #   3. secret_key_path below (file with base64 encoded content)
  # secret_key_base64: "base64encodedkey"
  secret_key_path: /run/secrets/jwt_secret_key

storage:
  # Mode: "flat" (UUID-based keys) or "hierarchical" (path-based keys)
  # Note: S3 backend only supports "flat" mode
  mode: flat
  # Backend: "fs" (filesystem) or "s3" (not yet implemented)
  backend: fs
  # Required when backend is "fs"
  root_path: /var/lib/drexa/blobs
  # Required when backend is "s3"
  # bucket: my-drexa-bucket
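The loader in internal/drexa/config.go (later in this diff) expects the secret as standard base64. A minimal sketch for generating one, assuming a 32-byte key to suit the HS256 signing in internal/auth/tokens.go (this helper is not part of the repo):

// keygen prints a fresh base64-encoded 256-bit secret suitable for
// JWT_SECRET_KEY or jwt.secret_key_base64. Hypothetical helper, not in the repo.
package main

import (
    "crypto/rand"
    "encoding/base64"
    "fmt"
)

func main() {
    key := make([]byte, 32) // 256-bit key for HMAC-SHA256
    if _, err := rand.Read(key); err != nil {
        panic(err)
    }
    fmt.Println(base64.StdEncoding.EncodeToString(key))
}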
apps/backend/config.yaml (new file, 15 lines)
server:
  port: 8080

database:
  postgres_url: postgres://drexa:hunter2@helian:5433/drexa?sslmode=disable

jwt:
  issuer: drexa
  audience: drexa-api
  secret_key_base64: "pNeUExoqdakfecZLFL53NJpY4iB9zFot9EuEBItlYKY="

storage:
  mode: hierarchical
  backend: fs
  root_path: ./data
apps/backend/go.mod (new file, 42 lines)
module github.com/get-drexa/drexa

go 1.25.4

require (
    github.com/gabriel-vasile/mimetype v1.4.11
    github.com/gofiber/fiber/v2 v2.52.9
    github.com/google/uuid v1.6.0
    github.com/sqids/sqids-go v0.4.1
    github.com/uptrace/bun v1.2.16
    github.com/uptrace/bun/extra/bundebug v1.2.16
    golang.org/x/crypto v0.45.0
    gopkg.in/yaml.v3 v3.0.1
)

require (
    github.com/fatih/color v1.18.0 // indirect
    go.opentelemetry.io/otel v1.38.0 // indirect
    go.opentelemetry.io/otel/trace v1.38.0 // indirect
    mellium.im/sasl v0.3.2 // indirect
)

require (
    github.com/andybalholm/brotli v1.1.0 // indirect
    github.com/golang-jwt/jwt/v5 v5.3.0
    github.com/jinzhu/inflection v1.0.0 // indirect
    github.com/klauspost/compress v1.17.9 // indirect
    github.com/mattn/go-colorable v0.1.14 // indirect
    github.com/mattn/go-isatty v0.0.20 // indirect
    github.com/mattn/go-runewidth v0.0.16 // indirect
    github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
    github.com/rivo/uniseg v0.2.0 // indirect
    github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect
    github.com/uptrace/bun/dialect/pgdialect v1.2.16
    github.com/uptrace/bun/driver/pgdriver v1.2.16
    github.com/valyala/bytebufferpool v1.0.0 // indirect
    github.com/valyala/fasthttp v1.51.0 // indirect
    github.com/valyala/tcplisten v1.0.0 // indirect
    github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
    github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
    golang.org/x/sys v0.38.0 // indirect
)
apps/backend/go.sum (new file, 76 lines)
github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
github.com/gabriel-vasile/mimetype v1.4.11 h1:AQvxbp830wPhHTqc1u7nzoLT+ZFxGY7emj5DR5DYFik=
github.com/gabriel-vasile/mimetype v1.4.11/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s=
github.com/gofiber/fiber/v2 v2.52.9 h1:YjKl5DOiyP3j0mO61u3NTmK7or8GzzWzCFzkboyP5cw=
github.com/gofiber/fiber/v2 v2.52.9/go.mod h1:YEcBbO/FB+5M1IZNBP9FO3J9281zgPAreiI1oqg8nDw=
github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo=
github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA=
github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/puzpuzpuz/xsync/v3 v3.5.1 h1:GJYJZwO6IdxN/IKbneznS6yPkVC+c3zyY/j19c++5Fg=
github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/sqids/sqids-go v0.4.1 h1:eQKYzmAZbLlRwHeHYPF35QhgxwZHLnlmVj9AkIj/rrw=
github.com/sqids/sqids-go v0.4.1/go.mod h1:EMwHuPQgSNFS0A49jESTfIQS+066XQTVhukrzEPScl8=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc h1:9lRDQMhESg+zvGYmW5DyG0UqvY96Bu5QYsTLvCHdrgo=
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc/go.mod h1:bciPuU6GHm1iF1pBvUfxfsH0Wmnc2VbpgvbI9ZWuIRs=
github.com/uptrace/bun v1.2.16 h1:QlObi6ZIK5Ao7kAALnh91HWYNZUBbVwye52fmlQM9kc=
github.com/uptrace/bun v1.2.16/go.mod h1:jMoNg2n56ckaawi/O/J92BHaECmrz6IRjuMWqlMaMTM=
github.com/uptrace/bun/dialect/pgdialect v1.2.16 h1:KFNZ0LxAyczKNfK/IJWMyaleO6eI9/Z5tUv3DE1NVL4=
github.com/uptrace/bun/dialect/pgdialect v1.2.16/go.mod h1:IJdMeV4sLfh0LDUZl7TIxLI0LipF1vwTK3hBC7p5qLo=
github.com/uptrace/bun/driver/pgdriver v1.2.16 h1:b1kpXKUxtTSGYow5Vlsb+dKV3z0R7aSAJNfMfKp61ZU=
github.com/uptrace/bun/driver/pgdriver v1.2.16/go.mod h1:H6lUZ9CBfp1X5Vq62YGSV7q96/v94ja9AYFjKvdoTk0=
github.com/uptrace/bun/extra/bundebug v1.2.16 h1:3OXAfHTU4ydu2+4j05oB1BxPx6+ypdWIVzTugl/7zl0=
github.com/uptrace/bun/extra/bundebug v1.2.16/go.mod h1:vk6R/1i67/S2RvUI5AH/m3P5e67mOkfDCmmCsAPUumo=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasthttp v1.51.0 h1:8b30A5JlZ6C7AS81RsWjYMQmrZG6feChmgAolCl1SqA=
github.com/valyala/fasthttp v1.51.0/go.mod h1:oI2XroL+lI7vdXyYoQk03bXBThfFl2cVdIA3Xl7cH8g=
github.com/valyala/tcplisten v1.0.0 h1:rBHj/Xf+E1tRGZyWIWwJDiRY0zc1Js+CV5DqwacVSA8=
github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc=
github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8=
github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8=
go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM=
go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE=
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
mellium.im/sasl v0.3.2 h1:PT6Xp7ccn9XaXAnJ03FcEjmAn7kK1x7aoXV6F+Vmrl0=
mellium.im/sasl v0.3.2/go.mod h1:NKXDi1zkr+BlMHLQjY3ofYuU4KSPFxknb8mfEu6SveY=
apps/backend/internal/account/account.go (new file, 23 lines)
package account

import (
    "time"

    "github.com/google/uuid"
    "github.com/uptrace/bun"
)

type Account struct {
    bun.BaseModel `bun:"accounts"`

    ID                uuid.UUID `bun:",pk,type:uuid" json:"id"`
    UserID            uuid.UUID `bun:"user_id,notnull,type:uuid" json:"userId"`
    StorageUsageBytes int64     `bun:"storage_usage_bytes,notnull" json:"storageUsageBytes"`
    StorageQuotaBytes int64     `bun:"storage_quota_bytes,notnull" json:"storageQuotaBytes"`
    CreatedAt         time.Time `bun:"created_at,notnull,nullzero" json:"createdAt"`
    UpdatedAt         time.Time `bun:"updated_at,notnull,nullzero" json:"updatedAt"`
}

func newAccountID() (uuid.UUID, error) {
    return uuid.NewV7()
}
apps/backend/internal/account/err.go (new file, 8 lines)
package account

import "errors"

var (
    ErrAccountNotFound      = errors.New("account not found")
    ErrAccountAlreadyExists = errors.New("account already exists")
)
apps/backend/internal/account/http.go (new file, 132 lines)
package account

import (
    "errors"

    "github.com/get-drexa/drexa/internal/auth"
    "github.com/get-drexa/drexa/internal/httperr"
    "github.com/get-drexa/drexa/internal/user"
    "github.com/gofiber/fiber/v2"
    "github.com/google/uuid"
    "github.com/uptrace/bun"
)

type HTTPHandler struct {
    accountService *Service
    authService    *auth.Service
    db             *bun.DB
    authMiddleware fiber.Handler
}

type registerAccountRequest struct {
    Email       string `json:"email"`
    Password    string `json:"password"`
    DisplayName string `json:"displayName"`
}

type registerAccountResponse struct {
    Account      *Account   `json:"account"`
    User         *user.User `json:"user"`
    AccessToken  string     `json:"accessToken"`
    RefreshToken string     `json:"refreshToken"`
}

const currentAccountKey = "currentAccount"

func CurrentAccount(c *fiber.Ctx) *Account {
    return c.Locals(currentAccountKey).(*Account)
}

func NewHTTPHandler(accountService *Service, authService *auth.Service, db *bun.DB, authMiddleware fiber.Handler) *HTTPHandler {
    return &HTTPHandler{accountService: accountService, authService: authService, db: db, authMiddleware: authMiddleware}
}

func (h *HTTPHandler) RegisterRoutes(api fiber.Router) fiber.Router {
    api.Post("/accounts", h.registerAccount)

    account := api.Group("/accounts/:accountID")
    account.Use(h.authMiddleware)
    account.Use(h.accountMiddleware)

    account.Get("/", h.getAccount)

    return account
}

func (h *HTTPHandler) accountMiddleware(c *fiber.Ctx) error {
    user, err := auth.AuthenticatedUser(c)
    if err != nil {
        return c.SendStatus(fiber.StatusUnauthorized)
    }

    accountID, err := uuid.Parse(c.Params("accountID"))
    if err != nil {
        return c.SendStatus(fiber.StatusNotFound)
    }

    account, err := h.accountService.AccountByID(c.Context(), h.db, user.ID, accountID)
    if err != nil {
        if errors.Is(err, ErrAccountNotFound) {
            return c.SendStatus(fiber.StatusNotFound)
        }
        return httperr.Internal(err)
    }

    c.Locals(currentAccountKey, account)

    return c.Next()
}

func (h *HTTPHandler) getAccount(c *fiber.Ctx) error {
    account := CurrentAccount(c)
    if account == nil {
        return c.SendStatus(fiber.StatusNotFound)
    }
    return c.JSON(account)
}

func (h *HTTPHandler) registerAccount(c *fiber.Ctx) error {
    req := new(registerAccountRequest)
    if err := c.BodyParser(req); err != nil {
        return c.SendStatus(fiber.StatusBadRequest)
    }

    tx, err := h.db.BeginTx(c.Context(), nil)
    if err != nil {
        return httperr.Internal(err)
    }
    defer tx.Rollback()

    acc, u, err := h.accountService.Register(c.Context(), tx, RegisterOptions{
        Email:       req.Email,
        Password:    req.Password,
        DisplayName: req.DisplayName,
    })
    if err != nil {
        var ae *user.AlreadyExistsError
        if errors.As(err, &ae) {
            return c.SendStatus(fiber.StatusConflict)
        }
        if errors.Is(err, ErrAccountAlreadyExists) {
            return c.SendStatus(fiber.StatusConflict)
        }
        return httperr.Internal(err)
    }

    result, err := h.authService.GenerateTokenForUser(c.Context(), tx, u)
    if err != nil {
        return httperr.Internal(err)
    }

    err = tx.Commit()
    if err != nil {
        return httperr.Internal(err)
    }

    return c.JSON(registerAccountResponse{
        Account:      acc,
        User:         u,
        AccessToken:  result.AccessToken,
        RefreshToken: result.RefreshToken,
    })
}
apps/backend/internal/account/service.go (new file, 115 lines)
package account

import (
    "context"
    "database/sql"
    "errors"

    "github.com/get-drexa/drexa/internal/database"
    "github.com/get-drexa/drexa/internal/password"
    "github.com/get-drexa/drexa/internal/user"
    "github.com/get-drexa/drexa/internal/virtualfs"
    "github.com/google/uuid"
    "github.com/uptrace/bun"
)

type Service struct {
    userService user.Service
    vfs         *virtualfs.VirtualFS
}

type RegisterOptions struct {
    Email       string
    Password    string
    DisplayName string
}

type CreateAccountOptions struct {
    OrganizationID uuid.UUID
    QuotaBytes     int64
}

func NewService(userService *user.Service, vfs *virtualfs.VirtualFS) *Service {
    return &Service{
        userService: *userService,
        vfs:         vfs,
    }
}

func (s *Service) Register(ctx context.Context, db bun.IDB, opts RegisterOptions) (*Account, *user.User, error) {
    hashed, err := password.Hash(opts.Password)
    if err != nil {
        return nil, nil, err
    }

    u, err := s.userService.RegisterUser(ctx, db, user.UserRegistrationOptions{
        Email:       opts.Email,
        Password:    hashed,
        DisplayName: opts.DisplayName,
    })
    if err != nil {
        return nil, nil, err
    }

    acc, err := s.CreateAccount(ctx, db, u.ID, CreateAccountOptions{
        // TODO: make quota configurable
        QuotaBytes: 1024 * 1024 * 1024, // 1GB
    })
    if err != nil {
        return nil, nil, err
    }

    _, err = s.vfs.CreateDirectory(ctx, db, acc.ID, uuid.Nil, virtualfs.RootDirectoryName)
    if err != nil {
        return nil, nil, err
    }

    return acc, u, nil
}

func (s *Service) CreateAccount(ctx context.Context, db bun.IDB, userID uuid.UUID, opts CreateAccountOptions) (*Account, error) {
    id, err := newAccountID()
    if err != nil {
        return nil, err
    }

    account := &Account{
        ID:                id,
        UserID:            userID,
        StorageQuotaBytes: opts.QuotaBytes,
    }

    _, err = db.NewInsert().Model(account).Returning("*").Exec(ctx)
    if err != nil {
        if database.IsUniqueViolation(err) {
            return nil, ErrAccountAlreadyExists
        }
        return nil, err
    }

    return account, nil
}

func (s *Service) AccountByUserID(ctx context.Context, db bun.IDB, userID uuid.UUID) (*Account, error) {
    var account Account
    err := db.NewSelect().Model(&account).Where("user_id = ?", userID).Scan(ctx)
    if err != nil {
        if errors.Is(err, sql.ErrNoRows) {
            return nil, ErrAccountNotFound
        }
        return nil, err
    }
    return &account, nil
}

func (s *Service) AccountByID(ctx context.Context, db bun.IDB, userID uuid.UUID, id uuid.UUID) (*Account, error) {
    var account Account
    err := db.NewSelect().Model(&account).Where("user_id = ?", userID).Where("id = ?", id).Scan(ctx)
    if err != nil {
        if errors.Is(err, sql.ErrNoRows) {
            return nil, ErrAccountNotFound
        }
        return nil, err
    }
    return &account, nil
}
apps/backend/internal/auth/err.go (new file, 24 lines)
package auth

import (
    "errors"
    "fmt"
)

var ErrUnauthenticatedRequest = errors.New("unauthenticated request")

type InvalidAccessTokenError struct {
    err error
}

func newInvalidAccessTokenError(err error) *InvalidAccessTokenError {
    return &InvalidAccessTokenError{err}
}

func (e *InvalidAccessTokenError) Error() string {
    return fmt.Sprintf("invalid access token: %v", e.err)
}

func (e *InvalidAccessTokenError) Unwrap() error {
    return e.err
}
apps/backend/internal/auth/http.go (new file, 66 lines)
package auth

import (
    "errors"

    "github.com/get-drexa/drexa/internal/httperr"
    "github.com/get-drexa/drexa/internal/user"
    "github.com/gofiber/fiber/v2"
    "github.com/uptrace/bun"
)

type loginRequest struct {
    Email    string `json:"email"`
    Password string `json:"password"`
}

type loginResponse struct {
    User         user.User `json:"user"`
    AccessToken  string    `json:"accessToken"`
    RefreshToken string    `json:"refreshToken"`
}

type HTTPHandler struct {
    service *Service
    db      *bun.DB
}

func NewHTTPHandler(s *Service, db *bun.DB) *HTTPHandler {
    return &HTTPHandler{service: s, db: db}
}

func (h *HTTPHandler) RegisterRoutes(api fiber.Router) {
    auth := api.Group("/auth")
    auth.Post("/login", h.Login)
}

func (h *HTTPHandler) Login(c *fiber.Ctx) error {
    req := new(loginRequest)
    if err := c.BodyParser(req); err != nil {
        return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid request"})
    }

    tx, err := h.db.BeginTx(c.Context(), nil)
    if err != nil {
        return httperr.Internal(err)
    }
    defer tx.Rollback()

    result, err := h.service.AuthenticateWithEmailAndPassword(c.Context(), tx, req.Email, req.Password)
    if err != nil {
        if errors.Is(err, ErrInvalidCredentials) {
            return c.Status(fiber.StatusUnauthorized).JSON(fiber.Map{"error": "Invalid credentials"})
        }
        return httperr.Internal(err)
    }

    if err := tx.Commit(); err != nil {
        return httperr.Internal(err)
    }

    return c.JSON(loginResponse{
        User:         *result.User,
        AccessToken:  result.AccessToken,
        RefreshToken: result.RefreshToken,
    })
}
apps/backend/internal/auth/middleware.go (new file, 63 lines)
package auth

import (
    "errors"
    "log/slog"
    "strings"

    "github.com/get-drexa/drexa/internal/httperr"
    "github.com/get-drexa/drexa/internal/user"
    "github.com/gofiber/fiber/v2"
    "github.com/uptrace/bun"
)

const authenticatedUserKey = "authenticatedUser"

// NewBearerAuthMiddleware is a middleware that authenticates a request using a bearer token.
// To obtain the authenticated user in subsequent handlers, see AuthenticatedUser.
func NewBearerAuthMiddleware(s *Service, db *bun.DB) fiber.Handler {
    return func(c *fiber.Ctx) error {
        authHeader := c.Get("Authorization")
        if authHeader == "" {
            slog.Info("no auth header")
            return c.SendStatus(fiber.StatusUnauthorized)
        }

        parts := strings.Split(authHeader, " ")
        if len(parts) != 2 || parts[0] != "Bearer" {
            slog.Info("invalid auth header")
            return c.SendStatus(fiber.StatusUnauthorized)
        }

        token := parts[1]
        u, err := s.AuthenticateWithAccessToken(c.Context(), db, token)
        if err != nil {
            var e *InvalidAccessTokenError
            if errors.As(err, &e) {
                slog.Info("invalid access token")
                return c.SendStatus(fiber.StatusUnauthorized)
            }

            var nf *user.NotFoundError
            if errors.As(err, &nf) {
                slog.Info("user not found")
                return c.SendStatus(fiber.StatusUnauthorized)
            }

            return httperr.Internal(err)
        }

        c.Locals(authenticatedUserKey, u)

        return c.Next()
    }
}

// AuthenticatedUser returns the authenticated user from the given fiber context.
// Returns ErrUnauthenticatedRequest if not authenticated.
func AuthenticatedUser(c *fiber.Ctx) (*user.User, error) {
    if u, ok := c.Locals(authenticatedUserKey).(*user.User); ok {
        return u, nil
    }
    return nil, ErrUnauthenticatedRequest
}
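A minimal wiring sketch for this middleware, assuming already-constructed services; the actual composition lives in internal/drexa/server.go, which is truncated at the end of this diff:

package example

import (
    "github.com/get-drexa/drexa/internal/auth"
    "github.com/gofiber/fiber/v2"
    "github.com/uptrace/bun"
)

// newApp is a hypothetical sketch: protect a route with the bearer middleware,
// then read the user it stashed in c.Locals via auth.AuthenticatedUser.
func newApp(authService *auth.Service, db *bun.DB) *fiber.App {
    app := fiber.New()
    api := app.Group("/api")

    api.Get("/me", auth.NewBearerAuthMiddleware(authService, db), func(c *fiber.Ctx) error {
        u, err := auth.AuthenticatedUser(c)
        if err != nil {
            return c.SendStatus(fiber.StatusUnauthorized)
        }
        return c.JSON(u)
    })

    return app
}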
apps/backend/internal/auth/service.go (new file, 109 lines)
package auth

import (
    "context"
    "encoding/hex"
    "errors"
    "log/slog"

    "github.com/get-drexa/drexa/internal/password"
    "github.com/get-drexa/drexa/internal/user"
    "github.com/google/uuid"
    "github.com/uptrace/bun"
)

type AuthenticationResult struct {
    User         *user.User
    AccessToken  string
    RefreshToken string
}

var ErrInvalidCredentials = errors.New("invalid credentials")

type Service struct {
    userService *user.Service
    tokenConfig TokenConfig
}

func NewService(userService *user.Service, tokenConfig TokenConfig) *Service {
    return &Service{
        userService: userService,
        tokenConfig: tokenConfig,
    }
}

func (s *Service) GenerateTokenForUser(ctx context.Context, db bun.IDB, user *user.User) (*AuthenticationResult, error) {
    at, err := GenerateAccessToken(user, &s.tokenConfig)
    if err != nil {
        return nil, err
    }

    rt, err := GenerateRefreshToken(user, &s.tokenConfig)
    if err != nil {
        return nil, err
    }

    _, err = db.NewInsert().Model(rt).Exec(ctx)
    if err != nil {
        return nil, err
    }

    return &AuthenticationResult{
        User:         user,
        AccessToken:  at,
        RefreshToken: hex.EncodeToString(rt.Token),
    }, nil
}

func (s *Service) AuthenticateWithEmailAndPassword(ctx context.Context, db bun.IDB, email, plain string) (*AuthenticationResult, error) {
    u, err := s.userService.UserByEmail(ctx, db, email)
    if err != nil {
        var nf *user.NotFoundError
        if errors.As(err, &nf) {
            return nil, ErrInvalidCredentials
        }
        return nil, err
    }

    ok, err := password.Verify(plain, u.Password)
    if err != nil || !ok {
        return nil, ErrInvalidCredentials
    }

    at, err := GenerateAccessToken(u, &s.tokenConfig)
    if err != nil {
        return nil, err
    }

    rt, err := GenerateRefreshToken(u, &s.tokenConfig)
    if err != nil {
        return nil, err
    }

    _, err = db.NewInsert().Model(rt).Exec(ctx)
    if err != nil {
        return nil, err
    }

    return &AuthenticationResult{
        User:         u,
        AccessToken:  at,
        RefreshToken: hex.EncodeToString(rt.Token),
    }, nil
}

func (s *Service) AuthenticateWithAccessToken(ctx context.Context, db bun.IDB, token string) (*user.User, error) {
    claims, err := ParseAccessToken(token, &s.tokenConfig)
    if err != nil {
        slog.Info("failed to parse access token", "error", err)
        return nil, err
    }

    id, err := uuid.Parse(claims.Subject)
    if err != nil {
        slog.Info("failed to parse access token subject", "error", err)
        return nil, newInvalidAccessTokenError(err)
    }

    return s.userService.UserByID(ctx, db, id)
}
apps/backend/internal/auth/tokens.go (new file, 98 lines)
package auth

import (
    "crypto/rand"
    "crypto/sha256"
    "encoding/hex"
    "fmt"
    "time"

    "github.com/get-drexa/drexa/internal/user"
    "github.com/golang-jwt/jwt/v5"
    "github.com/google/uuid"
    "github.com/uptrace/bun"
)

const (
    accessTokenValidFor    = time.Minute * 15
    refreshTokenByteLength = 32
    refreshTokenValidFor   = time.Hour * 24 * 30
)

type TokenConfig struct {
    Issuer    string
    Audience  string
    SecretKey []byte
}

type RefreshToken struct {
    bun.BaseModel `bun:"refresh_tokens"`

    ID        uuid.UUID `bun:",pk,type:uuid"`
    UserID    uuid.UUID `bun:"user_id,notnull"`
    Token     []byte    `bun:"-"`
    TokenHash string    `bun:"token_hash,notnull"`
    ExpiresAt time.Time `bun:"expires_at,notnull"`
    CreatedAt time.Time `bun:"created_at,notnull,nullzero"`
}

func newTokenID() (uuid.UUID, error) {
    return uuid.NewV7()
}

func GenerateAccessToken(user *user.User, c *TokenConfig) (string, error) {
    now := time.Now()

    token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.RegisteredClaims{
        Issuer:    c.Issuer,
        Audience:  jwt.ClaimStrings{c.Audience},
        Subject:   user.ID.String(),
        ExpiresAt: jwt.NewNumericDate(now.Add(accessTokenValidFor)),
        IssuedAt:  jwt.NewNumericDate(now),
    })

    signed, err := token.SignedString(c.SecretKey)
    if err != nil {
        return "", fmt.Errorf("failed to sign token: %w", err)
    }

    return signed, nil
}

func GenerateRefreshToken(user *user.User, c *TokenConfig) (*RefreshToken, error) {
    now := time.Now()

    buf := make([]byte, refreshTokenByteLength)
    if _, err := rand.Read(buf); err != nil {
        return nil, fmt.Errorf("failed to generate refresh token: %w", err)
    }

    id, err := newTokenID()
    if err != nil {
        return nil, fmt.Errorf("failed to generate token ID: %w", err)
    }

    h := sha256.Sum256(buf)
    hex := hex.EncodeToString(h[:])

    return &RefreshToken{
        ID:        id,
        UserID:    user.ID,
        Token:     buf,
        TokenHash: hex,
        ExpiresAt: now.Add(refreshTokenValidFor),
        CreatedAt: now,
    }, nil
}

// ParseAccessToken parses a JWT access token and returns the claims.
// Returns an InvalidAccessTokenError if the token is invalid.
func ParseAccessToken(token string, c *TokenConfig) (*jwt.RegisteredClaims, error) {
    parsed, err := jwt.ParseWithClaims(token, &jwt.RegisteredClaims{}, func(token *jwt.Token) (any, error) {
        return c.SecretKey, nil
    }, jwt.WithIssuer(c.Issuer), jwt.WithExpirationRequired(), jwt.WithAudience(c.Audience))
    if err != nil {
        return nil, newInvalidAccessTokenError(err)
    }
    return parsed.Claims.(*jwt.RegisteredClaims), nil
}
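This diff adds token generation but no redemption path. A hedged sketch of the verification that GenerateRefreshToken implies (hypothetical function, written as if in package auth with "context" and "time" added to the imports above): the client holds the raw bytes hex-encoded, while the database stores only the SHA-256 hash.

// VerifyRefreshToken is a hypothetical sketch, not in this diff: hash the
// presented token the same way GenerateRefreshToken did and look up the hash.
func VerifyRefreshToken(ctx context.Context, db bun.IDB, presented string) (*RefreshToken, error) {
    raw, err := hex.DecodeString(presented)
    if err != nil {
        return nil, err
    }

    sum := sha256.Sum256(raw)

    var rt RefreshToken
    err = db.NewSelect().Model(&rt).
        Where("token_hash = ?", hex.EncodeToString(sum[:])).
        Where("expires_at > ?", time.Now()).
        Scan(ctx)
    if err != nil {
        return nil, err
    }
    return &rt, nil
}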
apps/backend/internal/blob/err.go (new file, 9 lines)
package blob

import "errors"

var (
    ErrConflict           = errors.New("key already used for a different blob")
    ErrNotFound           = errors.New("key not found")
    ErrInvalidFileContent = errors.New("invalid file content. must provide either a reader or a blob key")
)
apps/backend/internal/blob/fs_store.go (new file, 154 lines)
package blob

import (
    "context"
    "io"
    "os"
    "path/filepath"

    "github.com/get-drexa/drexa/internal/ioext"
)

var _ Store = &FSStore{}

type FSStore struct {
    config FSStoreConfig
}

type FSStoreConfig struct {
    Root string
}

func NewFSStore(config FSStoreConfig) *FSStore {
    return &FSStore{config: config}
}

func (s *FSStore) Initialize(ctx context.Context) error {
    return os.MkdirAll(s.config.Root, 0755)
}

func (s *FSStore) Put(ctx context.Context, key Key, reader io.Reader) error {
    path := filepath.Join(s.config.Root, string(key))

    err := os.MkdirAll(filepath.Dir(path), 0755)
    if err != nil {
        return err
    }

    f, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY|os.O_EXCL, 0644)
    if err != nil {
        if os.IsExist(err) {
            return ErrConflict
        }
        return err
    }

    defer f.Close()
    _, err = io.Copy(f, reader)
    if err != nil {
        _ = os.Remove(path)
        return err
    }

    return nil
}

func (s *FSStore) Read(ctx context.Context, key Key) (io.ReadCloser, error) {
    path := filepath.Join(s.config.Root, string(key))
    f, err := os.Open(path)
    if err != nil {
        if os.IsNotExist(err) {
            return nil, ErrNotFound
        }
        return nil, err
    }
    return f, nil
}

func (s *FSStore) ReadRange(ctx context.Context, key Key, offset, length int64) (io.ReadCloser, error) {
    path := filepath.Join(s.config.Root, string(key))

    f, err := os.Open(path)
    if err != nil {
        if os.IsNotExist(err) {
            return nil, ErrNotFound
        }
        return nil, err
    }

    _, err = f.Seek(offset, io.SeekStart)
    if err != nil {
        f.Close() // avoid leaking the handle when the seek fails
        return nil, err
    }

    return ioext.NewLimitReadCloser(f, length), nil
}

func (s *FSStore) ReadSize(ctx context.Context, key Key) (int64, error) {
    path := filepath.Join(s.config.Root, string(key))
    fi, err := os.Stat(path)
    if err != nil {
        if os.IsNotExist(err) {
            return 0, ErrNotFound
        }
        return 0, err
    }
    return fi.Size(), nil
}

func (s *FSStore) Delete(ctx context.Context, key Key) error {
    err := os.Remove(filepath.Join(s.config.Root, string(key)))
    // swallow the error if the file does not exist: deleting a missing key is a no-op
    if err != nil && !os.IsNotExist(err) {
        return err
    }
    return nil
}

func (s *FSStore) DeletePrefix(ctx context.Context, prefix Key) error {
    prefixPath := filepath.Join(s.config.Root, string(prefix))
    err := os.RemoveAll(prefixPath)
    if err != nil && !os.IsNotExist(err) {
        return err
    }
    return nil
}

func (s *FSStore) Update(ctx context.Context, key Key, opts UpdateOptions) error {
    // Update is a no-op for FSStore
    return nil
}

func (s *FSStore) Move(ctx context.Context, srcKey, dstKey Key) error {
    oldPath := filepath.Join(s.config.Root, string(srcKey))
    newPath := filepath.Join(s.config.Root, string(dstKey))

    _, err := os.Stat(newPath)
    if err == nil {
        return ErrConflict
    }

    err = os.MkdirAll(filepath.Dir(newPath), 0755)
    if err != nil {
        return err
    }

    err = os.Rename(oldPath, newPath)
    if err != nil {
        if os.IsNotExist(err) {
            return ErrNotFound
        }
        return err
    }

    return nil
}

func (s *FSStore) SupportsDirectUpload() bool {
    return false
}

func (s *FSStore) GenerateUploadURL(ctx context.Context, key Key, opts UploadURLOptions) (string, error) {
    return "", nil
}
apps/backend/internal/blob/key.go (new file, 7 lines)
package blob

type Key string

func (k Key) IsNil() bool {
    return k == ""
}
apps/backend/internal/blob/store.go (new file, 33 lines)
package blob

import (
    "context"
    "io"
    "time"
)

type UploadURLOptions struct {
    Duration time.Duration
}

type UpdateOptions struct {
    ContentType string
}

type Store interface {
    Initialize(ctx context.Context) error
    Put(ctx context.Context, key Key, reader io.Reader) error
    Update(ctx context.Context, key Key, opts UpdateOptions) error
    Delete(ctx context.Context, key Key) error
    DeletePrefix(ctx context.Context, prefix Key) error
    Move(ctx context.Context, srcKey, dstKey Key) error
    Read(ctx context.Context, key Key) (io.ReadCloser, error)
    ReadRange(ctx context.Context, key Key, offset, length int64) (io.ReadCloser, error)
    ReadSize(ctx context.Context, key Key) (int64, error)

    // SupportsDirectUpload returns true if the store allows files to be uploaded directly to the blob store.
    SupportsDirectUpload() bool

    // GenerateUploadURL generates a URL that can be used to upload a file directly to the blob store.
    // If unsupported, returns an empty string with no error.
    GenerateUploadURL(ctx context.Context, key Key, opts UploadURLOptions) (string, error)
}
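A caller-side sketch of the Store interface using the FSStore above (hypothetical; the key and root path are made up for illustration):

package example

import (
    "context"
    "io"
    "os"

    "github.com/get-drexa/drexa/internal/blob"
)

// dumpRange is a hypothetical usage sketch: initialize a filesystem store and
// stream a byte range of one object to stdout.
func dumpRange(ctx context.Context) error {
    store := blob.NewFSStore(blob.FSStoreConfig{Root: "/var/lib/drexa/blobs"})
    if err := store.Initialize(ctx); err != nil {
        return err
    }

    // ReadRange seeks to offset and returns a reader capped at length bytes.
    rc, err := store.ReadRange(ctx, blob.Key("some-object"), 0, 1024)
    if err != nil {
        return err
    }
    defer rc.Close()

    _, err = io.Copy(os.Stdout, rc)
    return err
}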
apps/backend/internal/database/errs.go (new file, 61 lines)
package database

import (
    "errors"

    "github.com/uptrace/bun/driver/pgdriver"
)

// PostgreSQL SQLSTATE error codes.
// See: https://www.postgresql.org/docs/current/errcodes-appendix.html
const (
    PgUniqueViolation     = "23505"
    PgForeignKeyViolation = "23503"
    PgNotNullViolation    = "23502"
)

// PostgreSQL protocol error field identifiers used with pgdriver.Error.Field().
// See: https://www.postgresql.org/docs/current/protocol-error-fields.html
//
// Common fields:
//   - 'C' - SQLSTATE code (e.g., "23505")
//   - 'M' - Primary error message
//   - 'D' - Detail message
//   - 'H' - Hint
//   - 's' - Schema name
//   - 't' - Table name
//   - 'c' - Column name
//   - 'n' - Constraint name
const (
    pgFieldCode       = 'C'
    pgFieldConstraint = 'n'
)

// IsUniqueViolation checks if the error is a PostgreSQL unique constraint violation.
func IsUniqueViolation(err error) bool {
    return hasPgCode(err, PgUniqueViolation)
}

// IsForeignKeyViolation checks if the error is a PostgreSQL foreign key violation.
func IsForeignKeyViolation(err error) bool {
    return hasPgCode(err, PgForeignKeyViolation)
}

// IsNotNullViolation checks if the error is a PostgreSQL not-null constraint violation.
func IsNotNullViolation(err error) bool {
    return hasPgCode(err, PgNotNullViolation)
}

// ConstraintName returns the constraint name from a PostgreSQL error, or empty string if not applicable.
func ConstraintName(err error) string {
    var pgErr pgdriver.Error
    if errors.As(err, &pgErr) {
        return pgErr.Field(pgFieldConstraint)
    }
    return ""
}

func hasPgCode(err error, code string) bool {
    var pgErr pgdriver.Error
    return errors.As(err, &pgErr) && pgErr.Field(pgFieldCode) == code
}
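A sketch of how these helpers compose in a caller (hypothetical function; account/service.go above shows the real pattern of mapping a unique violation to a domain error):

package example

import (
    "context"
    "log/slog"

    "github.com/get-drexa/drexa/internal/database"
    "github.com/uptrace/bun"
)

// insertOrReport is a hypothetical sketch: classify a driver-level error by
// SQLSTATE and surface which constraint fired before returning it.
func insertOrReport(ctx context.Context, db bun.IDB, model any) error {
    _, err := db.NewInsert().Model(model).Exec(ctx)
    if err != nil {
        if database.IsUniqueViolation(err) {
            slog.Warn("unique constraint violated",
                "constraint", database.ConstraintName(err))
        }
        return err
    }
    return nil
}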
apps/backend/internal/database/migrate.go (new file, 28 lines)
package database

import (
    "context"
    "embed"

    "github.com/uptrace/bun"
    "github.com/uptrace/bun/migrate"
)

//go:embed migrations/*.sql
var sqlMigrations embed.FS

// RunMigrations discovers and runs all migrations against the database.
func RunMigrations(ctx context.Context, db *bun.DB) error {
    migrations := migrate.NewMigrations()
    if err := migrations.Discover(sqlMigrations); err != nil {
        return err
    }

    migrator := migrate.NewMigrator(db, migrations)
    if err := migrator.Init(ctx); err != nil {
        return err
    }

    _, err := migrator.Migrate(ctx)
    return err
}
apps/backend/internal/database/migrations/001_initial.up.sql (new file, 106 lines)
-- ============================================================================
-- Application Tables
-- ============================================================================

CREATE TABLE IF NOT EXISTS users (
    id UUID PRIMARY KEY,
    display_name TEXT,
    email TEXT NOT NULL UNIQUE,
    password TEXT NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX idx_users_email ON users(email);

CREATE TABLE IF NOT EXISTS accounts (
    id UUID PRIMARY KEY,
    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    storage_usage_bytes BIGINT NOT NULL DEFAULT 0,
    storage_quota_bytes BIGINT NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX idx_accounts_user_id ON accounts(user_id);

CREATE TABLE IF NOT EXISTS refresh_tokens (
    id UUID PRIMARY KEY,
    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    token_hash TEXT NOT NULL UNIQUE,
    expires_at TIMESTAMPTZ NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX idx_refresh_tokens_user_id ON refresh_tokens(user_id);
CREATE INDEX idx_refresh_tokens_token_hash ON refresh_tokens(token_hash);
CREATE INDEX idx_refresh_tokens_expires_at ON refresh_tokens(expires_at);

-- Virtual filesystem nodes (unified files + directories)
CREATE TABLE IF NOT EXISTS vfs_nodes (
    id UUID PRIMARY KEY,
    public_id TEXT NOT NULL UNIQUE,  -- opaque ID for external API (no timestamp leak)
    account_id UUID NOT NULL REFERENCES accounts(id) ON DELETE CASCADE,
    parent_id UUID REFERENCES vfs_nodes(id) ON DELETE CASCADE,  -- NULL = root directory
    kind TEXT NOT NULL CHECK (kind IN ('file', 'directory')),
    status TEXT NOT NULL DEFAULT 'ready' CHECK (status IN ('pending', 'ready')),
    name TEXT NOT NULL,

    -- File-specific fields (NULL for directories)
    blob_key TEXT,   -- reference to blob storage (flat mode), NULL for hierarchical
    size BIGINT,     -- file size in bytes
    mime_type TEXT,  -- content type

    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    deleted_at TIMESTAMPTZ,  -- soft delete for trash

    -- No duplicate names in same parent (per account, excluding deleted)
    CONSTRAINT unique_node_name UNIQUE NULLS NOT DISTINCT (account_id, parent_id, name, deleted_at)
);

CREATE INDEX idx_vfs_nodes_account_id ON vfs_nodes(account_id) WHERE deleted_at IS NULL;
CREATE INDEX idx_vfs_nodes_parent_id ON vfs_nodes(parent_id) WHERE deleted_at IS NULL;
CREATE INDEX idx_vfs_nodes_account_parent ON vfs_nodes(account_id, parent_id) WHERE deleted_at IS NULL;
CREATE INDEX idx_vfs_nodes_kind ON vfs_nodes(account_id, kind) WHERE deleted_at IS NULL;
CREATE INDEX idx_vfs_nodes_deleted ON vfs_nodes(account_id, deleted_at) WHERE deleted_at IS NOT NULL;
CREATE INDEX idx_vfs_nodes_public_id ON vfs_nodes(public_id);
CREATE UNIQUE INDEX idx_vfs_nodes_account_root ON vfs_nodes(account_id) WHERE parent_id IS NULL;  -- one root per account
CREATE INDEX idx_vfs_nodes_pending ON vfs_nodes(created_at) WHERE status = 'pending';  -- for cleanup job

CREATE TABLE IF NOT EXISTS node_shares (
    id UUID PRIMARY KEY,
    node_id UUID NOT NULL REFERENCES vfs_nodes(id) ON DELETE CASCADE,
    share_token TEXT NOT NULL UNIQUE,
    expires_at TIMESTAMPTZ,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX idx_node_shares_share_token ON node_shares(share_token);
CREATE INDEX idx_node_shares_node_id ON node_shares(node_id);
CREATE INDEX idx_node_shares_expires_at ON node_shares(expires_at) WHERE expires_at IS NOT NULL;

-- ============================================================================
-- Triggers for updated_at timestamps
-- ============================================================================

CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER update_users_updated_at BEFORE UPDATE ON users
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

CREATE TRIGGER update_vfs_nodes_updated_at BEFORE UPDATE ON vfs_nodes
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

CREATE TRIGGER update_node_shares_updated_at BEFORE UPDATE ON node_shares
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

CREATE TRIGGER update_accounts_updated_at BEFORE UPDATE ON accounts
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
apps/backend/internal/database/postgres.go (new file, 27 lines)
package database

import (
    "database/sql"
    "time"

    "github.com/uptrace/bun"
    "github.com/uptrace/bun/dialect/pgdialect"
    "github.com/uptrace/bun/driver/pgdriver"
)

func NewFromPostgres(url string) *bun.DB {
    sqldb := sql.OpenDB(pgdriver.NewConnector(pgdriver.WithDSN(url)))

    // Configure the connection pool to prevent "database closed" errors.
    sqldb.SetMaxOpenConns(25)                  // maximum number of open connections
    sqldb.SetMaxIdleConns(5)                   // maximum number of idle connections in the pool
    sqldb.SetConnMaxLifetime(5 * time.Minute)  // maximum time a connection may be reused
    sqldb.SetConnMaxIdleTime(10 * time.Minute) // maximum time a connection may sit idle

    db := bun.NewDB(sqldb, pgdialect.New())
    return db
}
155  apps/backend/internal/drexa/config.go  Normal file
@@ -0,0 +1,155 @@
package drexa

import (
	"encoding/base64"
	"errors"
	"fmt"
	"os"

	"gopkg.in/yaml.v3"
)

type StorageMode string
type StorageBackend string

const (
	StorageModeFlat         StorageMode = "flat"
	StorageModeHierarchical StorageMode = "hierarchical"
)

const (
	StorageBackendFS StorageBackend = "fs"
	StorageBackendS3 StorageBackend = "s3"
)

type Config struct {
	Server   ServerConfig   `yaml:"server"`
	Database DatabaseConfig `yaml:"database"`
	JWT      JWTConfig      `yaml:"jwt"`
	Storage  StorageConfig  `yaml:"storage"`
}

type ServerConfig struct {
	Port int `yaml:"port"`
}

type DatabaseConfig struct {
	PostgresURL string `yaml:"postgres_url"`
}

type JWTConfig struct {
	Issuer          string `yaml:"issuer"`
	Audience        string `yaml:"audience"`
	SecretKeyBase64 string `yaml:"secret_key_base64"`
	SecretKeyPath   string `yaml:"secret_key_path"`
	SecretKey       []byte `yaml:"-"`
}

type StorageConfig struct {
	Mode     StorageMode    `yaml:"mode"`
	Backend  StorageBackend `yaml:"backend"`
	RootPath string         `yaml:"root_path"`
	Bucket   string         `yaml:"bucket"`
}

// ConfigFromFile loads configuration from a YAML file.
// The JWT secret key is loaded from the JWT_SECRET_KEY env var (base64 encoded),
// falling back to jwt.secret_key_base64 and then to the file at jwt.secret_key_path.
func ConfigFromFile(path string) (*Config, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		if os.IsNotExist(err) {
			return nil, fmt.Errorf("config file not found: %s", path)
		}
		return nil, err
	}

	var config Config
	if err := yaml.Unmarshal(data, &config); err != nil {
		return nil, err
	}

	// Load JWT secret key (priority: env var > config base64 > config file path)
	if envKey := os.Getenv("JWT_SECRET_KEY"); envKey != "" {
		key, err := base64.StdEncoding.DecodeString(envKey)
		if err != nil {
			return nil, errors.New("JWT_SECRET_KEY env var is not valid base64")
		}
		config.JWT.SecretKey = key
	} else if config.JWT.SecretKeyBase64 != "" {
		key, err := base64.StdEncoding.DecodeString(config.JWT.SecretKeyBase64)
		if err != nil {
			return nil, errors.New("jwt.secret_key_base64 is not valid base64")
		}
		config.JWT.SecretKey = key
	} else if config.JWT.SecretKeyPath != "" {
		keyData, err := os.ReadFile(config.JWT.SecretKeyPath)
		if err != nil {
			return nil, err
		}
		key, err := base64.StdEncoding.DecodeString(string(keyData))
		if err != nil {
			return nil, errors.New("jwt.secret_key_path file content is not valid base64")
		}
		config.JWT.SecretKey = key
	}

	if errs := config.Validate(); len(errs) > 0 {
		return nil, NewConfigError(errs...)
	}

	return &config, nil
}

// Validate checks for required configuration fields.
func (c *Config) Validate() []error {
	var errs []error

	// Server
	if c.Server.Port == 0 {
		errs = append(errs, errors.New("server.port is required"))
	}

	// Database
	if c.Database.PostgresURL == "" {
		errs = append(errs, errors.New("database.postgres_url is required"))
	}

	// JWT
	if c.JWT.Issuer == "" {
		errs = append(errs, errors.New("jwt.issuer is required"))
	}
	if c.JWT.Audience == "" {
		errs = append(errs, errors.New("jwt.audience is required"))
	}
	if len(c.JWT.SecretKey) == 0 {
		errs = append(errs, errors.New("jwt secret key is required (set JWT_SECRET_KEY env var, jwt.secret_key_base64, or jwt.secret_key_path)"))
	}

	// Storage
	if c.Storage.Mode == "" {
		errs = append(errs, errors.New("storage.mode is required"))
	} else if c.Storage.Mode != StorageModeFlat && c.Storage.Mode != StorageModeHierarchical {
		errs = append(errs, errors.New("storage.mode must be 'flat' or 'hierarchical'"))
	}

	if c.Storage.Backend == "" {
		errs = append(errs, errors.New("storage.backend is required"))
	} else if c.Storage.Backend != StorageBackendFS && c.Storage.Backend != StorageBackendS3 {
		errs = append(errs, errors.New("storage.backend must be 'fs' or 's3'"))
	}

	if c.Storage.Backend == StorageBackendFS && c.Storage.RootPath == "" {
		errs = append(errs, errors.New("storage.root_path is required when backend is 'fs'"))
	}
	if c.Storage.Backend == StorageBackendS3 {
		if c.Storage.Bucket == "" {
			errs = append(errs, errors.New("storage.bucket is required when backend is 's3'"))
		}
		if c.Storage.Mode == StorageModeHierarchical {
			errs = append(errs, errors.New("storage.mode must be 'flat' when backend is 's3'"))
		}
	}

	return errs
}
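For orientation, a minimal sketch of a config file that would pass `Validate`, plus a loading snippet. The field names follow the yaml struct tags above; the concrete values (port, DSN, paths) are illustrative assumptions, and the snippet has to live inside this module since the package is `internal`.

```go
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/get-drexa/drexa/internal/drexa"
)

// Hypothetical config; field names follow the yaml tags on Config.
const exampleConfig = `server:
  port: 8080
database:
  postgres_url: postgres://drexa:drexa@localhost:5432/drexa?sslmode=disable
jwt:
  issuer: drexa
  audience: drexa-api
storage:
  mode: flat
  backend: fs
  root_path: /var/lib/drexa/blobs
`

func main() {
	if err := os.WriteFile("config.yaml", []byte(exampleConfig), 0o600); err != nil {
		log.Fatal(err)
	}
	// Highest-priority secret source: a base64-encoded env var.
	os.Setenv("JWT_SECRET_KEY", "c3VwZXItc2VjcmV0LWtleQ==")

	cfg, err := drexa.ConfigFromFile("config.yaml")
	if err != nil {
		log.Fatal(err) // a *ConfigError aggregates all validation failures
	}
	fmt.Println(cfg.Server.Port, cfg.Storage.Backend)
}
```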
23  apps/backend/internal/drexa/err.go  Normal file
@@ -0,0 +1,23 @@
package drexa

import (
	"fmt"
	"strings"
)

type ConfigError struct {
	Errors []error
}

func NewConfigError(errs ...error) *ConfigError {
	return &ConfigError{Errors: errs}
}

func (e *ConfigError) Error() string {
	sb := strings.Builder{}
	sb.WriteString("invalid config:\n")
	for _, err := range e.Errors {
		sb.WriteString(fmt.Sprintf(" - %s\n", err.Error()))
	}
	return sb.String()
}
83  apps/backend/internal/drexa/server.go  Normal file
@@ -0,0 +1,83 @@
package drexa

import (
	"context"
	"fmt"

	"github.com/get-drexa/drexa/internal/account"
	"github.com/get-drexa/drexa/internal/auth"
	"github.com/get-drexa/drexa/internal/blob"
	"github.com/get-drexa/drexa/internal/database"
	"github.com/get-drexa/drexa/internal/httperr"
	"github.com/get-drexa/drexa/internal/upload"
	"github.com/get-drexa/drexa/internal/user"
	"github.com/get-drexa/drexa/internal/virtualfs"
	"github.com/gofiber/fiber/v2"
	"github.com/gofiber/fiber/v2/middleware/logger"
	"github.com/uptrace/bun/extra/bundebug"
)

func NewServer(c Config) (*fiber.App, error) {
	app := fiber.New(fiber.Config{
		ErrorHandler:      httperr.ErrorHandler,
		StreamRequestBody: true,
	})
	app.Use(logger.New())

	db := database.NewFromPostgres(c.Database.PostgresURL)
	db.AddQueryHook(bundebug.NewQueryHook(bundebug.WithVerbose(true)))

	// Initialize blob store based on config
	var blobStore blob.Store
	switch c.Storage.Backend {
	case StorageBackendFS:
		blobStore = blob.NewFSStore(blob.FSStoreConfig{
			Root: c.Storage.RootPath,
		})
	case StorageBackendS3:
		return nil, fmt.Errorf("s3 storage backend not yet implemented")
	default:
		return nil, fmt.Errorf("unknown storage backend: %s", c.Storage.Backend)
	}

	err := blobStore.Initialize(context.Background())
	if err != nil {
		return nil, fmt.Errorf("failed to initialize blob store: %w", err)
	}

	// Initialize key resolver based on config
	var keyResolver virtualfs.BlobKeyResolver
	switch c.Storage.Mode {
	case StorageModeFlat:
		keyResolver = virtualfs.NewFlatKeyResolver()
	case StorageModeHierarchical:
		keyResolver = virtualfs.NewHierarchicalKeyResolver(db)
	default:
		return nil, fmt.Errorf("unknown storage mode: %s", c.Storage.Mode)
	}

	vfs, err := virtualfs.NewVirtualFS(blobStore, keyResolver)
	if err != nil {
		return nil, fmt.Errorf("failed to create virtual file system: %w", err)
	}

	userService := user.NewService()
	authService := auth.NewService(userService, auth.TokenConfig{
		Issuer:    c.JWT.Issuer,
		Audience:  c.JWT.Audience,
		SecretKey: c.JWT.SecretKey,
	})
	uploadService := upload.NewService(vfs, blobStore)
	accountService := account.NewService(userService, vfs)

	authMiddleware := auth.NewBearerAuthMiddleware(authService, db)

	api := app.Group("/api")

	accRouter := account.NewHTTPHandler(accountService, authService, db, authMiddleware).RegisterRoutes(api)

	auth.NewHTTPHandler(authService, db).RegisterRoutes(api)
	upload.NewHTTPHandler(uploadService, db).RegisterRoutes(accRouter)

	return app, nil
}
42  apps/backend/internal/httperr/error.go  Normal file
@@ -0,0 +1,42 @@
package httperr

import (
	"fmt"

	"github.com/gofiber/fiber/v2"
)

// HTTPError represents an HTTP error with a status code and underlying error.
type HTTPError struct {
	Code    int
	Message string
	Err     error
}

// Error implements the error interface.
func (e *HTTPError) Error() string {
	if e.Err != nil {
		return fmt.Sprintf("HTTP %d: %s: %v", e.Code, e.Message, e.Err)
	}
	return fmt.Sprintf("HTTP %d: %s", e.Code, e.Message)
}

// Unwrap returns the underlying error.
func (e *HTTPError) Unwrap() error {
	return e.Err
}

// NewHTTPError creates a new HTTPError with the given status code, message, and underlying error.
func NewHTTPError(code int, message string, err error) *HTTPError {
	return &HTTPError{
		Code:    code,
		Message: message,
		Err:     err,
	}
}

// Internal creates a new HTTPError with status 500.
func Internal(err error) *HTTPError {
	return NewHTTPError(fiber.StatusInternalServerError, "Internal", err)
}
64  apps/backend/internal/httperr/handler.go  Normal file
@@ -0,0 +1,64 @@
package httperr

import (
	"errors"
	"log/slog"

	"github.com/gofiber/fiber/v2"
)

// ErrorHandler is a global error handler for Fiber that logs errors and returns appropriate responses.
func ErrorHandler(c *fiber.Ctx, err error) error {
	// Default status code
	code := fiber.StatusInternalServerError
	message := "Internal"

	// Check if it's our custom HTTPError
	var httpErr *HTTPError
	if errors.As(err, &httpErr) {
		code = httpErr.Code
		message = httpErr.Message

		// Log the error with underlying error details
		if httpErr.Err != nil {
			slog.Error("HTTP error",
				"status", code,
				"message", message,
				"error", httpErr.Err.Error(),
				"path", c.Path(),
				"method", c.Method(),
			)
		} else {
			slog.Warn("HTTP error",
				"status", code,
				"message", message,
				"path", c.Path(),
				"method", c.Method(),
			)
		}
	} else {
		// Check if it's a Fiber error
		var fiberErr *fiber.Error
		if errors.As(err, &fiberErr) {
			code = fiberErr.Code
			message = fiberErr.Message
		} else {
			// Generic error - log it
			slog.Error("Unhandled error",
				"status", code,
				"error", err.Error(),
				"path", c.Path(),
				"method", c.Method(),
			)
		}
	}

	// Set Content-Type header
	c.Set(fiber.HeaderContentType, fiber.MIMEApplicationJSONCharsetUTF8)

	// Return JSON response
	return c.Status(code).JSON(fiber.Map{
		"error": message,
	})
}
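A small sketch (not from the repo) of how a handler cooperates with this error handler: return an `*HTTPError` and let the global handler log it and shape the JSON body. The route and the `errNoSuchThing` sentinel are invented for the example.

```go
package main

import (
	"errors"

	"github.com/get-drexa/drexa/internal/httperr"
	"github.com/gofiber/fiber/v2"
)

var errNoSuchThing = errors.New("no such thing") // example sentinel

func main() {
	app := fiber.New(fiber.Config{ErrorHandler: httperr.ErrorHandler})

	app.Get("/things/:id", func(c *fiber.Ctx) error {
		// ErrorHandler will log this via slog and respond with
		// HTTP 404 and body {"error":"Thing not found"}.
		return httperr.NewHTTPError(fiber.StatusNotFound, "Thing not found", errNoSuchThing)
	})

	_ = app.Listen(":3000")
}
```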
23  apps/backend/internal/ioext/counting_reader.go  Normal file
@@ -0,0 +1,23 @@
package ioext

import "io"

// CountingReader wraps an io.Reader and counts the bytes read through it.
type CountingReader struct {
	reader io.Reader
	count  int64
}

func NewCountingReader(reader io.Reader) *CountingReader {
	return &CountingReader{reader: reader}
}

func (r *CountingReader) Read(p []byte) (n int, err error) {
	n, err = r.reader.Read(p)
	r.count += int64(n)
	return n, err
}

// Count returns the total number of bytes read so far.
func (r *CountingReader) Count() int64 {
	return r.count
}
24  apps/backend/internal/ioext/limit_read_closer.go  Normal file
@@ -0,0 +1,24 @@
package ioext

import "io"

// LimitReadCloser wraps an io.ReadCloser so that reads are capped at a fixed
// length while Close still closes the underlying reader.
type LimitReadCloser struct {
	reader      io.ReadCloser
	limitReader io.Reader
}

func NewLimitReadCloser(reader io.ReadCloser, length int64) *LimitReadCloser {
	return &LimitReadCloser{
		reader:      reader,
		limitReader: io.LimitReader(reader, length),
	}
}

func (r *LimitReadCloser) Read(p []byte) (n int, err error) {
	return r.limitReader.Read(p)
}

func (r *LimitReadCloser) Close() error {
	return r.reader.Close()
}
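The two wrappers compose naturally. A quick sketch, assuming nothing beyond the two types above, that caps a stream at a byte budget and reports how much was actually consumed:

```go
package main

import (
	"fmt"
	"io"
	"os"
	"strings"

	"github.com/get-drexa/drexa/internal/ioext"
)

func main() {
	// Pretend this is a request body; io.NopCloser gives it a Close method.
	body := io.NopCloser(strings.NewReader("hello, drexa"))

	limited := ioext.NewLimitReadCloser(body, 5) // read at most 5 bytes
	defer limited.Close()

	counting := ioext.NewCountingReader(limited)
	if _, err := io.Copy(os.Stdout, counting); err != nil {
		panic(err)
	}
	fmt.Printf("\nread %d bytes\n", counting.Count()) // read 5 bytes
}
```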
138  apps/backend/internal/password/password.go  Normal file
@@ -0,0 +1,138 @@
package password

import (
	"crypto/rand"
	"crypto/subtle"
	"encoding/base64"
	"errors"
	"fmt"
	"strings"

	"golang.org/x/crypto/argon2"
)

// Hashed represents a securely hashed password.
// This type ensures plaintext passwords cannot be accidentally stored.
type Hashed string

// argon2id parameters
const (
	memory      = 64 * 1024
	iterations  = 3
	parallelism = 2
	saltLength  = 16
	keyLength   = 32
)

var (
	ErrInvalidHash         = errors.New("invalid hash format")
	ErrIncompatibleHash    = errors.New("incompatible hash algorithm")
	ErrIncompatibleVersion = errors.New("incompatible argon2 version")
)

type argon2Hash struct {
	memory      uint32
	iterations  uint32
	parallelism uint8
	salt        []byte
	hash        []byte
}

// Hash securely hashes a plaintext password using argon2id.
func Hash(plain string) (Hashed, error) {
	salt := make([]byte, saltLength)
	if _, err := rand.Read(salt); err != nil {
		return "", fmt.Errorf("failed to generate salt: %w", err)
	}

	hash := argon2.IDKey(
		[]byte(plain),
		salt,
		iterations,
		memory,
		parallelism,
		keyLength,
	)

	b64Salt := base64.RawStdEncoding.EncodeToString(salt)
	b64Hash := base64.RawStdEncoding.EncodeToString(hash)

	encoded := fmt.Sprintf(
		"$argon2id$v=%d$m=%d,t=%d,p=%d$%s$%s",
		argon2.Version,
		memory,
		iterations,
		parallelism,
		b64Salt,
		b64Hash,
	)

	return Hashed(encoded), nil
}

// Verify checks if a plaintext password matches a hashed password.
func Verify(plain string, hashed Hashed) (bool, error) {
	h, err := decodeHash(string(hashed))
	if err != nil {
		return false, err
	}

	otherHash := argon2.IDKey(
		[]byte(plain),
		h.salt,
		h.iterations,
		h.memory,
		h.parallelism,
		uint32(len(h.hash)),
	)

	if subtle.ConstantTimeCompare(h.hash, otherHash) == 1 {
		return true, nil
	}

	return false, nil
}

func decodeHash(encodedHash string) (*argon2Hash, error) {
	parts := strings.Split(encodedHash, "$")
	if len(parts) != 6 {
		return nil, ErrInvalidHash
	}

	if parts[1] != "argon2id" {
		return nil, ErrIncompatibleHash
	}

	var version int
	if _, err := fmt.Sscanf(parts[2], "v=%d", &version); err != nil {
		return nil, fmt.Errorf("failed to parse version: %w", err)
	}
	if version != argon2.Version {
		return nil, ErrIncompatibleVersion
	}

	h := &argon2Hash{}
	if _, err := fmt.Sscanf(
		parts[3],
		"m=%d,t=%d,p=%d",
		&h.memory,
		&h.iterations,
		&h.parallelism,
	); err != nil {
		return nil, fmt.Errorf("failed to parse parameters: %w", err)
	}

	salt, err := base64.RawStdEncoding.DecodeString(parts[4])
	if err != nil {
		return nil, fmt.Errorf("failed to decode salt: %w", err)
	}
	h.salt = salt

	hash, err := base64.RawStdEncoding.DecodeString(parts[5])
	if err != nil {
		return nil, fmt.Errorf("failed to decode hash: %w", err)
	}
	h.hash = hash

	return h, nil
}
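A round-trip sketch of this API, runnable inside the module (the plaintext is obviously illustrative):

```go
package main

import (
	"fmt"
	"log"

	"github.com/get-drexa/drexa/internal/password"
)

func main() {
	hashed, err := password.Hash("correct horse battery staple")
	if err != nil {
		log.Fatal(err)
	}
	// Encoded in the PHC-style format produced by Hash:
	// $argon2id$v=19$m=65536,t=3,p=2$<salt>$<hash>
	fmt.Println(hashed)

	ok, err := password.Verify("correct horse battery staple", hashed)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(ok) // true

	ok, _ = password.Verify("wrong password", hashed)
	fmt.Println(ok) // false
}
```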
10  apps/backend/internal/upload/err.go  Normal file
@@ -0,0 +1,10 @@
package upload

import "errors"

var (
	ErrNotFound           = errors.New("not found")
	ErrParentNotDirectory = errors.New("parent is not a directory")
	ErrConflict           = errors.New("node conflict")
	ErrContentNotUploaded = errors.New("content has not been uploaded")
)
120  apps/backend/internal/upload/http.go  Normal file
@@ -0,0 +1,120 @@
package upload

import (
	"errors"
	"fmt"

	"github.com/get-drexa/drexa/internal/account"
	"github.com/get-drexa/drexa/internal/httperr"
	"github.com/gofiber/fiber/v2"
	"github.com/uptrace/bun"
)

type createUploadRequest struct {
	ParentID string `json:"parentId"`
	Name     string `json:"name"`
}

type updateUploadRequest struct {
	Status Status `json:"status"`
}

type HTTPHandler struct {
	service *Service
	db      *bun.DB
}

func NewHTTPHandler(s *Service, db *bun.DB) *HTTPHandler {
	return &HTTPHandler{service: s, db: db}
}

func (h *HTTPHandler) RegisterRoutes(api fiber.Router) {
	upload := api.Group("/uploads")

	upload.Post("/", h.Create)
	upload.Put("/:uploadID/content", h.ReceiveContent)
	upload.Patch("/:uploadID", h.Update)
}

func (h *HTTPHandler) Create(c *fiber.Ctx) error {
	account := account.CurrentAccount(c)
	if account == nil {
		return c.SendStatus(fiber.StatusUnauthorized)
	}

	req := new(createUploadRequest)
	if err := c.BodyParser(req); err != nil {
		return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid request"})
	}

	upload, err := h.service.CreateUpload(c.Context(), h.db, account.ID, CreateUploadOptions{
		ParentID: req.ParentID,
		Name:     req.Name,
	})
	if err != nil {
		if errors.Is(err, ErrNotFound) {
			return c.SendStatus(fiber.StatusNotFound)
		}
		if errors.Is(err, ErrParentNotDirectory) {
			return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Parent is not a directory"})
		}
		if errors.Is(err, ErrConflict) {
			return c.Status(fiber.StatusConflict).JSON(fiber.Map{"error": "A file with this name already exists"})
		}
		return httperr.Internal(err)
	}

	if upload.UploadURL == "" {
		upload.UploadURL = fmt.Sprintf("%s%s/%s/content", c.BaseURL(), c.OriginalURL(), upload.ID)
	}

	return c.JSON(upload)
}

func (h *HTTPHandler) ReceiveContent(c *fiber.Ctx) error {
	account := account.CurrentAccount(c)
	if account == nil {
		return c.SendStatus(fiber.StatusUnauthorized)
	}

	uploadID := c.Params("uploadID")

	err := h.service.ReceiveUpload(c.Context(), h.db, account.ID, uploadID, c.Context().RequestBodyStream())
	defer c.Context().Request.CloseBodyStream()
	if err != nil {
		if errors.Is(err, ErrNotFound) {
			return c.SendStatus(fiber.StatusNotFound)
		}
		return httperr.Internal(err)
	}

	return c.SendStatus(fiber.StatusNoContent)
}

func (h *HTTPHandler) Update(c *fiber.Ctx) error {
	account := account.CurrentAccount(c)
	if account == nil {
		return c.SendStatus(fiber.StatusUnauthorized)
	}

	req := new(updateUploadRequest)
	if err := c.BodyParser(req); err != nil {
		return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid request"})
	}

	if req.Status == StatusCompleted {
		upload, err := h.service.CompleteUpload(c.Context(), h.db, account.ID, c.Params("uploadID"))
		if err != nil {
			if errors.Is(err, ErrNotFound) {
				return c.SendStatus(fiber.StatusNotFound)
			}
			if errors.Is(err, ErrContentNotUploaded) {
				return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Content has not been uploaded"})
			}
			return httperr.Internal(err)
		}
		return c.JSON(upload)
	}

	return c.SendStatus(fiber.StatusBadRequest)
}
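Putting the three routes together, the client-side flow looks roughly like the sketch below. The base URL, the account-scoped prefix, the parent ID, and the bearer token are all assumptions for illustration; the real prefix depends on how account.NewHTTPHandler registers its router.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
	"strings"
)

const (
	base  = "http://localhost:8080/api/accounts/me" // assumed prefix
	token = "Bearer <access-token>"                 // placeholder
)

func do(req *http.Request) *http.Response {
	req.Header.Set("Authorization", token)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	return resp
}

func main() {
	// 1) POST /uploads creates a pending file node and returns an upload target.
	body, _ := json.Marshal(map[string]string{"parentId": "<root-public-id>", "name": "notes.txt"})
	req, _ := http.NewRequest(http.MethodPost, base+"/uploads", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	resp := do(req)
	var up struct {
		ID        string `json:"id"`
		UploadURL string `json:"uploadUrl"`
	}
	json.NewDecoder(resp.Body).Decode(&up)
	resp.Body.Close()

	// 2) PUT the raw bytes to uploadUrl (served by ReceiveContent when the
	// blob store has no direct-upload support).
	req, _ = http.NewRequest(http.MethodPut, up.UploadURL, strings.NewReader("hello"))
	do(req).Body.Close()

	// 3) PATCH the upload to completed; the server verifies the content exists.
	req, _ = http.NewRequest(http.MethodPatch, base+"/uploads/"+up.ID,
		strings.NewReader(`{"status":"completed"}`))
	req.Header.Set("Content-Type", "application/json")
	do(req).Body.Close()

	fmt.Println("upload", up.ID, "completed")
}
```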
144  apps/backend/internal/upload/service.go  Normal file
@@ -0,0 +1,144 @@
package upload

import (
	"context"
	"errors"
	"io"
	"sync"
	"time"

	"github.com/get-drexa/drexa/internal/blob"
	"github.com/get-drexa/drexa/internal/virtualfs"
	"github.com/google/uuid"
	"github.com/uptrace/bun"
)

type Service struct {
	vfs       *virtualfs.VirtualFS
	blobStore blob.Store

	pendingUploads sync.Map
}

func NewService(vfs *virtualfs.VirtualFS, blobStore blob.Store) *Service {
	return &Service{
		vfs:       vfs,
		blobStore: blobStore,

		pendingUploads: sync.Map{},
	}
}

type CreateUploadOptions struct {
	ParentID string
	Name     string
}

func (s *Service) CreateUpload(ctx context.Context, db bun.IDB, accountID uuid.UUID, opts CreateUploadOptions) (*Upload, error) {
	parentNode, err := s.vfs.FindNodeByPublicID(ctx, db, accountID, opts.ParentID)
	if err != nil {
		if errors.Is(err, virtualfs.ErrNodeNotFound) {
			return nil, ErrNotFound
		}
		return nil, err
	}

	if parentNode.Kind != virtualfs.NodeKindDirectory {
		return nil, ErrParentNotDirectory
	}

	node, err := s.vfs.CreateFile(ctx, db, accountID, virtualfs.CreateFileOptions{
		ParentID: parentNode.ID,
		Name:     opts.Name,
	})
	if err != nil {
		if errors.Is(err, virtualfs.ErrNodeConflict) {
			return nil, ErrConflict
		}
		return nil, err
	}

	var uploadURL string
	if s.blobStore.SupportsDirectUpload() {
		uploadURL, err = s.blobStore.GenerateUploadURL(ctx, node.BlobKey, blob.UploadURLOptions{
			Duration: 1 * time.Hour,
		})
		if err != nil {
			_ = s.vfs.PermanentlyDeleteNode(ctx, db, node)
			return nil, err
		}
	} else {
		uploadURL = ""
	}

	upload := &Upload{
		ID:         node.PublicID,
		Status:     StatusPending,
		TargetNode: node,
		UploadURL:  uploadURL,
	}

	s.pendingUploads.Store(upload.ID, upload)

	return upload, nil
}

func (s *Service) ReceiveUpload(ctx context.Context, db bun.IDB, accountID uuid.UUID, uploadID string, reader io.Reader) error {
	n, ok := s.pendingUploads.Load(uploadID)
	if !ok {
		return ErrNotFound
	}

	upload, ok := n.(*Upload)
	if !ok {
		return ErrNotFound
	}

	if upload.TargetNode.AccountID != accountID {
		return ErrNotFound
	}

	err := s.vfs.WriteFile(ctx, db, upload.TargetNode, virtualfs.FileContentFromReader(reader))
	if err != nil {
		return err
	}

	upload.Status = StatusCompleted

	return nil
}

func (s *Service) CompleteUpload(ctx context.Context, db bun.IDB, accountID uuid.UUID, uploadID string) (*Upload, error) {
	n, ok := s.pendingUploads.Load(uploadID)
	if !ok {
		return nil, ErrNotFound
	}

	upload, ok := n.(*Upload)
	if !ok {
		return nil, ErrNotFound
	}

	if upload.TargetNode.AccountID != accountID {
		return nil, ErrNotFound
	}

	if upload.TargetNode.Status == virtualfs.NodeStatusReady && upload.Status == StatusCompleted {
		return upload, nil
	}

	err := s.vfs.WriteFile(ctx, db, upload.TargetNode, virtualfs.FileContentFromBlobKey(upload.TargetNode.BlobKey))
	if err != nil {
		if errors.Is(err, blob.ErrNotFound) {
			return nil, ErrContentNotUploaded
		}
		return nil, err
	}

	upload.Status = StatusCompleted
	s.pendingUploads.Delete(uploadID)

	return upload, nil
}
18  apps/backend/internal/upload/upload.go  Normal file
@@ -0,0 +1,18 @@
package upload

import "github.com/get-drexa/drexa/internal/virtualfs"

type Status string

const (
	StatusPending   Status = "pending"
	StatusCompleted Status = "completed"
	StatusFailed    Status = "failed"
)

type Upload struct {
	ID         string          `json:"id"`
	Status     Status          `json:"status"`
	TargetNode *virtualfs.Node `json:"-"`
	UploadURL  string          `json:"uploadUrl"`
}
36  apps/backend/internal/user/err.go  Normal file
@@ -0,0 +1,36 @@
package user

import (
	"fmt"

	"github.com/google/uuid"
)

type NotFoundError struct {
	// id is the ID that was used to try to find the user.
	// Not set if not tried.
	id uuid.UUID

	// email is the email that was used to try to find the user.
	// Not set if not tried.
	email string
}

func newNotFoundError(id uuid.UUID, email string) *NotFoundError {
	return &NotFoundError{id, email}
}

func (e *NotFoundError) Error() string {
	if e.email != "" {
		return fmt.Sprintf("user not found: %s", e.email)
	}
	return fmt.Sprintf("user not found: %v", e.id)
}

type AlreadyExistsError struct {
	// Email is the email that was used to try to create the user.
	Email string
}

func newAlreadyExistsError(email string) *AlreadyExistsError {
	return &AlreadyExistsError{email}
}

func (e *AlreadyExistsError) Error() string {
	return fmt.Sprintf("user with email %s already exists", e.Email)
}
76  apps/backend/internal/user/service.go  Normal file
@@ -0,0 +1,76 @@
package user

import (
	"context"
	"database/sql"
	"errors"

	"github.com/get-drexa/drexa/internal/database"
	"github.com/get-drexa/drexa/internal/password"
	"github.com/google/uuid"
	"github.com/uptrace/bun"
)

type Service struct{}

type UserRegistrationOptions struct {
	Email       string
	DisplayName string
	Password    password.Hashed
}

func NewService() *Service {
	return &Service{}
}

func (s *Service) RegisterUser(ctx context.Context, db bun.IDB, opts UserRegistrationOptions) (*User, error) {
	uid, err := newUserID()
	if err != nil {
		return nil, err
	}

	u := User{
		ID:          uid,
		Email:       opts.Email,
		DisplayName: opts.DisplayName,
		Password:    opts.Password,
	}

	_, err = db.NewInsert().Model(&u).Returning("*").Exec(ctx)
	if err != nil {
		if database.IsUniqueViolation(err) {
			return nil, newAlreadyExistsError(u.Email)
		}
		return nil, err
	}

	return &u, nil
}

func (s *Service) UserByID(ctx context.Context, db bun.IDB, id uuid.UUID) (*User, error) {
	var user User
	err := db.NewSelect().Model(&user).Where("id = ?", id).Scan(ctx)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return nil, newNotFoundError(id, "")
		}
		return nil, err
	}
	return &user, nil
}

func (s *Service) UserByEmail(ctx context.Context, db bun.IDB, email string) (*User, error) {
	var user User
	err := db.NewSelect().Model(&user).Where("email = ?", email).Scan(ctx)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return nil, newNotFoundError(uuid.Nil, email)
		}
		return nil, err
	}
	return &user, nil
}

func (s *Service) UserExistsByEmail(ctx context.Context, db bun.IDB, email string) (bool, error) {
	return db.NewSelect().Model(&User{}).Where("email = ?", email).Exists(ctx)
}
24  apps/backend/internal/user/user.go  Normal file
@@ -0,0 +1,24 @@
package user

import (
	"time"

	"github.com/get-drexa/drexa/internal/password"
	"github.com/google/uuid"
	"github.com/uptrace/bun"
)

type User struct {
	bun.BaseModel `bun:"users"`

	ID          uuid.UUID       `bun:",pk,type:uuid" json:"id"`
	DisplayName string          `bun:"display_name" json:"displayName"`
	Email       string          `bun:"email,unique,notnull" json:"email"`
	Password    password.Hashed `bun:"password,notnull" json:"-"`
	CreatedAt   time.Time       `bun:"created_at,notnull,nullzero" json:"createdAt"`
	UpdatedAt   time.Time       `bun:"updated_at,notnull,nullzero" json:"updatedAt"`
}

func newUserID() (uuid.UUID, error) {
	return uuid.NewV7()
}
9  apps/backend/internal/virtualfs/err.go  Normal file
@@ -0,0 +1,9 @@
package virtualfs

import "errors"

var (
	ErrNodeNotFound         = errors.New("node not found")
	ErrNodeConflict         = errors.New("node conflict")
	ErrUnsupportedOperation = errors.New("unsupported operation")
)
35  apps/backend/internal/virtualfs/flat_key_resolver.go  Normal file
@@ -0,0 +1,35 @@
package virtualfs

import (
	"context"

	"github.com/get-drexa/drexa/internal/blob"
	"github.com/google/uuid"
)

type FlatKeyResolver struct{}

var _ BlobKeyResolver = &FlatKeyResolver{}

func NewFlatKeyResolver() *FlatKeyResolver {
	return &FlatKeyResolver{}
}

func (r *FlatKeyResolver) ShouldPersistKey() bool {
	return true
}

func (r *FlatKeyResolver) Resolve(ctx context.Context, node *Node) (blob.Key, error) {
	if node.BlobKey == "" {
		id, err := uuid.NewV7()
		if err != nil {
			return "", err
		}
		return blob.Key(id.String()), nil
	}
	return node.BlobKey, nil
}

func (r *FlatKeyResolver) ResolveDeletionKeys(ctx context.Context, node *Node, allKeys []blob.Key) (*DeletionPlan, error) {
	return &DeletionPlan{Keys: allKeys}, nil
}
40  apps/backend/internal/virtualfs/hierarchical_key_resolver.go  Normal file
@@ -0,0 +1,40 @@
package virtualfs

import (
	"context"
	"fmt"

	"github.com/get-drexa/drexa/internal/blob"
	"github.com/uptrace/bun"
)

type HierarchicalKeyResolver struct {
	db *bun.DB
}

var _ BlobKeyResolver = &HierarchicalKeyResolver{}

func NewHierarchicalKeyResolver(db *bun.DB) *HierarchicalKeyResolver {
	return &HierarchicalKeyResolver{db: db}
}

func (r *HierarchicalKeyResolver) ShouldPersistKey() bool {
	return false
}

func (r *HierarchicalKeyResolver) Resolve(ctx context.Context, node *Node) (blob.Key, error) {
	path, err := buildNodeAbsolutePath(ctx, r.db, node.ID)
	if err != nil {
		return "", err
	}

	return blob.Key(fmt.Sprintf("%s/%s", node.AccountID, path)), nil
}

func (r *HierarchicalKeyResolver) ResolveDeletionKeys(ctx context.Context, node *Node, allKeys []blob.Key) (*DeletionPlan, error) {
	path, err := buildNodeAbsolutePath(ctx, r.db, node.ID)
	if err != nil {
		return nil, err
	}
	return &DeletionPlan{Prefix: blob.Key(path)}, nil
}
21  apps/backend/internal/virtualfs/key_resolver.go  Normal file
@@ -0,0 +1,21 @@
package virtualfs

import (
	"context"

	"github.com/get-drexa/drexa/internal/blob"
)

type BlobKeyResolver interface {
	// ShouldPersistKey returns true if the resolved key should be stored in node.BlobKey.
	// Flat keys (e.g. UUIDs) return true - key is generated once and stored.
	// Hierarchical keys return false - key is derived from path each time.
	ShouldPersistKey() bool
	Resolve(ctx context.Context, node *Node) (blob.Key, error)
	ResolveDeletionKeys(ctx context.Context, node *Node, allKeys []blob.Key) (*DeletionPlan, error)
}

type DeletionPlan struct {
	Prefix blob.Key
	Keys   []blob.Key
}
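As an illustration of this contract, here is a hypothetical third resolver (not in the repo) that shards flat UUID keys into two-character prefix directories; like FlatKeyResolver, it generates keys once and persists them.

```go
// Hypothetical example for the virtualfs package, not part of the repo.
package virtualfs

import (
	"context"
	"fmt"

	"github.com/get-drexa/drexa/internal/blob"
	"github.com/google/uuid"
)

// ShardedKeyResolver spreads blobs across prefix directories, e.g. "ab/ab12…".
type ShardedKeyResolver struct{}

var _ BlobKeyResolver = &ShardedKeyResolver{}

// Keys are random, so they must be generated once and persisted.
func (r *ShardedKeyResolver) ShouldPersistKey() bool { return true }

func (r *ShardedKeyResolver) Resolve(ctx context.Context, node *Node) (blob.Key, error) {
	if node.BlobKey != "" {
		return node.BlobKey, nil
	}
	id, err := uuid.NewV7()
	if err != nil {
		return "", err
	}
	s := id.String()
	return blob.Key(fmt.Sprintf("%s/%s", s[:2], s)), nil
}

// Persisted keys are authoritative, so deletion just replays them.
func (r *ShardedKeyResolver) ResolveDeletionKeys(ctx context.Context, node *Node, allKeys []blob.Key) (*DeletionPlan, error) {
	return &DeletionPlan{Keys: allKeys}, nil
}
```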
53  apps/backend/internal/virtualfs/node.go  Normal file
@@ -0,0 +1,53 @@
package virtualfs

import (
	"time"

	"github.com/get-drexa/drexa/internal/blob"
	"github.com/google/uuid"
	"github.com/uptrace/bun"
)

type NodeKind string

const (
	NodeKindFile      NodeKind = "file"
	NodeKindDirectory NodeKind = "directory"
)

type NodeStatus string

const (
	NodeStatusPending NodeStatus = "pending"
	NodeStatusReady   NodeStatus = "ready"
)

type Node struct {
	bun.BaseModel `bun:"vfs_nodes"`

	ID        uuid.UUID  `bun:",pk,type:uuid"`
	PublicID  string     `bun:"public_id,notnull"`
	AccountID uuid.UUID  `bun:"account_id,notnull,type:uuid"`
	ParentID  uuid.UUID  `bun:"parent_id,nullzero"`
	Kind      NodeKind   `bun:"kind,notnull"`
	Status    NodeStatus `bun:"status,notnull"`
	Name      string     `bun:"name,notnull"`

	BlobKey  blob.Key `bun:"blob_key,nullzero"`
	Size     int64    `bun:"size"`
	MimeType string   `bun:"mime_type,nullzero"`

	CreatedAt time.Time `bun:"created_at,notnull,nullzero"`
	UpdatedAt time.Time `bun:"updated_at,notnull,nullzero"`
	DeletedAt time.Time `bun:"deleted_at,nullzero"`
}

func newNodeID() (uuid.UUID, error) {
	return uuid.NewV7()
}

// IsAccessible returns true if the node can be accessed.
// If the node is not ready or if it is soft deleted, it cannot be accessed.
func (n *Node) IsAccessible() bool {
	return n.DeletedAt.IsZero() && n.Status == NodeStatusReady
}
42  apps/backend/internal/virtualfs/path.go  Normal file
@@ -0,0 +1,42 @@
package virtualfs

import (
	"context"
	"database/sql"
	"errors"
	"strings"

	"github.com/google/uuid"
	"github.com/uptrace/bun"
)

const absolutePathQuery = `WITH RECURSIVE path AS (
	SELECT id, parent_id, name, 1 as depth
	FROM vfs_nodes WHERE id = ? AND deleted_at IS NULL

	UNION ALL

	SELECT n.id, n.parent_id, n.name, p.depth + 1
	FROM vfs_nodes n
	JOIN path p ON n.id = p.parent_id
	WHERE n.deleted_at IS NULL
)
SELECT name FROM path
WHERE EXISTS (SELECT 1 FROM path WHERE parent_id IS NULL)
ORDER BY depth DESC;`

func JoinPath(parts ...string) string {
	return strings.Join(parts, "/")
}

func buildNodeAbsolutePath(ctx context.Context, db bun.IDB, nodeID uuid.UUID) (string, error) {
	var path []string
	err := db.NewRaw(absolutePathQuery, nodeID).Scan(ctx, &path)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return "", ErrNodeNotFound
		}
		return "", err
	}
	return JoinPath(path...), nil
}
520  apps/backend/internal/virtualfs/vfs.go  Normal file
@@ -0,0 +1,520 @@
package virtualfs

import (
	"bytes"
	"context"
	"crypto/rand"
	"database/sql"
	"encoding/binary"
	"errors"
	"io"

	"github.com/gabriel-vasile/mimetype"
	"github.com/get-drexa/drexa/internal/blob"
	"github.com/get-drexa/drexa/internal/database"
	"github.com/get-drexa/drexa/internal/ioext"
	"github.com/google/uuid"
	"github.com/sqids/sqids-go"
	"github.com/uptrace/bun"
)

type VirtualFS struct {
	blobStore   blob.Store
	keyResolver BlobKeyResolver

	sqid *sqids.Sqids
}

type CreateNodeOptions struct {
	ParentID uuid.UUID
	Kind     NodeKind
	Name     string
}

type CreateFileOptions struct {
	ParentID uuid.UUID
	Name     string
}

type FileContent struct {
	reader  io.Reader
	blobKey blob.Key
}

const RootDirectoryName = "root"

func FileContentFromReader(reader io.Reader) FileContent {
	return FileContent{reader: reader}
}

func FileContentFromBlobKey(blobKey blob.Key) FileContent {
	return FileContent{blobKey: blobKey}
}

func NewVirtualFS(blobStore blob.Store, keyResolver BlobKeyResolver) (*VirtualFS, error) {
	sqid, err := sqids.New()
	if err != nil {
		return nil, err
	}
	return &VirtualFS{
		blobStore:   blobStore,
		keyResolver: keyResolver,
		sqid:        sqid,
	}, nil
}

func (vfs *VirtualFS) FindNode(ctx context.Context, db bun.IDB, accountID, fileID string) (*Node, error) {
	var node Node
	err := db.NewSelect().Model(&node).
		Where("account_id = ?", accountID).
		Where("id = ?", fileID).
		Where("status = ?", NodeStatusReady).
		Where("deleted_at IS NULL").
		Scan(ctx)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return nil, ErrNodeNotFound
		}
		return nil, err
	}
	return &node, nil
}

func (vfs *VirtualFS) FindNodeByPublicID(ctx context.Context, db bun.IDB, accountID uuid.UUID, publicID string) (*Node, error) {
	var node Node
	err := db.NewSelect().Model(&node).
		Where("account_id = ?", accountID).
		Where("public_id = ?", publicID).
		Where("status = ?", NodeStatusReady).
		Where("deleted_at IS NULL").
		Scan(ctx)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return nil, ErrNodeNotFound
		}
		return nil, err
	}
	return &node, nil
}

func (vfs *VirtualFS) ListChildren(ctx context.Context, db bun.IDB, node *Node) ([]*Node, error) {
	if !node.IsAccessible() {
		return nil, ErrNodeNotFound
	}

	var nodes []*Node
	err := db.NewSelect().Model(&nodes).
		Where("account_id = ?", node.AccountID).
		Where("parent_id = ?", node.ID).
		Where("status = ?", NodeStatusReady).
		Where("deleted_at IS NULL").
		Scan(ctx)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return make([]*Node, 0), nil
		}
		return nil, err
	}

	return nodes, nil
}

func (vfs *VirtualFS) CreateFile(ctx context.Context, db bun.IDB, accountID uuid.UUID, opts CreateFileOptions) (*Node, error) {
	pid, err := vfs.generatePublicID()
	if err != nil {
		return nil, err
	}

	id, err := newNodeID()
	if err != nil {
		return nil, err
	}

	node := Node{
		ID:        id,
		PublicID:  pid,
		AccountID: accountID,
		ParentID:  opts.ParentID,
		Kind:      NodeKindFile,
		Status:    NodeStatusPending,
		Name:      opts.Name,
	}

	if vfs.keyResolver.ShouldPersistKey() {
		node.BlobKey, err = vfs.keyResolver.Resolve(ctx, &node)
		if err != nil {
			return nil, err
		}
	}

	_, err = db.NewInsert().Model(&node).Returning("*").Exec(ctx)
	if err != nil {
		if database.IsUniqueViolation(err) {
			return nil, ErrNodeConflict
		}
		return nil, err
	}

	return &node, nil
}

func (vfs *VirtualFS) WriteFile(ctx context.Context, db bun.IDB, node *Node, content FileContent) error {
	if content.reader == nil && content.blobKey.IsNil() {
		return blob.ErrInvalidFileContent
	}

	if !node.DeletedAt.IsZero() {
		return ErrNodeNotFound
	}

	setCols := make([]string, 0, 4)

	if content.reader != nil {
		key, err := vfs.keyResolver.Resolve(ctx, node)
		if err != nil {
			return err
		}

		// Sniff the MIME type from the first 3 KiB, then stitch the sniffed
		// bytes back in front of the remaining stream before storing it.
		buf := make([]byte, 3072)
		n, err := io.ReadFull(content.reader, buf)
		if err != nil && err != io.EOF && err != io.ErrUnexpectedEOF {
			return err
		}
		buf = buf[:n]

		mt := mimetype.Detect(buf)
		cr := ioext.NewCountingReader(io.MultiReader(bytes.NewReader(buf), content.reader))

		err = vfs.blobStore.Put(ctx, key, cr)
		if err != nil {
			return err
		}

		if vfs.keyResolver.ShouldPersistKey() {
			node.BlobKey = key
			setCols = append(setCols, "blob_key")
		}

		node.MimeType = mt.String()
		node.Size = cr.Count()
		node.Status = NodeStatusReady

		setCols = append(setCols, "mime_type", "size", "status")
	} else {
		node.BlobKey = content.blobKey

		b, err := vfs.blobStore.ReadRange(ctx, content.blobKey, 0, 3072)
		if err != nil {
			return err
		}
		defer b.Close()

		buf := make([]byte, 3072)
		n, err := io.ReadFull(b, buf)
		if err != nil && err != io.EOF && err != io.ErrUnexpectedEOF {
			return err
		}
		buf = buf[:n]
		mt := mimetype.Detect(buf)
		node.MimeType = mt.String()
		node.Status = NodeStatusReady

		s, err := vfs.blobStore.ReadSize(ctx, content.blobKey)
		if err != nil {
			return err
		}
		node.Size = s

		setCols = append(setCols, "mime_type", "blob_key", "size", "status")
	}

	_, err := db.NewUpdate().Model(node).
		Column(setCols...).
		WherePK().
		Exec(ctx)
	if err != nil {
		return err
	}

	return nil
}

func (vfs *VirtualFS) CreateDirectory(ctx context.Context, db bun.IDB, accountID uuid.UUID, parentID uuid.UUID, name string) (*Node, error) {
	pid, err := vfs.generatePublicID()
	if err != nil {
		return nil, err
	}

	id, err := newNodeID()
	if err != nil {
		return nil, err
	}

	node := Node{
		ID:        id,
		PublicID:  pid,
		AccountID: accountID,
		ParentID:  parentID,
		Kind:      NodeKindDirectory,
		Status:    NodeStatusReady,
		Name:      name,
	}

	_, err = db.NewInsert().Model(&node).Exec(ctx)
	if err != nil {
		if database.IsUniqueViolation(err) {
			return nil, ErrNodeConflict
		}
		return nil, err
	}

	return &node, nil
}

func (vfs *VirtualFS) SoftDeleteNode(ctx context.Context, db bun.IDB, node *Node) error {
	if !node.IsAccessible() {
		return ErrNodeNotFound
	}

	_, err := db.NewUpdate().Model(node).
		WherePK().
		Where("deleted_at IS NULL").
		Where("status = ?", NodeStatusReady).
		Set("deleted_at = NOW()").
		Returning("deleted_at").
		Exec(ctx)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return ErrNodeNotFound
		}
		return err
	}

	return nil
}

func (vfs *VirtualFS) RestoreNode(ctx context.Context, db bun.IDB, node *Node) error {
	if node.Status != NodeStatusReady {
		return ErrNodeNotFound
	}

	_, err := db.NewUpdate().Model(node).
		WherePK().
		Where("deleted_at IS NOT NULL").
		Set("deleted_at = NULL").
		Returning("deleted_at").
		Exec(ctx)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return ErrNodeNotFound
		}
		return err
	}

	return nil
}

func (vfs *VirtualFS) RenameNode(ctx context.Context, db bun.IDB, node *Node, name string) error {
	if !node.IsAccessible() {
		return ErrNodeNotFound
	}

	_, err := db.NewUpdate().Model(node).
		WherePK().
		Where("status = ?", NodeStatusReady).
		Where("deleted_at IS NULL").
		Set("name = ?", name).
		Returning("name, updated_at").
		Exec(ctx)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return ErrNodeNotFound
		}
		return err
	}
	return nil
}

func (vfs *VirtualFS) MoveNode(ctx context.Context, db bun.IDB, node *Node, parentID uuid.UUID) error {
	if !node.IsAccessible() {
		return ErrNodeNotFound
	}

	oldKey, err := vfs.keyResolver.Resolve(ctx, node)
	if err != nil {
		return err
	}

	_, err = db.NewUpdate().Model(node).
		WherePK().
		Where("status = ?", NodeStatusReady).
		Where("deleted_at IS NULL").
		Set("parent_id = ?", parentID).
		Returning("parent_id, updated_at").
		Exec(ctx)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return ErrNodeNotFound
		}
		if database.IsUniqueViolation(err) {
			return ErrNodeConflict
		}
		return err
	}

	newKey, err := vfs.keyResolver.Resolve(ctx, node)
	if err != nil {
		return err
	}

	err = vfs.blobStore.Move(ctx, oldKey, newKey)
	if err != nil {
		return err
	}

	if vfs.keyResolver.ShouldPersistKey() {
		node.BlobKey = newKey
		_, err = db.NewUpdate().Model(node).
			WherePK().
			Set("blob_key = ?", newKey).
			Exec(ctx)
		if err != nil {
			return err
		}
	}

	return nil
}

func (vfs *VirtualFS) AbsolutePath(ctx context.Context, db bun.IDB, node *Node) (string, error) {
	if !node.IsAccessible() {
		return "", ErrNodeNotFound
	}
	return buildNodeAbsolutePath(ctx, db, node.ID)
}

func (vfs *VirtualFS) PermanentlyDeleteNode(ctx context.Context, db bun.IDB, node *Node) error {
	if !node.IsAccessible() {
		return ErrNodeNotFound
	}
	switch node.Kind {
	case NodeKindFile:
		return vfs.permanentlyDeleteFileNode(ctx, db, node)
	case NodeKindDirectory:
		return vfs.permanentlyDeleteDirectoryNode(ctx, db, node)
	default:
		return ErrUnsupportedOperation
	}
}

func (vfs *VirtualFS) permanentlyDeleteFileNode(ctx context.Context, db bun.IDB, node *Node) error {
	err := vfs.blobStore.Delete(ctx, node.BlobKey)
	if err != nil {
		return err
	}

	_, err = db.NewDelete().Model(node).WherePK().Exec(ctx)
	if err != nil {
		return err
	}

	return nil
}

func (vfs *VirtualFS) permanentlyDeleteDirectoryNode(ctx context.Context, db bun.IDB, node *Node) error {
	const descendantsQuery = `WITH RECURSIVE descendants AS (
	SELECT id, blob_key FROM vfs_nodes WHERE id = ?
	UNION ALL
	SELECT n.id, n.blob_key FROM vfs_nodes n
	JOIN descendants d ON n.parent_id = d.id
)
SELECT id, blob_key FROM descendants`

	type nodeRecord struct {
		ID      uuid.UUID `bun:"id"`
		BlobKey blob.Key  `bun:"blob_key"`
	}

	// If db is already a transaction, use it directly; otherwise start a new transaction
	var tx bun.IDB
	var startedTx *bun.Tx
	switch v := db.(type) {
	case *bun.DB:
		newTx, err := v.BeginTx(ctx, nil)
		if err != nil {
			return err
		}
		startedTx = &newTx
		tx = newTx
		defer func() {
			if startedTx != nil {
				(*startedTx).Rollback()
			}
		}()
	default:
		// Assume it's already a transaction
		tx = db
	}

	var records []nodeRecord
	err := tx.NewRaw(descendantsQuery, node.ID).Scan(ctx, &records)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return ErrNodeNotFound
		}
		return err
	}

	if len(records) == 0 {
		return ErrNodeNotFound
	}

	nodeIDs := make([]uuid.UUID, 0, len(records))
	blobKeys := make([]blob.Key, 0, len(records))
	for _, r := range records {
		nodeIDs = append(nodeIDs, r.ID)
		if !r.BlobKey.IsNil() {
			blobKeys = append(blobKeys, r.BlobKey)
		}
	}

	plan, err := vfs.keyResolver.ResolveDeletionKeys(ctx, node, blobKeys)
	if err != nil {
		return err
	}

	_, err = tx.NewDelete().
		Model((*Node)(nil)).
		Where("id IN (?)", bun.In(nodeIDs)).
		Exec(ctx)
	if err != nil {
		return err
	}

	if !plan.Prefix.IsNil() {
		_ = vfs.blobStore.DeletePrefix(ctx, plan.Prefix)
	} else {
		for _, key := range plan.Keys {
			_ = vfs.blobStore.Delete(ctx, key)
		}
	}

	// Only commit if we started the transaction
	if startedTx != nil {
		err := (*startedTx).Commit()
		startedTx = nil // Prevent defer from rolling back
		return err
	}

	return nil
}

func (vfs *VirtualFS) generatePublicID() (string, error) {
	var b [8]byte
	_, err := rand.Read(b[:])
	if err != nil {
		return "", err
	}
	n := binary.BigEndian.Uint64(b[:])
	return vfs.sqid.Encode([]uint64{n})
}
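The sniff-and-reattach pattern WriteFile uses is worth seeing in isolation: peek at the head of a stream for MIME detection, then replay the peeked bytes in front of the rest. A self-contained sketch (the HTML input is just sample data):

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"

	"github.com/gabriel-vasile/mimetype"
)

// sniff reads up to 3072 bytes for MIME detection, then returns a reader
// that replays the sniffed bytes followed by the rest of the stream.
func sniff(r io.Reader) (string, io.Reader, error) {
	buf := make([]byte, 3072)
	n, err := io.ReadFull(r, buf)
	if err != nil && err != io.EOF && err != io.ErrUnexpectedEOF {
		return "", nil, err
	}
	buf = buf[:n]
	mt := mimetype.Detect(buf)
	return mt.String(), io.MultiReader(bytes.NewReader(buf), r), nil
}

func main() {
	mime, body, err := sniff(strings.NewReader("<html><body>hi</body></html>"))
	if err != nil {
		panic(err)
	}
	rest, _ := io.ReadAll(body) // the full original stream, untouched
	fmt.Println(mime, len(rest))
}
```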
60  apps/cli/README.md  Normal file
@@ -0,0 +1,60 @@
# @drexa/cli

Admin CLI tool for managing Drexa resources.

## Usage

From the project root:

```bash
bun drexa <command> [subcommand] [options]
```

## Commands

### `generate apikey`

Generate a new API key for authentication.

```bash
bun drexa generate apikey
```

The command interactively prompts you (using Node.js readline) for:
- **Prefix**: A short identifier for the key (e.g., 'proxy', 'admin'). Cannot contain dashes.
- **Key byte length**: Length of the key in bytes (default: 32)
- **Description**: A description of what this key is for
- **Expiration date**: Optional expiration date in YYYY-MM-DD format

The command outputs:
- **Unhashed key**: Save this securely - it won't be shown again
- **Hashed key**: Store this in your database
- **Description**: The description you provided
- **Expiration date**: When the key expires (if set)

## Development

Run the CLI directly:

```bash
bun run apps/cli/index.ts <command>
```

## Project Structure

```
apps/cli/
├── index.ts       # Main entry point
├── prompts.ts     # Interactive prompt utilities
└── commands/      # Command structure mirrors CLI structure
    └── generate/
        ├── index.ts   # Generate command group
        └── apikey.ts  # API key generation command
```

## Adding New Commands

1. Create a new directory under `commands/` for command groups
2. Create command files following the pattern in `commands/generate/apikey.ts`
3. Export commands from an `index.ts` in the command group directory
4. Register the command group in the main `index.ts`
68  apps/cli/commands/generate/apikey.ts  Normal file
@@ -0,0 +1,68 @@
import { generateApiKey, newPrefix } from "@drexa/auth"
import chalk from "chalk"
import { Command } from "commander"
import { promptNumber, promptOptionalDate, promptText } from "../../prompts.ts"

export const apikeyCommand = new Command("apikey")
  .description("Generate a new API key")
  .action(async () => {
    console.log(chalk.bold.blue("\n🔑 Generate API Key\n"))

    // Prompt for all required information
    const prefixInput = await promptText(
      "Enter API key prefix (e.g., 'proxy', 'admin'):",
    )
    const prefix = newPrefix(prefixInput)

    if (!prefix) {
      console.error(
        chalk.red(
          '✗ Invalid prefix: cannot contain "-" character. Please use alphanumeric characters only.',
        ),
      )
      process.exit(1)
    }

    const keyByteLength = await promptNumber("Enter key byte length:", 32)
    const description = await promptText("Enter description:")
    const expiresAt = await promptOptionalDate("Enter expiration date")

    console.log(chalk.dim("\n⏳ Generating API key...\n"))

    // Generate the API key
    const result = await generateApiKey({
      prefix,
      keyByteLength,
      description,
      expiresAt,
    })

    // Display results
    console.log(chalk.green.bold("✓ API Key Generated Successfully!\n"))
    console.log(chalk.gray("─".repeat(60)))
    console.log(
      chalk.yellow.bold(
        "\n⚠️  IMPORTANT: Save the unhashed key now. It won't be shown again!\n",
      ),
    )
    console.log(chalk.bold("Unhashed Key ") + chalk.dim("(save this):"))
    console.log(chalk.green(`  ${result.unhashedKey}\n`))
    console.log(chalk.gray("─".repeat(60)))
    console.log(
      chalk.bold("\nHashed Key ") +
        chalk.dim("(store this in your database):"),
    )
    console.log(chalk.dim(`  ${result.hashedKey}\n`))
    console.log(chalk.bold("Description:"))
    console.log(chalk.white(`  ${result.description}\n`))

    if (result.expiresAt) {
      console.log(chalk.bold("Expires At:"))
      console.log(chalk.yellow(`  ${result.expiresAt.toISOString()}\n`))
    } else {
      console.log(chalk.bold("Expires At:"))
      console.log(chalk.dim("  Never\n"))
    }

    console.log(chalk.gray("─".repeat(60)) + "\n")
  })
6  apps/cli/commands/generate/index.ts  Normal file
@@ -0,0 +1,6 @@
import { Command } from "commander"
import { apikeyCommand } from "./apikey.ts"

export const generateCommand = new Command("generate")
  .description("Generate various resources")
  .addCommand(apikeyCommand)
17  apps/cli/index.ts  Executable file
@@ -0,0 +1,17 @@
#!/usr/bin/env bun

import { Command } from "commander"
import { generateCommand } from "./commands/generate/index.ts"

const program = new Command()

program
  .name("drexa")
  .description("Drexa CLI - Admin tools for managing Drexa resources")
  .version("0.1.0")

// Register command groups
program.addCommand(generateCommand)

// Parse command line arguments
program.parse()
23  apps/cli/package.json  Normal file
@@ -0,0 +1,23 @@
{
  "name": "@drexa/cli",
  "version": "0.1.0",
  "private": true,
  "type": "module",
  "bin": {
    "drexa": "./index.ts"
  },
  "scripts": {
    "cli": "bun run index.ts"
  },
  "dependencies": {
    "@drexa/auth": "workspace:*",
    "chalk": "^5.3.0",
    "commander": "^12.1.0"
  },
  "devDependencies": {
    "@types/bun": "latest"
  },
  "peerDependencies": {
    "typescript": "^5"
  }
}
111
apps/cli/prompts.ts
Normal file
111
apps/cli/prompts.ts
Normal file
@@ -0,0 +1,111 @@
|
||||
import * as readline from "node:readline/promises"
|
||||
import chalk from "chalk"
|
||||
|
||||
function createReadlineInterface() {
|
||||
return readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout,
|
||||
})
|
||||
}
|
||||
|
||||
export async function promptText(message: string): Promise<string> {
|
||||
const rl = createReadlineInterface()
|
||||
try {
|
||||
const input = await rl.question(chalk.cyan(`${message} `))
|
||||
|
||||
if (!input || input.trim() === "") {
|
||||
console.error(chalk.red("✗ Input is required"))
|
||||
process.exit(1)
|
||||
}
|
||||
return input.trim()
|
||||
} finally {
|
||||
rl.close()
|
||||
}
|
||||
}
|
||||
|
||||
export async function promptNumber(
|
||||
message: string,
|
||||
defaultValue?: number,
|
||||
): Promise<number> {
|
||||
const rl = createReadlineInterface()
|
||||
try {
|
||||
const defaultStr = defaultValue
|
||||
? chalk.dim(` (default: ${defaultValue})`)
|
||||
: ""
|
||||
const input = await rl.question(chalk.cyan(`${message}${defaultStr} `))
|
||||
|
||||
if ((!input || input.trim() === "") && defaultValue !== undefined) {
|
||||
return defaultValue
|
||||
}
|
||||
|
||||
if (!input || input.trim() === "") {
|
||||
console.error(chalk.red("✗ Input is required"))
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const num = Number.parseInt(input.trim(), 10)
|
||||
if (Number.isNaN(num) || num <= 0) {
|
||||
console.error(chalk.red("✗ Please enter a valid positive number"))
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
return num
|
||||
} finally {
|
||||
rl.close()
|
||||
}
|
||||
}
|
||||
|
||||
export async function promptOptionalDate(
|
||||
message: string,
|
||||
): Promise<Date | undefined> {
|
||||
const rl = createReadlineInterface()
|
||||
try {
|
||||
const input = await rl.question(
|
||||
chalk.cyan(`${message} `) +
|
||||
chalk.dim("(optional, format: YYYY-MM-DD) "),
|
||||
)
|
||||
|
||||
if (!input || input.trim() === "") {
|
||||
return undefined
|
||||
}
|
||||
|
||||
const date = new Date(input.trim())
|
||||
if (Number.isNaN(date.getTime())) {
|
||||
console.error(
|
||||
chalk.red("✗ Invalid date format. Please use YYYY-MM-DD"),
|
||||
)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
if (date < new Date()) {
|
||||
console.error(chalk.red("✗ Expiration date must be in the future"))
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
return date
|
||||
} finally {
|
||||
rl.close()
|
||||
}
|
||||
}
|
||||
|
||||
export async function promptConfirm(
|
||||
message: string,
|
||||
defaultValue = false,
|
||||
): Promise<boolean> {
|
||||
const rl = createReadlineInterface()
|
||||
try {
|
||||
const defaultStr = defaultValue
|
||||
? chalk.dim(" (Y/n)")
|
||||
: chalk.dim(" (y/N)")
|
||||
const input = await rl.question(chalk.cyan(`${message}${defaultStr} `))
|
||||
|
||||
if (!input || input.trim() === "") {
|
||||
return defaultValue
|
||||
}
|
||||
|
||||
const normalized = input.toLowerCase().trim()
|
||||
return normalized === "y" || normalized === "yes"
|
||||
} finally {
|
||||
rl.close()
|
||||
}
|
||||
}
|
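Aside: taken together, these helpers compose into short interactive flows. A minimal sketch of how a command might chain them, assuming it runs under Bun with top-level await (the prompt strings and the 32-byte default here are illustrative, not taken from the diff):

```ts
import { promptConfirm, promptNumber, promptText } from "./prompts.ts"

// Hypothetical flow: gather inputs, then confirm before doing any work.
const prefix = await promptText("Enter API key prefix:")
const byteLength = await promptNumber("Enter key byte length:", 32)
const proceed = await promptConfirm(
  `Generate a ${byteLength}-byte key with prefix "${prefix}"?`,
  true,
)
if (!proceed) process.exit(0)
```

Note that each helper exits the process on invalid input rather than re-prompting, so a flow like this never has to handle validation errors itself.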
83 apps/cli/test-example.md Normal file
@@ -0,0 +1,83 @@
# Testing the CLI

To test the API key generation interactively, run:

```bash
bun drexa generate apikey
```

## Example Session

The CLI now uses **chalk** for beautiful colored output!

```
$ bun drexa generate apikey

🔑 Generate API Key

Enter API key prefix (e.g., 'proxy', 'admin'): testkey
Enter key byte length: (default: 32)
Enter description: Test API Key for development
Enter expiration date (optional, format: YYYY-MM-DD):

⏳ Generating API key...

✓ API Key Generated Successfully!

────────────────────────────────────────────────────────────

⚠️  IMPORTANT: Save the unhashed key now. It won't be shown again!

Unhashed Key (save this):
  sk-testkey-AbCdEfGhIjKlMnOpQrStUvWxYz0123456789

────────────────────────────────────────────────────────────

Hashed Key (store this in your database):
  $argon2id$v=19$m=4,t=3,p=1$...

Description:
  Test API Key for development

Expires At:
  Never

────────────────────────────────────────────────────────────
```

### Color Scheme

- **Prompts**: Cyan text with dimmed hints
- **Success messages**: Green with checkmark
- **Warnings**: Yellow with warning icon
- **Errors**: Red with X mark
- **Important data**: Green (unhashed key), dimmed (hashed key)
- **Separators**: Gray lines

## Testing with Invalid Input

### Invalid prefix (contains dash)

```bash
$ bun drexa generate apikey
Enter API key prefix (e.g., 'proxy', 'admin'): test-key
✗ Invalid prefix: cannot contain "-" character. Please use alphanumeric characters only.
```

### Invalid key byte length

```bash
$ bun drexa generate apikey
Enter API key prefix (e.g., 'proxy', 'admin'): testkey
Enter key byte length: (default: 32) -5
✗ Please enter a valid positive number
```

### Invalid date format

```bash
$ bun drexa generate apikey
Enter API key prefix (e.g., 'proxy', 'admin'): testkey
Enter key byte length: (default: 32)
Enter description: Test
Enter expiration date (optional, format: YYYY-MM-DD): invalid-date
✗ Invalid date format. Please use YYYY-MM-DD
```

All error messages are displayed in red for better visibility.
28 apps/cli/tsconfig.json Normal file
@@ -0,0 +1,28 @@
{
  "compilerOptions": {
    // Environment setup & latest features
    "lib": ["ESNext"],
    "target": "ESNext",
    "module": "Preserve",
    "moduleDetection": "force",
    "allowJs": true,

    // Bundler mode
    "moduleResolution": "bundler",
    "allowImportingTsExtensions": true,
    "verbatimModuleSyntax": true,
    "noEmit": true,

    // Best practices
    "strict": true,
    "skipLibCheck": true,
    "noFallthroughCasesInSwitch": true,
    "noUncheckedIndexedAccess": true,
    "noImplicitOverride": true,

    // Some stricter flags (disabled by default)
    "noUnusedLocals": false,
    "noUnusedParameters": false,
    "noPropertyAccessFromIndexSignature": false
  }
}
@@ -1,4 +1,6 @@
# this is the url to the convex instance (NOT THE DASHBOARD)
VITE_CONVEX_URL=
# this is the convex url for invoking http actions
VITE_CONVEX_SITE_URL=
+# this is the url to the file proxy
+FILE_PROXY_URL=
@@ -1,39 +1,50 @@
-import { useRef, type FormEvent } from "react";
+import { type FormEvent, useRef } from "react"

export function APITester() {
-  const responseInputRef = useRef<HTMLTextAreaElement>(null);
+  const responseInputRef = useRef<HTMLTextAreaElement>(null)

-  const testEndpoint = async (e: FormEvent<HTMLFormElement>) => {
-    e.preventDefault();
+  const testEndpoint = async (e: FormEvent<HTMLFormElement>) => {
+    e.preventDefault()

-    try {
-      const form = e.currentTarget;
-      const formData = new FormData(form);
-      const endpoint = formData.get("endpoint") as string;
-      const url = new URL(endpoint, location.href);
-      const method = formData.get("method") as string;
-      const res = await fetch(url, { method });
+    try {
+      const form = e.currentTarget
+      const formData = new FormData(form)
+      const endpoint = formData.get("endpoint") as string
+      const url = new URL(endpoint, location.href)
+      const method = formData.get("method") as string
+      const res = await fetch(url, { method })

-      const data = await res.json();
-      responseInputRef.current!.value = JSON.stringify(data, null, 2);
-    } catch (error) {
-      responseInputRef.current!.value = String(error);
-    }
-  };
+      const data = await res.json()
+      responseInputRef.current!.value = JSON.stringify(data, null, 2)
+    } catch (error) {
+      responseInputRef.current!.value = String(error)
+    }
+  }

-  return (
-    <div className="api-tester">
-      <form onSubmit={testEndpoint} className="endpoint-row">
-        <select name="method" className="method">
-          <option value="GET">GET</option>
-          <option value="PUT">PUT</option>
-        </select>
-        <input type="text" name="endpoint" defaultValue="/api/hello" className="url-input" placeholder="/api/hello" />
-        <button type="submit" className="send-button">
-          Send
-        </button>
-      </form>
-      <textarea ref={responseInputRef} readOnly placeholder="Response will appear here..." className="response-area" />
-    </div>
-  );
+  return (
+    <div className="api-tester">
+      <form onSubmit={testEndpoint} className="endpoint-row">
+        <select name="method" className="method">
+          <option value="GET">GET</option>
+          <option value="PUT">PUT</option>
+        </select>
+        <input
+          type="text"
+          name="endpoint"
+          defaultValue="/api/hello"
+          className="url-input"
+          placeholder="/api/hello"
+        />
+        <button type="submit" className="send-button">
+          Send
+        </button>
+      </form>
+      <textarea
+        ref={responseInputRef}
+        readOnly
+        placeholder="Response will appear here..."
+        className="response-area"
+      />
+    </div>
+  )
}
@@ -1,92 +1,92 @@
-import * as React from "react"
+import type * as React from "react"

import { cn } from "@/lib/utils"

function Card({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="card"
      className={cn(
        "bg-card text-card-foreground flex flex-col gap-6 rounded-xl border py-6 shadow-sm",
-        className
+        className,
      )}
      {...props}
    />
  )
}

function CardHeader({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="card-header"
      className={cn(
        "@container/card-header grid auto-rows-min grid-rows-[auto_auto] items-start gap-2 px-6 has-data-[slot=card-action]:grid-cols-[1fr_auto] [.border-b]:pb-6",
-        className
+        className,
      )}
      {...props}
    />
  )
}

function CardTitle({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="card-title"
      className={cn("leading-none font-semibold", className)}
      {...props}
    />
  )
}

function CardDescription({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="card-description"
      className={cn("text-muted-foreground text-sm", className)}
      {...props}
    />
  )
}

function CardAction({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="card-action"
      className={cn(
        "col-start-2 row-span-2 row-start-1 self-start justify-self-end",
-        className
+        className,
      )}
      {...props}
    />
  )
}

function CardContent({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="card-content"
      className={cn("px-6", className)}
      {...props}
    />
  )
}

function CardFooter({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="card-footer"
      className={cn("flex items-center px-6 [.border-t]:pt-6", className)}
      {...props}
    />
  )
}

export {
  Card,
  CardHeader,
  CardFooter,
  CardTitle,
  CardAction,
  CardDescription,
  CardContent,
}
@@ -1,242 +1,241 @@
-import { useMemo } from "react"
import { cva, type VariantProps } from "class-variance-authority"
-
-import { cn } from "@/lib/utils"
+import { useMemo } from "react"
+import { Label } from "@/components/ui/label"
+import { Separator } from "@/components/ui/separator"
+import { cn } from "@/lib/utils"

function FieldSet({ className, ...props }: React.ComponentProps<"fieldset">) {
  return (
    <fieldset
      data-slot="field-set"
      className={cn(
        "flex flex-col gap-6",
        "has-[>[data-slot=checkbox-group]]:gap-3 has-[>[data-slot=radio-group]]:gap-3",
-        className
+        className,
      )}
      {...props}
    />
  )
}

function FieldLegend({
  className,
  variant = "legend",
  ...props
}: React.ComponentProps<"legend"> & { variant?: "legend" | "label" }) {
  return (
    <legend
      data-slot="field-legend"
      data-variant={variant}
      className={cn(
        "mb-3 font-medium",
        "data-[variant=legend]:text-base",
        "data-[variant=label]:text-sm",
-        className
+        className,
      )}
      {...props}
    />
  )
}

function FieldGroup({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="field-group"
      className={cn(
        "group/field-group @container/field-group flex w-full flex-col gap-7 data-[slot=checkbox-group]:gap-3 [&>[data-slot=field-group]]:gap-4",
-        className
+        className,
      )}
      {...props}
    />
  )
}

const fieldVariants = cva(
  "group/field flex w-full gap-3 data-[invalid=true]:text-destructive",
  {
    variants: {
      orientation: {
        vertical: ["flex-col [&>*]:w-full [&>.sr-only]:w-auto"],
        horizontal: [
          "flex-row items-center",
          "[&>[data-slot=field-label]]:flex-auto",
          "has-[>[data-slot=field-content]]:items-start has-[>[data-slot=field-content]]:[&>[role=checkbox],[role=radio]]:mt-px",
        ],
        responsive: [
          "flex-col [&>*]:w-full [&>.sr-only]:w-auto @md/field-group:flex-row @md/field-group:items-center @md/field-group:[&>*]:w-auto",
          "@md/field-group:[&>[data-slot=field-label]]:flex-auto",
          "@md/field-group:has-[>[data-slot=field-content]]:items-start @md/field-group:has-[>[data-slot=field-content]]:[&>[role=checkbox],[role=radio]]:mt-px",
        ],
      },
    },
    defaultVariants: {
      orientation: "vertical",
    },
-  }
+  },
)

function Field({
  className,
  orientation = "vertical",
  ...props
}: React.ComponentProps<"div"> & VariantProps<typeof fieldVariants>) {
  return (
    <div
      role="group"
      data-slot="field"
      data-orientation={orientation}
      className={cn(fieldVariants({ orientation }), className)}
      {...props}
    />
  )
}

function FieldContent({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="field-content"
      className={cn(
        "group/field-content flex flex-1 flex-col gap-1.5 leading-snug",
-        className
+        className,
      )}
      {...props}
    />
  )
}

function FieldLabel({
  className,
  ...props
}: React.ComponentProps<typeof Label>) {
  return (
    <Label
      data-slot="field-label"
      className={cn(
        "group/field-label peer/field-label flex w-fit gap-2 leading-snug group-data-[disabled=true]/field:opacity-50",
        "has-[>[data-slot=field]]:w-full has-[>[data-slot=field]]:flex-col has-[>[data-slot=field]]:rounded-md has-[>[data-slot=field]]:border [&>*]:data-[slot=field]:p-4",
        "has-data-[state=checked]:bg-primary/5 has-data-[state=checked]:border-primary dark:has-data-[state=checked]:bg-primary/10",
-        className
+        className,
      )}
      {...props}
    />
  )
}

function FieldTitle({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="field-label"
      className={cn(
        "flex w-fit items-center gap-2 text-sm leading-snug font-medium group-data-[disabled=true]/field:opacity-50",
-        className
+        className,
      )}
      {...props}
    />
  )
}

function FieldDescription({ className, ...props }: React.ComponentProps<"p">) {
  return (
    <p
      data-slot="field-description"
      className={cn(
        "text-muted-foreground text-sm leading-normal font-normal group-has-[[data-orientation=horizontal]]/field:text-balance",
        "last:mt-0 nth-last-2:-mt-1 [[data-variant=legend]+&]:-mt-1.5",
        "[&>a:hover]:text-primary [&>a]:underline [&>a]:underline-offset-4",
-        className
+        className,
      )}
      {...props}
    />
  )
}

function FieldSeparator({
  children,
  className,
  ...props
}: React.ComponentProps<"div"> & {
  children?: React.ReactNode
}) {
  return (
    <div
      data-slot="field-separator"
      data-content={!!children}
      className={cn(
        "relative -my-2 h-5 text-sm group-data-[variant=outline]/field-group:-mb-2",
-        className
+        className,
      )}
      {...props}
    >
      <Separator className="absolute inset-0 top-1/2" />
      {children && (
        <span
          className="bg-background text-muted-foreground relative mx-auto block w-fit px-2"
          data-slot="field-separator-content"
        >
          {children}
        </span>
      )}
    </div>
  )
}

function FieldError({
  className,
  children,
  errors,
  ...props
}: React.ComponentProps<"div"> & {
  errors?: Array<{ message?: string } | undefined>
}) {
  const content = useMemo(() => {
    if (children) {
      return children
    }

    if (!errors) {
      return null
    }

    if (errors?.length === 1 && errors[0]?.message) {
      return errors[0].message
    }

    return (
      <ul className="ml-4 flex list-disc flex-col gap-1">
        {errors.map(
          (error, index) =>
-            error?.message && <li key={index}>{error.message}</li>
+            error?.message && <li key={index}>{error.message}</li>,
        )}
      </ul>
    )
  }, [children, errors])

  if (!content) {
    return null
  }

  return (
    <div
      role="alert"
      data-slot="field-error"
      className={cn("text-destructive text-sm font-normal", className)}
      {...props}
    >
      {content}
    </div>
  )
}

export {
  Field,
  FieldLabel,
  FieldDescription,
  FieldError,
  FieldGroup,
  FieldLegend,
  FieldSeparator,
  FieldSet,
  FieldContent,
  FieldTitle,
}
@@ -1,22 +1,22 @@
-import * as React from "react"
import * as LabelPrimitive from "@radix-ui/react-label"
+import type * as React from "react"

import { cn } from "@/lib/utils"

function Label({
  className,
  ...props
}: React.ComponentProps<typeof LabelPrimitive.Root>) {
  return (
    <LabelPrimitive.Root
      data-slot="label"
      className={cn(
        "flex items-center gap-2 text-sm leading-none font-medium select-none group-data-[disabled=true]:pointer-events-none group-data-[disabled=true]:opacity-50 peer-disabled:cursor-not-allowed peer-disabled:opacity-50",
-        className
+        className,
      )}
      {...props}
    />
  )
}

export { Label }
21 apps/drive-web/src/components/ui/middle-truncated-text.tsx Normal file
@@ -0,0 +1,21 @@
import { cn } from "@/lib/utils"

function MiddleTruncatedText({
  children,
  className,
}: {
  children: string
  className?: string
}) {
  const LAST_PART_LENGTH = 3
  const lastPart = children.slice(children.length - LAST_PART_LENGTH)
  const firstPart = children.slice(0, children.length - LAST_PART_LENGTH)
  return (
    <p className={cn("max-w-full flex", className)}>
      <span className="flex-1 truncate">{firstPart}</span>
      <span className="w-min">{lastPart}</span>
    </p>
  )
}

export { MiddleTruncatedText }
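Aside: a quick usage sketch. The component pins the last `LAST_PART_LENGTH` (3) characters in a fixed-width span while the leading span gets `truncate`, so the browser's ellipsis eats the middle of the string rather than its tail (the container width below is illustrative):

```tsx
import { MiddleTruncatedText } from "@/components/ui/middle-truncated-text"

// In a narrow container, "very-long-file-name-2024-report.pdf" renders
// roughly as "very-long-file-name-20….pdf": the final "pdf" stays visible.
export function Example() {
  return (
    <div className="w-40">
      <MiddleTruncatedText className="text-sm">
        very-long-file-name-2024-report.pdf
      </MiddleTruncatedText>
    </div>
  )
}
```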
@@ -1,29 +1,29 @@
-import * as React from "react"
import * as ProgressPrimitive from "@radix-ui/react-progress"
+import type * as React from "react"

import { cn } from "@/lib/utils"

function Progress({
  className,
  value,
  ...props
}: React.ComponentProps<typeof ProgressPrimitive.Root>) {
  return (
    <ProgressPrimitive.Root
      data-slot="progress"
      className={cn(
        "bg-primary/20 relative h-2 w-full overflow-hidden rounded-full",
-        className
+        className,
      )}
      {...props}
    >
      <ProgressPrimitive.Indicator
        data-slot="progress-indicator"
        className="bg-primary h-full w-full flex-1 transition-all"
        style={{ transform: `translateX(-${100 - (value || 0)}%)` }}
      />
    </ProgressPrimitive.Root>
  )
}

export { Progress }
@@ -1,28 +1,28 @@
"use client"

-import * as React from "react"
import * as SeparatorPrimitive from "@radix-ui/react-separator"
+import type * as React from "react"

import { cn } from "@/lib/utils"

function Separator({
  className,
  orientation = "horizontal",
  decorative = true,
  ...props
}: React.ComponentProps<typeof SeparatorPrimitive.Root>) {
  return (
    <SeparatorPrimitive.Root
      data-slot="separator"
      decorative={decorative}
      orientation={orientation}
      className={cn(
        "bg-border shrink-0 data-[orientation=horizontal]:h-px data-[orientation=horizontal]:w-full data-[orientation=vertical]:h-full data-[orientation=vertical]:w-px",
-        className
+        className,
      )}
      {...props}
    />
  )
}

export { Separator }
@@ -1,15 +1,25 @@
import { api } from "@fileone/convex/api"
-import { Link, useLocation } from "@tanstack/react-router"
-import { useQuery as useConvexQuery } from "convex/react"
-import { useAtomValue } from "jotai"
+import { newDirectoryHandle } from "@fileone/convex/filesystem"
+import { useMutation } from "@tanstack/react-query"
+import { Link, useLocation, useParams } from "@tanstack/react-router"
+import {
+  useMutation as useConvexMutation,
+  useQuery as useConvexQuery,
+} from "convex/react"
+import { useAtomValue, useSetAtom, useStore } from "jotai"
import {
+  CircleXIcon,
+  ClockIcon,
  FilesIcon,
  HomeIcon,
+  FolderInputIcon,
  LogOutIcon,
+  ScissorsIcon,
  SettingsIcon,
  TrashIcon,
  User2Icon,
} from "lucide-react"
+import { toast } from "sonner"
import { Card, CardFooter, CardHeader, CardTitle } from "@/components/ui/card"
import {
  DropdownMenu,
  DropdownMenuContent,
@@ -26,7 +36,10 @@ import {
  SidebarMenuButton,
  SidebarMenuItem,
} from "@/components/ui/sidebar"
+import { formatError } from "@/lib/error"
import { Button } from "../components/ui/button"
import { LoadingSpinner } from "../components/ui/loading-spinner"
+import { clearCutItemsAtom, cutHandlesAtom } from "../files/store"
import { backgroundTaskProgressAtom } from "./state"

export function DashboardSidebar() {
@@ -46,6 +59,7 @@ export function DashboardSidebar() {
        </SidebarContent>
        <SidebarFooter>
          <SidebarMenu>
+            <CutItemsCard />
            <BackgroundTaskProgressItem />
          </SidebarMenu>
        </SidebarFooter>
@@ -66,10 +80,10 @@ function MainSidebarMenu() {
  return (
    <SidebarMenu>
      <SidebarMenuItem>
-        <SidebarMenuButton asChild isActive={isActive("/")}>
-          <Link to="/">
-            <HomeIcon />
-            <span>Home</span>
+        <SidebarMenuButton asChild isActive={isActive("/recent")}>
+          <Link to="/recent">
+            <ClockIcon />
+            <span>Recent</span>
          </Link>
        </SidebarMenuButton>
      </SidebarMenuItem>
@@ -134,6 +148,93 @@ function BackgroundTaskProgressItem() {
  )
}

+/**
+ * Displays the number of cut items and allows the user to perform actions on them, such as moving them to a target directory.
+ * Visible when there are cut items.
+ */
+function CutItemsCard() {
+  const { directoryId } = useParams({ strict: false })
+  const cutHandles = useAtomValue(cutHandlesAtom)
+  const clearCutItems = useSetAtom(clearCutItemsAtom)
+  const setCutHandles = useSetAtom(cutHandlesAtom)
+  const setBackgroundTaskProgress = useSetAtom(backgroundTaskProgressAtom)
+  const store = useStore()
+
+  const _moveItems = useConvexMutation(api.filesystem.moveItems)
+  const { mutate: moveItems } = useMutation({
+    mutationFn: _moveItems,
+    onMutate: () => {
+      setBackgroundTaskProgress({
+        label: "Moving items…",
+      })
+      const cutHandles = store.get(cutHandlesAtom)
+      clearCutItems()
+      return { cutHandles }
+    },
+    onError: (error, _variables, context) => {
+      if (context?.cutHandles) {
+        setCutHandles(context.cutHandles)
+      }
+      toast.error("Failed to move items", {
+        description: formatError(error),
+      })
+    },
+    onSuccess: () => {
+      toast.success("Items moved")
+    },
+    onSettled: () => {
+      setBackgroundTaskProgress(null)
+    },
+  })
+
+  if (cutHandles.length === 0) return null
+
+  const moveCutItems = () => {
+    if (directoryId) {
+      moveItems({
+        targetDirectory: newDirectoryHandle(directoryId),
+        items: cutHandles,
+      })
+    }
+  }
+
+  return (
+    <SidebarMenuItem>
+      <Card className="p-0 gap-0 rounded-md overflow-clip">
+        <CardHeader className="px-3.5 py-1.5! gap-0 border-b border-b-primary-foreground/10 bg-primary text-primary-foreground">
+          <CardTitle className="p-0 m-0 text-xs uppercase">
+            <div className="flex items-center gap-1.5">
+              <ScissorsIcon size={16} /> {cutHandles.length} Cut
+              Items
+            </div>
+          </CardTitle>
+        </CardHeader>
+        <CardFooter className="p-1 flex flex-col">
+          <Button
+            size="sm"
+            variant="ghost"
+            className="w-full justify-start transition-none"
+            disabled={!directoryId}
+            onClick={moveCutItems}
+          >
+            <FolderInputIcon size={16} />
+            Move items here
+          </Button>
+          <Button
+            size="sm"
+            variant="ghost"
+            className="w-full justify-start transition-none"
+            onClick={() => clearCutItems()}
+          >
+            <CircleXIcon size={16} />
+            Clear
+          </Button>
+        </CardFooter>
+      </Card>
+    </SidebarMenuItem>
+  )
+}

function UserMenu() {
  function handleSignOut() {}
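Aside: the `onMutate` / `onError` pair in `CutItemsCard` is TanStack Query's standard optimistic-update contract: whatever `onMutate` returns becomes the `context` argument of `onError`, which makes snapshot-and-restore cheap. A minimal sketch of the same pattern with plain setters standing in for the jotai atoms (all names here are illustrative):

```ts
import { useMutation } from "@tanstack/react-query"

function useMoveWithRollback(
  doMove: (items: string[]) => Promise<void>,
  getCut: () => string[],
  setCut: (items: string[]) => void,
) {
  return useMutation({
    mutationFn: doMove,
    // Snapshot and optimistically clear before the request fires…
    onMutate: () => {
      const snapshot = getCut()
      setCut([])
      return { snapshot }
    },
    // …and restore the snapshot if the server rejects the move.
    onError: (_error, _items, context) => {
      if (context?.snapshot) setCut(context.snapshot)
    },
  })
}
```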
@@ -113,4 +113,4 @@ function RenameMenuItem() {
      Rename
    </ContextMenuItem>
  )
}
84 apps/drive-web/src/files/file-grid.tsx Normal file
@@ -0,0 +1,84 @@
import type { Doc, Id } from "@fileone/convex/dataModel"
import { memo, useCallback } from "react"
import { TextFileIcon } from "@/components/icons/text-file-icon"
import { MiddleTruncatedText } from "@/components/ui/middle-truncated-text"
import { cn } from "@/lib/utils"

export type FileGridSelection = Set<Id<"files">>

export function FileGrid({
  files,
  selectedFiles = new Set(),
  onSelectionChange,
  onContextMenu,
}: {
  files: Doc<"files">[]
  selectedFiles?: FileGridSelection
  onSelectionChange?: (selection: FileGridSelection) => void
  onContextMenu?: (file: Doc<"files">, event: React.MouseEvent) => void
}) {
  const onItemSelect = useCallback(
    (file: Doc<"files">) => {
      onSelectionChange?.(new Set([file._id]))
    },
    [onSelectionChange],
  )

  const onItemContextMenu = useCallback(
    (file: Doc<"files">, event: React.MouseEvent) => {
      onContextMenu?.(file, event)
      onSelectionChange?.(new Set([file._id]))
    },
    [onContextMenu, onSelectionChange],
  )

  return (
    <div className="grid auto-cols-max grid-flow-col gap-3">
      {files.map((file) => (
        <FileGridItem
          selected={selectedFiles.has(file._id)}
          key={file._id}
          file={file}
          onSelect={onItemSelect}
          onContextMenu={onItemContextMenu}
        />
      ))}
    </div>
  )
}

const FileGridItem = memo(function FileGridItem({
  selected,
  file,
  onSelect,
  onContextMenu,
}: {
  selected: boolean
  file: Doc<"files">
  onSelect?: (file: Doc<"files">) => void
  onContextMenu?: (file: Doc<"files">, event: React.MouseEvent) => void
}) {
  return (
    <button
      type="button"
      key={file._id}
      className={cn(
        "flex flex-col gap-2 items-center justify-center w-24 p-[calc(var(--spacing)*1+1px)] rounded-md",
        { "bg-muted border border-border p-1": selected },
      )}
      onClick={() => {
        onSelect?.(file)
      }}
      onContextMenu={(event) => {
        onContextMenu?.(file, event)
      }}
    >
      <TextFileIcon className="size-10" />
      <MiddleTruncatedText className="text-sm">
        {file.name}
      </MiddleTruncatedText>
    </button>
  )
})

export { FileGridItem }
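Aside: `FileGrid` is fully controlled; selection lives in the parent and the grid only reports single-item selections. A sketch of wiring it up (the `files` prop and the context-menu handler are placeholders):

```tsx
import type { Doc } from "@fileone/convex/dataModel"
import { useState } from "react"
import { FileGrid, type FileGridSelection } from "@/files/file-grid"

export function FilesPanel({ files }: { files: Doc<"files">[] }) {
  const [selection, setSelection] = useState<FileGridSelection>(new Set())

  return (
    <FileGrid
      files={files}
      selectedFiles={selection}
      onSelectionChange={setSelection}
      onContextMenu={(file, event) => {
        // Placeholder: a real handler would open a context menu here.
        event.preventDefault()
        console.log("context menu for", file.name)
      }}
    />
  )
}
```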
@@ -1,20 +1,20 @@
import type { Doc } from "@fileone/convex/dataModel"
+import type { OpenedFile } from "@fileone/convex/filesystem"
import { ImagePreviewDialog } from "./image-preview-dialog"

export function FilePreviewDialog({
-  file,
+  openedFile,
  onClose,
}: {
-  file: Doc<"files">
+  openedFile: OpenedFile
  onClose: () => void
}) {
-  if (!file) return null
-
-  switch (file.mimeType) {
+  switch (openedFile.file.mimeType) {
    case "image/jpeg":
    case "image/png":
    case "image/gif":
-      return <ImagePreviewDialog file={file} onClose={onClose} />
+      return (
+        <ImagePreviewDialog openedFile={openedFile} onClose={onClose} />
+      )
    default:
      return null
  }
3 apps/drive-web/src/files/file-share.ts Normal file
@@ -0,0 +1,3 @@
export function fileShareUrl(shareToken: string) {
  return `${import.meta.env.VITE_FILE_PROXY_URL}/files/${shareToken}`
}
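Aside: Vite only exposes environment variables prefixed with `VITE_` to client code, and `import.meta.env.VITE_FILE_PROXY_URL` is substituted statically at build time, so the variable has to be set when the web app is built. A small sketch of typing it, assuming the project uses the usual `vite-env.d.ts` augmentation (not shown in this diff):

```ts
// vite-env.d.ts (hypothetical): make the proxy URL a typed, required string.
interface ImportMetaEnv {
  readonly VITE_FILE_PROXY_URL: string
}

interface ImportMeta {
  readonly env: ImportMetaEnv
}
```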
@@ -1,7 +1,5 @@
import { api } from "@fileone/convex/api"
-import type { Doc } from "@fileone/convex/dataModel"
+import type { OpenedFile } from "@fileone/convex/filesystem"
import { DialogTitle } from "@radix-ui/react-dialog"
-import { useQuery as useConvexQuery } from "convex/react"
import { atom, useAtom, useAtomValue, useSetAtom } from "jotai"
import {
  DownloadIcon,
@@ -18,9 +16,8 @@
  DialogClose,
  DialogContent,
  DialogHeader,
-  DialogOverlay,
} from "@/components/ui/dialog"
-import { LoadingSpinner } from "@/components/ui/loading-spinner"
+import { fileShareUrl } from "./file-share"

const zoomLevelAtom = atom(
  1,
@@ -35,15 +32,12 @@
)

export function ImagePreviewDialog({
-  file,
+  openedFile,
  onClose,
}: {
-  file: Doc<"files">
+  openedFile: OpenedFile
  onClose: () => void
}) {
-  const fileUrl = useConvexQuery(api.filesystem.fetchFileUrl, {
-    fileId: file._id,
-  })
  const setZoomLevel = useSetAtom(zoomLevelAtom)

  useEffect(
@@ -62,23 +56,12 @@
      }
    }}
  >
-    <DialogOverlay className="flex items-center justify-center">
-      {!fileUrl ? (
-        <LoadingSpinner className="text-neutral-200 size-10" />
-      ) : null}
-    </DialogOverlay>
-    {fileUrl ? <PreviewContent fileUrl={fileUrl} file={file} /> : null}
+    <PreviewContent openedFile={openedFile} />
    </Dialog>
  )
}

-function PreviewContent({
-  fileUrl,
-  file,
-}: {
-  fileUrl: string
-  file: Doc<"files">
-}) {
+function PreviewContent({ openedFile }: { openedFile: OpenedFile }) {
  return (
    <DialogContent
      showCloseButton={false}
@@ -86,10 +69,10 @@
    >
      <DialogHeader className="overflow-auto border-b border-b-border p-4 flex flex-row items-center justify-between">
        <DialogTitle className="truncate flex-1">
-          {file.name}
+          {openedFile.file.name}
        </DialogTitle>
        <div className="flex flex-row items-center space-x-2">
-          <Toolbar fileUrl={fileUrl} file={file} />
+          <Toolbar openedFile={openedFile} />
          <Button variant="ghost" size="icon" asChild>
            <DialogClose>
              <XIcon />
@@ -99,13 +82,13 @@
        </div>
      </DialogHeader>
      <div className="w-full h-full flex items-center justify-center max-h-[calc(100vh-10rem)] overflow-auto">
-        <ImagePreview fileUrl={fileUrl} file={file} />
+        <ImagePreview openedFile={openedFile} />
      </div>
    </DialogContent>
  )
}

-function Toolbar({ fileUrl, file }: { fileUrl: string; file: Doc<"files"> }) {
+function Toolbar({ openedFile }: { openedFile: OpenedFile }) {
  const setZoomLevel = useSetAtom(zoomLevelAtom)
  const zoomInterval = useRef<ReturnType<typeof setInterval> | null>(null)

@@ -159,8 +142,8 @@ function Toolbar()
      </Button>
      <Button asChild>
        <a
-          href={fileUrl}
-          download={file.name}
+          href={fileShareUrl(openedFile.shareToken)}
+          download={openedFile.file.name}
          target="_blank"
          className="flex flex-row items-center"
        >
@@ -191,18 +174,12 @@
  )
}

-function ImagePreview({
-  fileUrl,
-  file,
-}: {
-  fileUrl: string
-  file: Doc<"files">
-}) {
+function ImagePreview({ openedFile }: { openedFile: OpenedFile }) {
  const zoomLevel = useAtomValue(zoomLevelAtom)
  return (
    <img
-      src={fileUrl}
-      alt={file.name}
+      src={fileShareUrl(openedFile.shareToken)}
+      alt={openedFile.file.name}
      className="object-contain"
      style={{ transform: `scale(${zoomLevel})` }}
    />
|
||||
import type { Id } from "@fileone/convex/dataModel"
|
||||
import type {
|
||||
DirectoryItem,
|
||||
DirectoryItemKind,
|
||||
} from "@fileone/convex/types"
|
||||
import type { DirectoryItem, DirectoryItemKind } from "@fileone/convex/types"
|
||||
import type { RowSelectionState } from "@tanstack/react-table"
|
||||
import { atom } from "jotai"
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
+import type { FileSystemHandle } from "@fileone/convex/filesystem"
import { atom } from "jotai"
import { atomFamily } from "jotai/utils"

@@ -53,12 +54,9 @@ export const clearFileUploadStatusesAtom = atom(
  },
)

-export const clearAllFileUploadStatusesAtom = atom(
-  null,
-  (get, set) => {
-    set(fileUploadStatusesAtom, {})
-  },
-)
+export const clearAllFileUploadStatusesAtom = atom(null, (_, set) => {
+  set(fileUploadStatusesAtom, {})
+})

export const fileUploadCountAtom = atom(
  (get) => Object.keys(get(fileUploadStatusesAtom)).length,
@@ -95,3 +93,8 @@ export const hasFileUploadsErrorAtom = atom((get) => {
  }
  return false
})

+export const cutHandlesAtom = atom<FileSystemHandle[]>([])
+export const clearCutItemsAtom = atom(null, (_, set) => {
+  set(cutHandlesAtom, [])
+})
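Aside: `atom(null, write)` is jotai's write-only (action) atom form: the read side is `null`, and components trigger the write function with `useSetAtom`. A minimal consumer of the cut-items pair above (the component itself is illustrative):

```tsx
import { useAtomValue, useSetAtom } from "jotai"
import { clearCutItemsAtom, cutHandlesAtom } from "@/files/store"

// Hypothetical toolbar button: shows how many items are cut, clears on click.
export function ClearCutButton() {
  const cutHandles = useAtomValue(cutHandlesAtom)
  const clearCutItems = useSetAtom(clearCutItemsAtom)

  if (cutHandles.length === 0) return null
  return (
    <button type="button" onClick={() => clearCutItems()}>
      Clear {cutHandles.length} cut item(s)
    </button>
  )
}
```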
@@ -88,7 +88,6 @@ function useUploadFilesAtom({
      )
    },
  }).catch((error) => {
-    console.log("error", error)
    store.set(
      fileUploadStatusAtomFamily(pickedFile.id),
      {
@@ -130,6 +129,9 @@
          toast.success("All files uploaded successfully")
        }
      },
+      onError: (error) => {
+        toast.error(formatError(error))
+      },
    }),
  [uploadFile, store.set],
)
@@ -270,6 +272,7 @@
        onClick={openFilePicker}
        uploadFilesAtom={uploadFilesAtom}
      />
+      <ClearUploadErrorsButton />
      <UploadButton
        uploadFilesAtom={uploadFilesAtom}
        onClick={onUploadButtonClick}
@@ -373,10 +376,10 @@
  uploadFilesAtom: UploadFilesAtom
}) {
  const pickedFiles = useAtomValue(pickedFilesAtom)
-  const { data: uploadResults, isPending: isUploading } =
-    useAtomValue(uploadFilesAtom)
+  const fileUploadCount = useAtomValue(fileUploadCountAtom)
+  const { isPending: isUploading } = useAtomValue(uploadFilesAtom)

-  if (pickedFiles.length === 0 || uploadResults) {
+  if (pickedFiles.length === 0 || fileUploadCount > 0) {
    return null
  }
@@ -387,6 +390,29 @@
  )
}

+function ClearUploadErrorsButton() {
+  const hasUploadErrors = useAtomValue(hasFileUploadsErrorAtom)
+  const clearAllFileUploadStatuses = useSetAtom(
+    clearAllFileUploadStatusesAtom,
+  )
+  const setPickedFiles = useSetAtom(pickedFilesAtom)
+
+  if (!hasUploadErrors) {
+    return null
+  }
+
+  function clearUploadErrors() {
+    setPickedFiles([])
+    clearAllFileUploadStatuses()
+  }
+
+  return (
+    <Button variant="outline" onClick={clearUploadErrors}>
+      Clear uploads
+    </Button>
+  )
+}

function UploadButton({
  uploadFilesAtom,
  onClick,
@@ -533,7 +559,6 @@
}) {
  const fileUploadAtom = fileUploadStatusAtomFamily(pickedFile.id)
  const fileUpload = useAtomValue(fileUploadAtom)
-  console.log("fileUpload", fileUpload)
  const { file, id } = pickedFile

  let statusIndicator: React.ReactNode
@@ -54,7 +54,7 @@ export function useFileDrop({
  errors: Err.ApplicationErrorData[]
}) => {
  const conflictCount = errors.reduce((acc, error) => {
-    if (error.code === Err.Code.Conflict) {
+    if (error.code === Err.ErrorCode.Conflict) {
      return acc + 1
    }
    return acc
@@ -9,7 +9,7 @@ function useUploadFile({
  targetDirectory: Doc<"directories">
}) {
  const generateUploadUrl = useConvexMutation(api.files.generateUploadUrl)
-  const saveFile = useConvexMutation(api.files.saveFile)
+  const saveFile = useConvexMutation(api.filesystem.saveFile)

  async function upload({
    file,
@@ -44,8 +44,6 @@
      saveFile({
        storageId,
        name: file.name,
        size: file.size,
        mimeType: file.type,
        directoryId: targetDirectory._id,
      }),
    )
@@ -1,7 +1,9 @@
import {
-  Code as ErrorCode,
+  type ApplicationErrorData,
+  ErrorCode,
  isApplicationError,
} from "@fileone/convex/error"
+import { ConvexError } from "convex/values"
import { toast } from "sonner"

const ERROR_MESSAGE = {
@@ -9,13 +11,19 @@ const ERROR_MESSAGE = {
  [ErrorCode.FileExists]: "File already exists",
  [ErrorCode.Internal]: "Internal application error",
  [ErrorCode.Conflict]: "Conflict",
  [ErrorCode.DirectoryNotFound]: "Directory not found",
  [ErrorCode.FileNotFound]: "File not found",
  [ErrorCode.Unauthenticated]: "Unauthenticated",
  [ErrorCode.NotFound]: "Not found",
  [ErrorCode.StorageQuotaExceeded]: "Storage is full",
} as const

+export function isApplicationConvexError(
+  error: unknown,
+): error is ConvexError<ApplicationErrorData> {
+  return error instanceof ConvexError && isApplicationError(error.data)
+}
+
export function formatError(error: unknown): string {
-  if (isApplicationError(error)) {
+  if (isApplicationConvexError(error)) {
    return ERROR_MESSAGE[error.data.code]
  }
  if (error instanceof Error) {
@@ -25,8 +33,12 @@ export function formatError(error: unknown): string {
}

export function defaultOnError(error: unknown) {
-  console.log(error)
-  toast.error(formatError(error))
+  if (isApplicationConvexError(error)) {
+    toast.error(formatError(error))
+  } else {
+    console.error("Catastrophic error:", error)
+    toast.error("An unexpected error occurred")
+  }
}

export function withDefaultOnError(fn: (error: unknown) => void) {
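Aside: the split in `defaultOnError` means known application errors (narrowed by the `ConvexError` type guard) surface as mapped, user-friendly toasts, while anything else is logged and reported generically. A sketch of wiring it into a mutation's error path (the mutation itself is a placeholder):

```ts
import { useMutation } from "@tanstack/react-query"
import { defaultOnError } from "@/lib/error"

// Hypothetical delete mutation: application errors become mapped toasts,
// unexpected failures fall through to the generic branch.
function useDeleteItem(deleteItem: (id: string) => Promise<void>) {
  return useMutation({
    mutationFn: deleteItem,
    onError: defaultOnError,
  })
}
```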
@@ -15,6 +15,7 @@ import { Route as AuthenticatedRouteImport } from './routes/_authenticated'
import { Route as AuthenticatedIndexRouteImport } from './routes/_authenticated/index'
import { Route as LoginCallbackRouteImport } from './routes/login_.callback'
import { Route as AuthenticatedSidebarLayoutRouteImport } from './routes/_authenticated/_sidebar-layout'
+import { Route as AuthenticatedSidebarLayoutRecentRouteImport } from './routes/_authenticated/_sidebar-layout/recent'
import { Route as AuthenticatedSidebarLayoutHomeRouteImport } from './routes/_authenticated/_sidebar-layout/home'
import { Route as AuthenticatedSidebarLayoutDirectoriesDirectoryIdRouteImport } from './routes/_authenticated/_sidebar-layout/directories.$directoryId'
import { Route as AuthenticatedSidebarLayoutTrashDirectoriesDirectoryIdRouteImport } from './routes/_authenticated/_sidebar-layout/trash.directories.$directoryId'
@@ -48,6 +49,12 @@ const AuthenticatedSidebarLayoutRoute =
    id: '/_sidebar-layout',
    getParentRoute: () => AuthenticatedRoute,
  } as any)
+const AuthenticatedSidebarLayoutRecentRoute =
+  AuthenticatedSidebarLayoutRecentRouteImport.update({
+    id: '/recent',
+    path: '/recent',
+    getParentRoute: () => AuthenticatedSidebarLayoutRoute,
+  } as any)
const AuthenticatedSidebarLayoutHomeRoute =
  AuthenticatedSidebarLayoutHomeRouteImport.update({
    id: '/home',
@@ -73,6 +80,7 @@ export interface FileRoutesByFullPath {
  '/login/callback': typeof LoginCallbackRoute
  '/': typeof AuthenticatedIndexRoute
  '/home': typeof AuthenticatedSidebarLayoutHomeRoute
+  '/recent': typeof AuthenticatedSidebarLayoutRecentRoute
  '/directories/$directoryId': typeof AuthenticatedSidebarLayoutDirectoriesDirectoryIdRoute
  '/trash/directories/$directoryId': typeof AuthenticatedSidebarLayoutTrashDirectoriesDirectoryIdRoute
}
@@ -82,6 +90,7 @@ export interface FileRoutesByTo {
  '/login/callback': typeof LoginCallbackRoute
  '/': typeof AuthenticatedIndexRoute
  '/home': typeof AuthenticatedSidebarLayoutHomeRoute
+  '/recent': typeof AuthenticatedSidebarLayoutRecentRoute
  '/directories/$directoryId': typeof AuthenticatedSidebarLayoutDirectoriesDirectoryIdRoute
  '/trash/directories/$directoryId': typeof AuthenticatedSidebarLayoutTrashDirectoriesDirectoryIdRoute
}
@@ -94,6 +103,7 @@ export interface FileRoutesById {
  '/login_/callback': typeof LoginCallbackRoute
  '/_authenticated/': typeof AuthenticatedIndexRoute
  '/_authenticated/_sidebar-layout/home': typeof AuthenticatedSidebarLayoutHomeRoute
+  '/_authenticated/_sidebar-layout/recent': typeof AuthenticatedSidebarLayoutRecentRoute
  '/_authenticated/_sidebar-layout/directories/$directoryId': typeof AuthenticatedSidebarLayoutDirectoriesDirectoryIdRoute
  '/_authenticated/_sidebar-layout/trash/directories/$directoryId': typeof AuthenticatedSidebarLayoutTrashDirectoriesDirectoryIdRoute
}
@@ -105,6 +115,7 @@ export interface FileRouteTypes {
    | '/login/callback'
    | '/'
    | '/home'
+    | '/recent'
    | '/directories/$directoryId'
    | '/trash/directories/$directoryId'
  fileRoutesByTo: FileRoutesByTo
@@ -114,6 +125,7 @@
    | '/login/callback'
    | '/'
    | '/home'
+    | '/recent'
    | '/directories/$directoryId'
    | '/trash/directories/$directoryId'
  id:
@@ -125,6 +137,7 @@
    | '/login_/callback'
    | '/_authenticated/'
    | '/_authenticated/_sidebar-layout/home'
+    | '/_authenticated/_sidebar-layout/recent'
    | '/_authenticated/_sidebar-layout/directories/$directoryId'
    | '/_authenticated/_sidebar-layout/trash/directories/$directoryId'
  fileRoutesById: FileRoutesById
@@ -180,6 +193,13 @@ declare module '@tanstack/react-router' {
      preLoaderRoute: typeof AuthenticatedSidebarLayoutRouteImport
      parentRoute: typeof AuthenticatedRoute
    }
+    '/_authenticated/_sidebar-layout/recent': {
+      id: '/_authenticated/_sidebar-layout/recent'
+      path: '/recent'
+      fullPath: '/recent'
+      preLoaderRoute: typeof AuthenticatedSidebarLayoutRecentRouteImport
+      parentRoute: typeof AuthenticatedSidebarLayoutRoute
+    }
    '/_authenticated/_sidebar-layout/home': {
      id: '/_authenticated/_sidebar-layout/home'
      path: '/home'
@@ -206,6 +226,7 @@ declare module '@tanstack/react-router' {

interface AuthenticatedSidebarLayoutRouteChildren {
  AuthenticatedSidebarLayoutHomeRoute: typeof AuthenticatedSidebarLayoutHomeRoute
+  AuthenticatedSidebarLayoutRecentRoute: typeof AuthenticatedSidebarLayoutRecentRoute
  AuthenticatedSidebarLayoutDirectoriesDirectoryIdRoute: typeof AuthenticatedSidebarLayoutDirectoriesDirectoryIdRoute
  AuthenticatedSidebarLayoutTrashDirectoriesDirectoryIdRoute: typeof AuthenticatedSidebarLayoutTrashDirectoriesDirectoryIdRoute
}
@@ -213,6 +234,8 @@ interface AuthenticatedSidebarLayoutRouteChildren {
const AuthenticatedSidebarLayoutRouteChildren: AuthenticatedSidebarLayoutRouteChildren =
  {
    AuthenticatedSidebarLayoutHomeRoute: AuthenticatedSidebarLayoutHomeRoute,
+    AuthenticatedSidebarLayoutRecentRoute:
+      AuthenticatedSidebarLayoutRecentRoute,
    AuthenticatedSidebarLayoutDirectoriesDirectoryIdRoute:
      AuthenticatedSidebarLayoutDirectoriesDirectoryIdRoute,
    AuthenticatedSidebarLayoutTrashDirectoriesDirectoryIdRoute:
|
||||
import { createFileRoute, Outlet } from "@tanstack/react-router"
|
||||
import { SidebarInset, SidebarProvider } from "@/components/ui/sidebar"
|
||||
import { Toaster } from "@/components/ui/sonner"
|
||||
import { DashboardSidebar } from "@/dashboard/dashboard-sidebar"
|
||||
|
||||
export const Route = createFileRoute("/_authenticated/_sidebar-layout")({
|
||||
|
||||
@@ -3,18 +3,21 @@ import type { Doc, Id } from "@fileone/convex/dataModel"
import {
  type FileSystemItem,
  newFileSystemHandle,
  type OpenedFile,
} from "@fileone/convex/filesystem"
import { useMutation } from "@tanstack/react-query"
import { createFileRoute } from "@tanstack/react-router"
import type { Row, Table } from "@tanstack/react-table"
import {
  useMutation as useContextMutation,
  useMutation as useConvexMutation,
  useQuery as useConvexQuery,
} from "convex/react"
import { atom, useAtom, useAtomValue, useSetAtom, useStore } from "jotai"
import {
  ChevronDownIcon,
  PlusIcon,
  ScissorsIcon,
  TextCursorInputIcon,
  TrashIcon,
} from "lucide-react"

@@ -44,7 +47,7 @@ import { NewDirectoryDialog } from "@/directories/directory-page/new-directory-d
import { RenameFileDialog } from "@/directories/directory-page/rename-file-dialog"
import { DirectoryPathBreadcrumb } from "@/directories/directory-path-breadcrumb"
import { FilePreviewDialog } from "@/files/file-preview-dialog"
import { inProgressFileUploadCountAtom } from "@/files/store"
import { cutHandlesAtom, inProgressFileUploadCountAtom } from "@/files/store"
import { UploadFileDialog } from "@/files/upload-file-dialog"
import type { FileDragInfo } from "@/files/use-file-drop"

@@ -77,22 +80,12 @@ const fileDragInfoAtom = atom<FileDragInfo | null>(null)
const optimisticDeletedItemsAtom = atom(
  new Set<Id<"files"> | Id<"directories">>(),
)
const openedFileAtom = atom<Doc<"files"> | null>(null)
const openedFileAtom = atom<OpenedFile | null>(null)
const itemBeingRenamedAtom = atom<{
  originalItem: FileSystemItem
  name: string
} | null>(null)

const tableFilterAtom = atom((get) => {
  const optimisticDeletedItems = get(optimisticDeletedItemsAtom)
  console.log("optimisticDeletedItems", optimisticDeletedItems)
  return (item: FileSystemItem) => {
    const test = !optimisticDeletedItems.has(item.doc._id)
    console.log("test", test)
    return test
  }
})

// MARK: page entry
function RouteComponent() {
  const { directoryId } = Route.useParams()
@@ -100,7 +93,6 @@ function RouteComponent() {
  const directory = useConvexQuery(api.files.fetchDirectory, {
    directoryId,
  })
  const store = useStore()
  const directoryContent = useConvexQuery(
    api.filesystem.fetchDirectoryContent,
    {
@@ -108,45 +100,12 @@ function RouteComponent() {
      trashed: false,
    },
  )
  const setOpenedFile = useSetAtom(openedFileAtom)
  const setContextMenuTargetItems = useSetAtom(contextMenuTargetItemsAtom)

  const tableFilter = useCallback(
    (item: FileSystemItem) =>
      store.get(optimisticDeletedItemsAtom).has(item.doc._id),
    [store],
  )

  const openFile = useCallback(
    (file: Doc<"files">) => {
      setOpenedFile(file)
    },
    [setOpenedFile],
  )

  const directoryUrlFn = useCallback(
    (directory: Doc<"directories">) => `/directories/${directory._id}`,
    [],
  )

  const directoryUrlById = useCallback(
    (directoryId: Id<"directories">) => `/directories/${directoryId}`,
    [],
  )

  const handleContextMenuRequest = (
    row: Row<FileSystemItem>,
    table: Table<FileSystemItem>,
  ) => {
    if (row.getIsSelected()) {
      setContextMenuTargetItems(
        table.getSelectedRowModel().rows.map((row) => row.original),
      )
    } else {
      setContextMenuTargetItems([row.original])
    }
  }

  if (!directory || !directoryContent || !rootDirectory) {
    return <DirectoryPageSkeleton />
  }
@@ -171,17 +130,7 @@ function RouteComponent() {
      {/* DirectoryContentContextMenu must wrap div instead of DirectoryContentTable, otherwise radix will throw "event.preventDefault is not a function" error, idk why */}
      <DirectoryContentContextMenu>
        <div className="w-full">
          <WithAtom atom={optimisticDeletedItemsAtom}>
            {(optimisticDeletedItems) => (
              <DirectoryContentTable
                hiddenItems={optimisticDeletedItems}
                directoryUrlFn={directoryUrlFn}
                fileDragInfoAtom={fileDragInfoAtom}
                onContextMenu={handleContextMenuRequest}
                onOpenFile={openFile}
              />
            )}
          </WithAtom>
          <_DirectoryContentTable />
        </div>
      </DirectoryContentContextMenu>

@@ -228,7 +177,7 @@ function RouteComponent() {
          if (!openedFile) return null
          return (
            <FilePreviewDialog
              file={openedFile}
              openedFile={openedFile}
              onClose={() => setOpenedFile(null)}
            />
          )
@@ -238,6 +187,57 @@ function RouteComponent() {
  )
}

// MARK: directory table

function _DirectoryContentTable() {
  const optimisticDeletedItems = useAtomValue(optimisticDeletedItemsAtom)
  const setOpenedFile = useSetAtom(openedFileAtom)
  const setContextMenuTargetItems = useSetAtom(contextMenuTargetItemsAtom)

  const { mutate: openFile } = useMutation({
    mutationFn: useConvexMutation(api.filesystem.openFile),
    onSuccess: (openedFile: OpenedFile) => {
      setOpenedFile(openedFile)
    },
    onError: (error) => {
      console.error(error)
      toast.error("Failed to open file")
    },
  })

  const onTableOpenFile = (file: Doc<"files">) => {
    openFile({ fileId: file._id })
  }

  const directoryUrlFn = useCallback(
    (directory: Doc<"directories">) => `/directories/${directory._id}`,
    [],
  )

  const handleContextMenuRequest = (
    row: Row<FileSystemItem>,
    table: Table<FileSystemItem>,
  ) => {
    if (row.getIsSelected()) {
      setContextMenuTargetItems(
        table.getSelectedRowModel().rows.map((row) => row.original),
      )
    } else {
      setContextMenuTargetItems([row.original])
    }
  }

  return (
    <DirectoryContentTable
      hiddenItems={optimisticDeletedItems}
      directoryUrlFn={directoryUrlFn}
      fileDragInfoAtom={fileDragInfoAtom}
      onContextMenu={handleContextMenuRequest}
      onOpenFile={onTableOpenFile}
    />
  )
}

// ==================================
// MARK: ctx menu

@@ -251,6 +251,7 @@ function DirectoryContentContextMenu({
  const [target, setTarget] = useAtom(contextMenuTargetItemsAtom)
  const setOptimisticDeletedItems = useSetAtom(optimisticDeletedItemsAtom)
  const setBackgroundTaskProgress = useSetAtom(backgroundTaskProgressAtom)
  const setCutHandles = useSetAtom(cutHandlesAtom)
  const moveToTrashMutation = useContextMutation(api.filesystem.moveToTrash)

  const { mutate: moveToTrash } = useMutation({
@@ -294,6 +295,13 @@ function DirectoryContentContextMenu({
    },
  })

  const handleCut = () => {
    const selectedItems = store.get(contextMenuTargetItemsAtom)
    if (selectedItems.length > 0) {
      setCutHandles(selectedItems.map(newFileSystemHandle))
    }
  }

  const handleDelete = () => {
    const selectedItems = store.get(contextMenuTargetItemsAtom)
    if (selectedItems.length > 0) {
@@ -315,6 +323,10 @@ function DirectoryContentContextMenu({
      {target.length > 0 && (
        <ContextMenuContent>
          <RenameMenuItem />
          <ContextMenuItem onClick={handleCut}>
            <ScissorsIcon />
            Cut
          </ContextMenuItem>
          <ContextMenuItem
            variant="destructive"
            onClick={handleDelete}
@@ -0,0 +1,119 @@
import { api } from "@fileone/convex/api"
import type { Doc } from "@fileone/convex/dataModel"
import { newFileHandle } from "@fileone/convex/filesystem"
import { useMutation } from "@tanstack/react-query"
import { createFileRoute, Link } from "@tanstack/react-router"
import {
  useMutation as useConvexMutation,
  useQuery as useConvexQuery,
} from "convex/react"
import { atom, useAtom, useAtomValue, useSetAtom } from "jotai"
import { FolderInputIcon, TrashIcon } from "lucide-react"
import { useCallback } from "react"
import { toast } from "sonner"
import {
  ContextMenu,
  ContextMenuContent,
  ContextMenuItem,
  ContextMenuTrigger,
} from "@/components/ui/context-menu"
import { backgroundTaskProgressAtom } from "@/dashboard/state"
import type { FileGridSelection } from "@/files/file-grid"
import { FileGrid } from "@/files/file-grid"
import { formatError } from "@/lib/error"

export const Route = createFileRoute("/_authenticated/_sidebar-layout/recent")({
  component: RouteComponent,
})

const selectedFilesAtom = atom(new Set() as FileGridSelection)
const contextMenuTargetItem = atom<Doc<"files"> | null>(null)

function RouteComponent() {
  return (
    <main className="p-4">
      <RecentFilesContextMenu>
        <RecentFilesGrid />
      </RecentFilesContextMenu>
    </main>
  )
}

function RecentFilesGrid() {
  const recentFiles = useConvexQuery(api.filesystem.fetchRecentFiles, {
    limit: 100,
  })
  const [selectedFiles, setSelectedFiles] = useAtom(selectedFilesAtom)
  const setContextMenuTargetItem = useSetAtom(contextMenuTargetItem)

  const handleContextMenu = useCallback(
    (file: Doc<"files">, _event: React.MouseEvent) => {
      setContextMenuTargetItem(file)
    },
    [setContextMenuTargetItem],
  )

  return (
    <FileGrid
      files={recentFiles ?? []}
      selectedFiles={selectedFiles}
      onSelectionChange={setSelectedFiles}
      onContextMenu={handleContextMenu}
    />
  )
}

function RecentFilesContextMenu({ children }: { children: React.ReactNode }) {
  const targetItem = useAtomValue(contextMenuTargetItem)
  const setBackgroundTaskProgress = useSetAtom(backgroundTaskProgressAtom)

  const { mutate: moveToTrash } = useMutation({
    mutationFn: useConvexMutation(api.filesystem.moveToTrash),
    onMutate: () => {
      setBackgroundTaskProgress({
        label: "Moving to trash…",
      })
    },
    onSuccess: () => {
      setBackgroundTaskProgress(null)
      toast.success("Moved to trash")
    },
    onError: (error) => {
      toast.error("Failed to move to trash", {
        description: formatError(error),
      })
    },
  })

  return (
    <ContextMenu>
      <ContextMenuTrigger asChild>
        <div>{children}</div>
      </ContextMenuTrigger>
      {targetItem && (
        <ContextMenuContent>
          <ContextMenuItem>
            <Link
              to={`/directories/${targetItem.directoryId}`}
              className="flex flex-row items-center gap-2"
            >
              <FolderInputIcon />
              Open in directory
            </Link>
          </ContextMenuItem>
          <ContextMenuItem
            variant="destructive"
            onClick={() => {
              moveToTrash({
                handles: [newFileHandle(targetItem._id)],
              })
            }}
          >
            <TrashIcon />
            Move to trash
          </ContextMenuItem>
        </ContextMenuContent>
      )}
    </ContextMenu>
  )
}
@@ -36,7 +36,6 @@ import { DirectoryPageContext } from "@/directories/directory-page/context"
import { DirectoryContentTable } from "@/directories/directory-page/directory-content-table"
import { DirectoryPageSkeleton } from "@/directories/directory-page/directory-page-skeleton"
import { DirectoryPathBreadcrumb } from "@/directories/directory-path-breadcrumb"
import { FilePreviewDialog } from "@/files/file-preview-dialog"
import type { FileDragInfo } from "@/files/use-file-drop"
import { backgroundTaskProgressAtom } from "../../../dashboard/state"

@@ -136,18 +135,6 @@ function RouteComponent() {

      <DeleteConfirmationDialog />
      <EmptyTrashConfirmationDialog />

      <WithAtom atom={openedFileAtom}>
        {(openedFile, setOpenedFile) => {
          if (!openedFile) return null
          return (
            <FilePreviewDialog
              file={openedFile}
              onClose={() => setOpenedFile(null)}
            />
          )
        }}
      </WithAtom>
    </DirectoryPageContext>
  )
}
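Editor's note: the WithAtom render-prop helper used in the two route diffs above is not defined anywhere in this diff. A hypothetical sketch of its likely shape, purely as an assumption for readers unfamiliar with the pattern (the real name and signature live elsewhere in the repo):

// Hypothetical WithAtom helper: subscribes to a jotai atom and hands the
// current value plus a setter to a render prop, scoping the re-render to
// this subtree instead of the whole route component.
import { useAtom, type PrimitiveAtom } from "jotai"
import type { ReactNode } from "react"

function WithAtom<T>({
  atom,
  children,
}: {
  atom: PrimitiveAtom<T>
  children: (value: T, setValue: (next: T) => void) => ReactNode
}) {
  const [value, setValue] = useAtom(atom)
  return <>{children(value, setValue)}</>
}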
@@ -5,5 +5,5 @@ export const Route = createFileRoute("/_authenticated/")({
})

function RouteComponent() {
  return <Navigate replace to="/home" />
  return <Navigate replace to="/recent" />
}
@@ -1,7 +1,7 @@
import path from "node:path"
import tailwindcss from "@tailwindcss/vite"
import { TanStackRouterVite } from "@tanstack/router-plugin/vite"
import react from "@vitejs/plugin-react"
import path from "path"
import { defineConfig } from "vite"

export default defineConfig({
@@ -19,7 +19,7 @@ export default defineConfig({
    },
  },
  optimizeDeps: {
    include: ["convex/react", "convex-helpers"],
    include: ["convex/react", "convex/values", "convex-helpers"],
    // Workaround for better-auth bug: https://github.com/better-auth/better-auth/issues/4457
    // Vite's esbuild incorrectly transpiles better-call dependency causing 'super' keyword errors
    exclude: ["better-auth", "@convex-dev/better-auth"],
apps/file-proxy/.env.sample (new file, 4 lines)
@@ -0,0 +1,4 @@
CONVEX_URL=
# api key used to auth with the convex backend
# use the drexa cli to generate an api key, then add the api key to the api key table via the convex dashboard
API_KEY=
apps/file-proxy/auth.ts (new file, 14 lines)
@@ -0,0 +1,14 @@
import { createMiddleware } from "hono/factory"

export type ApiKeyContextVariable = {
  apiKey: string
}

const apiKeyMiddleware = createMiddleware<{ Variables: ApiKeyContextVariable }>(
  async (c, next) => {
    c.set("apiKey", process.env.API_KEY)
    await next()
  },
)

export { apiKeyMiddleware }
@@ -0,0 +1,16 @@
import { ConvexHttpClient } from "convex/browser"
import { createMiddleware } from "hono/factory"

const _client = new ConvexHttpClient(process.env.CONVEX_URL)

export type ConvexContextVariables = {
  convex: ConvexHttpClient
}

export const convexMiddleware = createMiddleware<{
  Variables: ConvexContextVariables
}>(async (c, next) => {
  c.var
  c.set("convex", _client)
  await next()
})
apps/file-proxy/env.d.ts (vendored, new file, 6 lines)
@@ -0,0 +1,6 @@
declare module "bun" {
  interface Env {
    CONVEX_URL: string
    API_KEY: string
  }
}
@@ -1,12 +1,39 @@
import { Hono } from "hono"
import { api } from "@fileone/convex/api"
import { newRouter } from "./router"

const h = new Hono().basePath("/files")
const r = newRouter().basePath("/files")

h.get("/:fileId", async (c) => {
  const fileId = c.req.param("fileId")
  if (!fileId) {
    return c.json({ error: "File ID is required" }, 400)
r.get(":shareToken", async (c) => {
  const shareToken = c.req.param("shareToken")
  if (!shareToken) {
    return c.json({ error: "not found" }, 404)
  }

  const fileShare = await c.var.convex.query(api.fileshare.findFileShare, {
    apiKey: c.var.apiKey,
    shareToken,
  })
  if (!fileShare) {
    return c.json({ error: "not found" }, 404)
  }

  const fileUrl = await c.var.convex.query(api.filesystem.getStorageUrl, {
    apiKey: c.var.apiKey,
    storageId: fileShare.storageId,
  })
  if (!fileUrl) {
    return c.json({ error: "not found" }, 404)
  }

  const fileResponse = await fetch(fileUrl)
  if (!fileResponse.ok) {
    return c.json({ error: "not found" }, 404)
  }

  return new Response(fileResponse.body, {
    status: fileResponse.status,
    headers: fileResponse.headers,
  })
})

export { h as files }
export { r as files }
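Editor's note: the rewritten handler chains three lookups (share token to file share, file share to storage URL) and then streams the upstream body through unchanged, answering a uniform 404 at every failure point so callers cannot distinguish missing tokens from missing storage. A minimal consumer sketch, assuming the proxy is running locally on port 8081 as configured in index.ts below; the token "abc123" and the output filename are placeholders:

// Assumptions: proxy at localhost:8081, "abc123" is an existing share token.
const res = await fetch("http://localhost:8081/files/abc123")
if (res.ok) {
  await Bun.write("shared-download.bin", res) // Bun.write accepts a Response and streams it to disk
} else {
  console.error("share lookup failed:", res.status)
}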
@@ -1,10 +1,16 @@
import { Hono } from "hono"
import { handleFileRequest } from "./files"
import { apiKeyMiddleware } from "./auth"
import { convexMiddleware } from "./convex"
import { files } from "./files"

Bun.serve({
  routes: {
    "/files/:fileId": {
      GET: handleFileRequest,
    },
  },
})
const app = new Hono()

app.use(convexMiddleware)
app.use(apiKeyMiddleware)

app.route("/", files)

export default {
  port: 8081,
  fetch: app.fetch,
}
@@ -3,6 +3,9 @@
  "module": "index.ts",
  "type": "module",
  "private": true,
  "scripts": {
    "dev": "bun --hot run index.ts"
  },
  "devDependencies": {
    "@types/bun": "latest"
  },
@@ -11,6 +14,7 @@
  },
  "dependencies": {
    "@fileone/convex": "workspace:*",
    "arktype": "^2.1.23",
    "convex": "^1.28.0",
    "hono": "^4.10.1"
  }
@@ -1,7 +1,11 @@
import type { RouterTypes } from "bun"
import { Hono } from "hono"
import type { ApiKeyContextVariable } from "./auth"
import type { ConvexContextVariables } from "./convex"

function router<
  R extends { [K in keyof R]: RouterTypes.RouteValue<Extract<K, string>> },
>(routes: R): R {
  return routes
type ContextVariables = ConvexContextVariables & ApiKeyContextVariable

export function newRouter() {
  return new Hono<{
    Variables: ContextVariables
  }>()
}
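Editor's note: swapping the Bun router helper for a Hono factory means every router built through newRouter() sees the middleware-provided context as typed. A short sketch of what that buys (the /health route is illustrative, not part of the diff):

// c.var.apiKey and c.var.convex are typed via ContextVariables, so a typo
// or a route mounted without the middlewares surfaces at compile time.
const r = newRouter()
r.get("/health", (c) => c.json({ authed: c.var.apiKey.length > 0 }))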
@@ -1,29 +1,29 @@
{
  "compilerOptions": {
    // Environment setup & latest features
    "lib": ["ESNext"],
    "target": "ESNext",
    "module": "Preserve",
    "moduleDetection": "force",
    "jsx": "react-jsx",
    "allowJs": true,

    // Bundler mode
    "moduleResolution": "bundler",
    "allowImportingTsExtensions": true,
    "verbatimModuleSyntax": true,
    "noEmit": true,

    // Best practices
    "strict": true,
    "skipLibCheck": true,
    "noFallthroughCasesInSwitch": true,
    "noUncheckedIndexedAccess": true,
    "noImplicitOverride": true,

    // Some stricter flags (disabled by default)
    "noUnusedLocals": false,
    "noUnusedParameters": false,
    "noPropertyAccessFromIndexSignature": false
  }
}
bun.lock (37 lines changed)
@@ -12,6 +12,24 @@
      "convex": "^1.27.0",
    },
  },
  "apps/cli": {
    "name": "@drexa/cli",
    "version": "0.1.0",
    "bin": {
      "drexa": "./index.ts",
    },
    "dependencies": {
      "@drexa/auth": "workspace:*",
      "chalk": "^5.3.0",
      "commander": "^12.1.0",
    },
    "devDependencies": {
      "@types/bun": "latest",
    },
    "peerDependencies": {
      "typescript": "^5",
    },
  },
  "apps/drive-web": {
    "name": "@fileone/web",
    "version": "0.1.0",
@@ -65,6 +83,7 @@
    "name": "@drexa/file-proxy",
    "dependencies": {
      "@fileone/convex": "workspace:*",
      "arktype": "^2.1.23",
      "convex": "^1.28.0",
      "hono": "^4.10.1",
    },
@@ -87,7 +106,9 @@
  "packages/convex": {
    "name": "@fileone/convex",
    "dependencies": {
      "@drexa/auth": "workspace:*",
      "@fileone/path": "workspace:*",
      "hash-wasm": "^4.12.0",
    },
    "peerDependencies": {
      "@convex-dev/better-auth": "^0.8.9",
@@ -108,6 +129,12 @@
    "convex": "1.28.0",
  },
  "packages": {
    "@ark/regex": ["@ark/regex@0.0.0", "", { "dependencies": { "@ark/util": "0.50.0" } }, "sha512-p4vsWnd/LRGOdGQglbwOguIVhPmCAf5UzquvnDoxqhhPWTP84wWgi1INea8MgJ4SnI2gp37f13oA4Waz9vwNYg=="],

    "@ark/schema": ["@ark/schema@0.50.0", "", { "dependencies": { "@ark/util": "0.50.0" } }, "sha512-hfmP82GltBZDadIOeR3argKNlYYyB2wyzHp0eeAqAOFBQguglMV/S7Ip2q007bRtKxIMLDqFY6tfPie1dtssaQ=="],

    "@ark/util": ["@ark/util@0.50.0", "", {}, "sha512-tIkgIMVRpkfXRQIEf0G2CJryZVtHVrqcWHMDa5QKo0OEEBu0tHkRSIMm4Ln8cd8Bn9TPZtvc/kE2Gma8RESPSg=="],

    "@babel/code-frame": ["@babel/code-frame@7.27.1", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.27.1", "js-tokens": "^4.0.0", "picocolors": "^1.1.1" } }, "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg=="],

    "@babel/compat-data": ["@babel/compat-data@7.28.4", "", {}, "sha512-YsmSKC29MJwf0gF8Rjjrg5LQCmyh+j/nD8/eP7f+BeoQTKYqs9RoWbjGOdy0+1Ekr68RJZMUOPVQaQisnIo4Rw=="],
@@ -194,6 +221,8 @@

    "@drexa/auth": ["@drexa/auth@workspace:packages/auth"],

    "@drexa/cli": ["@drexa/cli@workspace:apps/cli"],

    "@drexa/file-proxy": ["@drexa/file-proxy@workspace:apps/file-proxy"],

    "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.4", "", { "os": "aix", "cpu": "ppc64" }, "sha512-1VCICWypeQKhVbE9oW/sJaAmjLxhVqacdkvPLEjwlttjfwENRSClS8EjBz0KzRyFSCPDIkuXW34Je/vk7zdB7Q=="],
@@ -520,6 +549,8 @@

    "aria-hidden": ["aria-hidden@1.2.6", "", { "dependencies": { "tslib": "^2.0.0" } }, "sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA=="],

    "arktype": ["arktype@2.1.23", "", { "dependencies": { "@ark/regex": "0.0.0", "@ark/schema": "0.50.0", "@ark/util": "0.50.0" } }, "sha512-tyxNWX6xJVMb2EPJJ3OjgQS1G/vIeQRrZuY4DeBNQmh8n7geS+czgbauQWB6Pr+RXiOO8ChEey44XdmxsqGmfQ=="],

    "asn1js": ["asn1js@3.0.6", "", { "dependencies": { "pvtsutils": "^1.3.6", "pvutils": "^1.1.3", "tslib": "^2.8.1" } }, "sha512-UOCGPYbl0tv8+006qks/dTgV9ajs97X2p0FAbyS2iyCRrmLSRolDaHdp+v/CLgnzHc3fVB+CwYiUmei7ndFcgA=="],

    "ast-types": ["ast-types@0.16.1", "", { "dependencies": { "tslib": "^2.0.1" } }, "sha512-6t10qk83GOG8p0vKmaCr8eiilZwO171AvbROMtvvNiwrTly62t+7XkA8RdIIVbpMhCASAsxgAzdRSwh6nw/5Dg=="],
@@ -542,6 +573,8 @@

    "caniuse-lite": ["caniuse-lite@1.0.30001751", "", {}, "sha512-A0QJhug0Ly64Ii3eIqHu5X51ebln3k4yTUkY1j8drqpWHVreg/VLijN48cZ1bYPiqOQuqpkIKnzr/Ul8V+p6Cw=="],

    "chalk": ["chalk@5.6.2", "", {}, "sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA=="],

    "chokidar": ["chokidar@3.6.0", "", { "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", "glob-parent": "~5.1.2", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", "normalize-path": "~3.0.0", "readdirp": "~3.6.0" }, "optionalDependencies": { "fsevents": "~2.3.2" } }, "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw=="],

    "chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="],
@@ -556,6 +589,8 @@

    "color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="],

    "commander": ["commander@12.1.0", "", {}, "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA=="],

    "common-tags": ["common-tags@1.8.2", "", {}, "sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA=="],

    "convert-source-map": ["convert-source-map@2.0.0", "", {}, "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="],
@@ -612,6 +647,8 @@

    "graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],

    "hash-wasm": ["hash-wasm@4.12.0", "", {}, "sha512-+/2B2rYLb48I/evdOIhP+K/DD2ca2fgBjp6O+GBEnCDk2e4rpeXIK8GvIyRPjTezgmWn9gmKwkQjjx6BtqDHVQ=="],

    "hono": ["hono@4.10.1", "", {}, "sha512-rpGNOfacO4WEPClfkEt1yfl8cbu10uB1lNpiI33AKoiAHwOS8lV748JiLx4b5ozO/u4qLjIvfpFsPXdY5Qjkmg=="],

    "is-binary-path": ["is-binary-path@2.1.0", "", { "dependencies": { "binary-extensions": "^2.0.0" } }, "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw=="],
@@ -8,9 +8,10 @@
    "apps/*"
  ],
  "scripts": {
    "dev": "bun run --filter=@fileone/web dev",
    "dev": "bun run --elide-lines=0 --filter './apps/*' dev",
    "build": "bun run --filter=@fileone/web build",
    "preview": "bun run --filter=@fileone/web preview"
    "preview": "bun run --filter=@fileone/web preview",
    "drexa": "bun run apps/cli/index.ts"
  },
  "devDependencies": {
    "@biomejs/biome": "2.2.4",
packages/auth/hasher.ts (new file, 93 lines)
@@ -0,0 +1,93 @@
export interface PassswordHasher {
  hash(password: string): Promise<string>
  verify(password: string, hash: string): Promise<boolean>
}

export class BunSha256Hasher implements PassswordHasher {
  async hash(password: string): Promise<string> {
    const hasher = new Bun.CryptoHasher("sha256")
    hasher.update(password)
    return hasher.digest("base64url")
  }

  async verify(password: string, hash: string): Promise<boolean> {
    const hasher = new Bun.CryptoHasher("sha256")
    hasher.update(password)

    const passwordHash = hasher.digest()
    const hashBytes = Buffer.from(hash, "base64url")

    if (passwordHash.byteLength !== hashBytes.byteLength) {
      return false
    }

    return crypto.timingSafeEqual(passwordHash, hashBytes)
  }
}

export class WebCryptoSha256Hasher implements PassswordHasher {
  async hash(password: string): Promise<string> {
    const hash = await crypto.subtle.digest(
      "SHA-256",
      new TextEncoder().encode(password),
    )
    return this.arrayBufferToBase64url(hash)
  }

  async verify(password: string, hash: string): Promise<boolean> {
    const passwordHash = await crypto.subtle.digest(
      "SHA-256",
      new TextEncoder().encode(password),
    )
    const hashBytes = this.base64urlToArrayBuffer(hash)

    if (passwordHash.byteLength !== hashBytes.byteLength) {
      return false
    }

    // Timing-safe comparison
    const passwordHashBytes = new Uint8Array(passwordHash)
    let result = 0
    for (let i = 0; i < passwordHashBytes.length; i++) {
      result |= passwordHashBytes[i]! ^ hashBytes[i]!
    }
    return result === 0
  }

  private arrayBufferToBase64url(buffer: ArrayBuffer): string {
    const bytes = new Uint8Array(buffer)
    let binary = ""
    for (let i = 0; i < bytes.length; i++) {
      binary += String.fromCharCode(bytes[i]!)
    }
    return btoa(binary).replace(/[+/=]/g, (char) => {
      switch (char) {
        case "+":
          return "-"
        case "/":
          return "_"
        case "=":
          return ""
        default:
          return char
      }
    })
  }

  private base64urlToArrayBuffer(base64url: string): Uint8Array {
    const base64 = base64url.replace(/-/g, "+").replace(/_/g, "/")

    const padded = base64.padEnd(
      base64.length + ((4 - (base64.length % 4)) % 4),
      "=",
    )

    const binary = atob(padded)
    const bytes = new Uint8Array(binary.length)
    for (let i = 0; i < binary.length; i++) {
      bytes[i] = binary.charCodeAt(i)
    }

    return bytes
  }
}
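Editor's note: both classes compute the same plain SHA-256 digest in base64url, so a hash produced by one should verify under the other; only the runtime APIs differ (Bun.CryptoHasher vs. WebCrypto, which is what the Convex runtime needs). A minimal round-trip sketch using the exported WebCrypto variant (the import path matches the one used in packages/convex/apikey.ts below; the password strings are placeholders):

import { WebCryptoSha256Hasher } from "@drexa/auth/hasher"

const hasher = new WebCryptoSha256Hasher()
const digest = await hasher.hash("correct horse battery staple")

console.log(await hasher.verify("correct horse battery staple", digest)) // true
console.log(await hasher.verify("wrong password", digest)) // false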
@@ -1,3 +1,7 @@
import { BunSha256Hasher, type PassswordHasher } from "./hasher"

export { WebCryptoSha256Hasher } from "./hasher"

/**
 * An unhashed api key.
 * Always starts with sk, then the prefix specified at time of generation,
@@ -7,6 +11,11 @@ export type UnhashedApiKey = `sk-${ApiKeyPrefix}-${string}`

export type ApiKeyPrefix = string & { __brand: "ApiKeyPrefix" }

export type ParsedApiKey = {
  prefix: ApiKeyPrefix
  unhashedKey: UnhashedApiKey
}

export type GenerateApiKeyOptions = {
  /**
   * How long the key should be (excluding prefix) in bytes.
@@ -21,6 +30,8 @@ export type GenerateApiKeyOptions = {

  expiresAt?: Date
  description: string

  hasher?: PassswordHasher
}

export type GenerateApiKeyResult = {
@@ -30,11 +41,21 @@ export type GenerateApiKeyResult = {
  description: string
}

export type VerifyApiKeyOptions = {
  keyToBeVerified: UnhashedApiKey
  hashedKey: string
  hasher?: PassswordHasher
}

function validatePrefix(prefix: string): prefix is ApiKeyPrefix {
  return !prefix.includes("-")
}

export function newPrefix(prefix: string): ApiKeyPrefix | null {
  if (prefix.includes("-")) {
  if (!validatePrefix(prefix)) {
    return null
  }
  return prefix as ApiKeyPrefix
  return prefix
}

export async function generateApiKey({
@@ -42,6 +63,7 @@ export async function generateApiKey({
  prefix,
  expiresAt,
  description,
  hasher = new BunSha256Hasher(),
}: GenerateApiKeyOptions): Promise<GenerateApiKeyResult> {
  const keyContent = new Uint8Array(keyByteLength)
  crypto.getRandomValues(keyContent)
@@ -49,11 +71,7 @@ export async function generateApiKey({
  const base64KeyContent = Buffer.from(keyContent).toString("base64url")
  const unhashedKey: UnhashedApiKey = `sk-${prefix}-${base64KeyContent}`

  const hashedKey = await Bun.password.hash(unhashedKey, {
    algorithm: "argon2id",
    memoryCost: 4, // memory usage in kibibytes
    timeCost: 3, // the number of iterations
  })
  const hashedKey = await hasher.hash(unhashedKey)

  return {
    unhashedKey,
@@ -62,3 +80,29 @@ export async function generateApiKey({
    description,
  }
}

export function parseApiKey(key: string): ParsedApiKey | null {
  if (!key.startsWith("sk-")) {
    return null
  }
  const parts = key.split("-")
  if (parts.length !== 3) {
    return null
  }
  const prefix = parts[1]
  if (!prefix || !validatePrefix(prefix)) {
    return null
  }
  return {
    prefix,
    unhashedKey: key as UnhashedApiKey,
  }
}

export async function verifyApiKey({
  keyToBeVerified,
  hashedKey,
  hasher = new BunSha256Hasher(),
}: VerifyApiKeyOptions): Promise<boolean> {
  return await hasher.verify(keyToBeVerified, hashedKey)
}
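Editor's note: a round-trip sketch tying newPrefix, generateApiKey, parseApiKey, and verifyApiKey together. Two assumptions are not confirmed by the visible hunks and should be treated as such: that GenerateApiKeyResult exposes a hashedKey field (its full definition is elided), and that keyByteLength has no default (so it is passed explicitly here):

import { generateApiKey, newPrefix, parseApiKey, verifyApiKey } from "@drexa/auth"

const prefix = newPrefix("fileproxy") // returns null if the prefix contains "-"
if (!prefix) throw new Error("invalid prefix")

const { unhashedKey, hashedKey } = await generateApiKey({
  keyByteLength: 32, // assumption: default not shown in this diff
  prefix,
  description: "file-proxy service key",
})

// Hand unhashedKey (sk-fileproxy-...) to the client; store only hashedKey.
const parsed = parseApiKey(unhashedKey)
console.log(parsed?.prefix) // "fileproxy"
console.log(await verifyApiKey({ keyToBeVerified: unhashedKey, hashedKey })) // true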
@@ -1,11 +1,11 @@
{
  "name": "@drexa/auth",
  "module": "index.ts",
  "type": "module",
  "devDependencies": {
    "@types/bun": "latest"
  },
  "peerDependencies": {
    "typescript": "^5"
  }
}
@@ -1,29 +1,29 @@
{
  "compilerOptions": {
    // Environment setup & latest features
    "lib": ["ESNext"],
    "target": "ESNext",
    "module": "Preserve",
    "moduleDetection": "force",
    "jsx": "react-jsx",
    "allowJs": true,

    // Bundler mode
    "moduleResolution": "bundler",
    "allowImportingTsExtensions": true,
    "verbatimModuleSyntax": true,
    "noEmit": true,

    // Best practices
    "strict": true,
    "skipLibCheck": true,
    "noFallthroughCasesInSwitch": true,
    "noUncheckedIndexedAccess": true,
    "noImplicitOverride": true,

    // Some stricter flags (disabled by default)
    "noUnusedLocals": false,
    "noUnusedParameters": false,
    "noPropertyAccessFromIndexSignature": false
  }
}
packages/convex/_generated/api.d.ts (vendored, 10 lines changed)
@@ -8,6 +8,7 @@
 * @module
 */

import type * as apikey from "../apikey.js";
import type * as auth from "../auth.js";
import type * as betterauth__generated_api from "../betterauth/_generated/api.js";
import type * as betterauth__generated_server from "../betterauth/_generated/server.js";
@@ -16,11 +17,15 @@ import type * as betterauth_auth from "../betterauth/auth.js";
import type * as convex__generated_api from "../convex/_generated/api.js";
import type * as convex__generated_server from "../convex/_generated/server.js";
import type * as files from "../files.js";
import type * as fileshare from "../fileshare.js";
import type * as filesystem from "../filesystem.js";
import type * as functions from "../functions.js";
import type * as http from "../http.js";
import type * as model_apikey from "../model/apikey.js";
import type * as model_directories from "../model/directories.js";
import type * as model_filepreview from "../model/filepreview.js";
import type * as model_files from "../model/files.js";
import type * as model_fileshare from "../model/fileshare.js";
import type * as model_filesystem from "../model/filesystem.js";
import type * as model_user from "../model/user.js";
import type * as shared_error from "../shared/error.js";
@@ -43,6 +48,7 @@ import type {
 * ```
 */
declare const fullApi: ApiFromModules<{
  apikey: typeof apikey;
  auth: typeof auth;
  "betterauth/_generated/api": typeof betterauth__generated_api;
  "betterauth/_generated/server": typeof betterauth__generated_server;
@@ -51,11 +57,15 @@ declare const fullApi: ApiFromModules<{
  "convex/_generated/api": typeof convex__generated_api;
  "convex/_generated/server": typeof convex__generated_server;
  files: typeof files;
  fileshare: typeof fileshare;
  filesystem: typeof filesystem;
  functions: typeof functions;
  http: typeof http;
  "model/apikey": typeof model_apikey;
  "model/directories": typeof model_directories;
  "model/filepreview": typeof model_filepreview;
  "model/files": typeof model_files;
  "model/fileshare": typeof model_fileshare;
  "model/filesystem": typeof model_filesystem;
  "model/user": typeof model_user;
  "shared/error": typeof shared_error;
packages/convex/apikey.ts (new file, 14 lines)
@@ -0,0 +1,14 @@
import { verifyApiKey as _verifyApiKey, parseApiKey } from "@drexa/auth"
import { WebCryptoSha256Hasher } from "@drexa/auth/hasher"
import { v } from "convex/values"
import { query } from "./_generated/server"
import * as ApiKey from "./model/apikey"

export const verifyApiKey = query({
  args: {
    unhashedKey: v.string(),
  },
  handler: async (ctx, args) => {
    return await ApiKey.verifyApiKey(ctx, args.unhashedKey)
  },
})
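Editor's note: this query is the backend half of the file proxy's auth flow; the proxy can call it over HTTP to validate its configured key. A call-site sketch, assuming CONVEX_URL and API_KEY are set as in the proxy's .env.sample (the return shape is whatever ApiKey.verifyApiKey in model/apikey produces, which this diff does not show):

import { api } from "@fileone/convex/api"
import { ConvexHttpClient } from "convex/browser"

const client = new ConvexHttpClient(process.env.CONVEX_URL!)
// Placeholder env values; see apps/file-proxy/.env.sample above.
const result = await client.query(api.apikey.verifyApiKey, {
  unhashedKey: process.env.API_KEY!,
})
console.log("api key accepted:", result)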
@@ -1,8 +1,8 @@
import { createClient, type GenericCtx } from "@convex-dev/better-auth"
import { convex, crossDomain } from "@convex-dev/better-auth/plugins"
import { betterAuth } from "better-auth"
import { components } from "@fileone/convex/api"
import type { DataModel } from "@fileone/convex/dataModel"
import { betterAuth } from "better-auth"
import authSchema from "./betterauth/schema"

const siteUrl = process.env.SITE_URL!
@@ -19,12 +19,19 @@ export const authComponent = createClient<DataModel, typeof authSchema>(
    user: {
      onCreate: async (ctx, user) => {
        const now = Date.now()
        await ctx.db.insert("directories", {
          name: "",
          userId: user._id,
          createdAt: now,
          updatedAt: now,
        })
        await Promise.all([
          ctx.db.insert("userInfo", {
            userId: user._id,
            storageUsageBytes: 0,
            storageQuotaBytes: 1024 * 1024 * 1024 * 5, // 5GB
          }),
          ctx.db.insert("directories", {
            name: "",
            userId: user._id,
            createdAt: now,
            updatedAt: now,
          }),
        ])
      },
    },
  },
Some files were not shown because too many files have changed in this diff.