mirror of https://github.com/get-drexa/drive.git

impl: dir content pagination
@@ -15,8 +15,45 @@
},
"paths": {
"/accounts": {
"get": {
"security": [
{
"BearerAuth": []
}
],
"description": "Retrieve all accounts for the authenticated user",
"tags": [
"accounts"
],
"summary": "List accounts",
"responses": {
"200": {
"description": "List of accounts for the authenticated user",
"content": {
"application/json": {
"schema": {
"type": "array",
"items": {
"$ref": "#/components/schemas/internal_account.Account"
}
}
}
}
},
"401": {
"description": "Not authenticated",
"content": {
"application/json": {
"schema": {
"type": "string"
}
}
}
}
}
},
"post": {
"description": "Create a new user account with email and password. Returns the account, user, and authentication tokens.",
"description": "Create a new user account with email and password. Returns the account, user, and authentication tokens. Tokens can be delivered via HTTP-only cookies or in the response body based on the tokenDelivery field.",
"tags": [
"accounts"
],
@@ -44,7 +81,7 @@
}
},
"400": {
"description": "Invalid request body",
"description": "Invalid request body or token delivery method",
"content": {
"application/json": {
"schema": {
@@ -1616,6 +1653,15 @@
"description": "Password for the new account (min 8 characters)",
"type": "string",
"example": "securepassword123"
},
"tokenDelivery": {
"description": "How to deliver tokens: \"cookie\" (set HTTP-only cookies) or \"body\" (include in response)",
"type": "string",
"enum": [
"cookie",
"body"
],
"example": "body"
}
}
},
@@ -1624,7 +1670,7 @@
"type": "object",
"properties": {
"accessToken": {
"description": "JWT access token for immediate authentication",
"description": "JWT access token for immediate authentication (only included when tokenDelivery is \"body\")",
"type": "string",
"example": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiI1NTBlODQwMC1lMjliLTQxZDQtYTcxNi00NDY2NTU0NDAwMDAifQ.signature"
},
@@ -1637,7 +1683,7 @@
]
},
"refreshToken": {
"description": "Base64 URL encoded refresh token",
"description": "Base64 URL encoded refresh token (only included when tokenDelivery is \"body\")",
"type": "string",
"example": "dR4nD0mUu1DkZXlCeXRlc0FuZFJhbmRvbURhdGFIZXJlMTIzNDU2Nzg5MGFi"
},

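A minimal request sketch against the POST /accounts signup documented above, showing tokenDelivery set to "body" so the tokens come back in the response. The base URL is hypothetical and the "email" field name is assumed (only "password" and "tokenDelivery" appear in this hunk); treat it as an illustration of the schema, not code from the repository.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Hypothetical server URL; "email" is an assumed field name.
	body, _ := json.Marshal(map[string]string{
		"email":         "user@example.com",
		"password":      "securepassword123",
		"tokenDelivery": "body", // or "cookie" to receive HTTP-only cookies instead
	})
	resp, err := http.Post("http://localhost:8080/accounts", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// With tokenDelivery "body", accessToken and refreshToken are included in the response body.
	var out struct {
		AccessToken  string `json:"accessToken"`
		RefreshToken string `json:"refreshToken"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println("got access token:", out.AccessToken != "", "got refresh token:", out.RefreshToken != "")
}
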
@@ -1,8 +1,11 @@
package catalog

import (
"encoding/base64"
"errors"
"fmt"
"slices"
"strconv"
"strings"
"time"

@@ -24,7 +27,7 @@ type DirectoryInfo struct {
Kind string `json:"kind" example:"directory"`
// Unique directory identifier
ID string `json:"id" example:"kRp2XYTq9A55"`
// ParentID is the pbulic ID of the directory this directory is in
// ParentID is the public ID of the directory this directory is in
ParentID string `json:"parentId,omitempty" example:"kRp2XYTq9A55"`
// Full path from root (included when ?include=path)
Path virtualfs.Path `json:"path,omitempty"`
@@ -54,6 +57,15 @@ type postDirectoryContentRequest struct {
Items []string `json:"items" example:"mElnUNCm8F22,kRp2XYTq9A55"`
}

// listDirectoryResponse represents the response to a request to list the contents of a directory
// @Description Response to a request to list the contents of a directory
type listDirectoryResponse struct {
// Items is the list of items in the directory, limited to the limit specified in the request
Items []any `json:"items"`
// NextCursor is the cursor to use to get the next page of results
NextCursor string `json:"nextCursor,omitempty"`
}

// moveItemsToDirectoryResponse represents the response to a request
// to move items into a directory.
// @Description Response from moving items to a directory with status for each item
@@ -80,6 +92,12 @@ type moveItemError struct {
Error string `json:"error" example:"permission denied"`
}

type decodedListChildrenCursor struct {
orderBy virtualfs.ListChildrenOrder
orderDirection virtualfs.ListChildrenDirection
nodeID string
}

func (h *HTTPHandler) currentDirectoryMiddleware(c *fiber.Ctx) error {
account := account.CurrentAccount(c)
if account == nil {
@@ -236,19 +254,79 @@ func (h *HTTPHandler) fetchDirectory(c *fiber.Ctx) error {

// listDirectory returns directory contents
// @Summary List directory contents
// @Description Get all files and subdirectories within a directory
// @Description Get all files and subdirectories within a directory with optional pagination, sorting, and filtering
// @Tags directories
// @Produce json
// @Security BearerAuth
// @Param accountID path string true "Account ID" format(uuid)
// @Param directoryID path string true "Directory ID"
// @Success 200 {array} interface{} "Array of FileInfo and DirectoryInfo objects"
// @Param directoryID path string true "Directory ID (use 'root' for the root directory)"
// @Param orderBy query string false "Sort field: name, createdAt, or updatedAt" Enums(name,createdAt,updatedAt)
// @Param dir query string false "Sort direction: asc or desc" Enums(asc,desc)
// @Param limit query integer false "Maximum number of items to return (default: 100, min: 1)"
// @Param cursor query string false "Cursor for pagination (base64-encoded cursor from previous response)"
// @Success 200 {object} listDirectoryResponse "Paginated list of FileInfo and DirectoryInfo objects"
// @Failure 400 {object} map[string]string "Invalid limit or cursor"
// @Failure 401 {string} string "Not authenticated"
// @Failure 404 {string} string "Directory not found"
// @Router /accounts/{accountID}/directories/{directoryID}/content [get]
func (h *HTTPHandler) listDirectory(c *fiber.Ctx) error {
node := mustCurrentDirectoryNode(c)
children, err := h.vfs.ListChildren(c.Context(), h.db, node)

opts := virtualfs.ListChildrenOptions{}

if by := c.Query("orderBy"); by != "" {
switch by {
case "name":
opts.OrderBy = virtualfs.ListChildrenOrderByName
case "createdAt":
opts.OrderBy = virtualfs.ListChildrenOrderByCreatedAt
case "updatedAt":
opts.OrderBy = virtualfs.ListChildrenOrderByUpdatedAt
}
}

if dir := c.Query("dir"); dir != "" {
switch dir {
case "asc":
opts.OrderDirection = virtualfs.ListChildrenDirectionAsc
case "desc":
opts.OrderDirection = virtualfs.ListChildrenDirectionDesc
}
}

if limit := c.Query("limit"); limit != "" {
limit, err := strconv.Atoi(limit)
if err != nil {
return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid limit"})
}
if limit < 1 {
return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Limit must be at least 1"})
}
opts.Limit = limit
}

if cursor := c.Query("cursor"); cursor != "" {
dc, err := decodeListChildrenCursor(cursor)
if err != nil {
return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "invalid cursor"})
}

n, err := h.vfs.FindNodeByPublicID(c.Context(), h.db, node.AccountID, dc.nodeID)
if err != nil {
if errors.Is(err, virtualfs.ErrNodeNotFound) {
return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "invalid cursor"})
}
return httperr.Internal(err)
}

opts.Cursor = &virtualfs.ListChildrenCursor{
Node: n,
OrderBy: dc.orderBy,
OrderDirection: dc.orderDirection,
}
}

children, cursor, err := h.vfs.ListChildren(c.Context(), h.db, node, opts)
if err != nil {
if errors.Is(err, virtualfs.ErrNodeNotFound) {
return c.SendStatus(fiber.StatusNotFound)
@@ -283,7 +361,10 @@ func (h *HTTPHandler) listDirectory(c *fiber.Ctx) error {
}
}

return c.JSON(items)
return c.JSON(listDirectoryResponse{
Items: items,
NextCursor: encodeListChildrenCursor(cursor),
})
}

// patchDirectory updates directory properties
@@ -562,3 +643,64 @@ func (h *HTTPHandler) moveItemsToDirectory(c *fiber.Ctx) error {

return c.JSON(res)
}

func encodeListChildrenCursor(cursor *virtualfs.ListChildrenCursor) string {
var by int
switch cursor.OrderBy {
case virtualfs.ListChildrenOrderByName:
by = 0
case virtualfs.ListChildrenOrderByCreatedAt:
by = 1
case virtualfs.ListChildrenOrderByUpdatedAt:
by = 2
}

var d int
switch cursor.OrderDirection {
case virtualfs.ListChildrenDirectionAsc:
d = 0
case virtualfs.ListChildrenDirectionDesc:
d = 1
}

s := fmt.Sprintf("%d:%d:%s", by, d, cursor.Node.ID)
return base64.URLEncoding.EncodeToString([]byte(s))
}

func decodeListChildrenCursor(s string) (*decodedListChildrenCursor, error) {
bs, err := base64.URLEncoding.DecodeString(s)
if err != nil {
return nil, err
}

parts := strings.Split(string(bs), ":")
if len(parts) != 3 {
return nil, errors.New("invalid cursor")
}

c := new(decodedListChildrenCursor)

switch parts[0] {
case "0":
c.orderBy = virtualfs.ListChildrenOrderByName
case "1":
c.orderBy = virtualfs.ListChildrenOrderByCreatedAt
case "2":
c.orderBy = virtualfs.ListChildrenOrderByUpdatedAt
default:
return nil, errors.New("invalid cursor")
}

switch parts[1] {
case "0":
c.orderDirection = virtualfs.ListChildrenDirectionAsc
case "1":
c.orderDirection = virtualfs.ListChildrenDirectionDesc
default:
return nil, errors.New("invalid cursor")
}

c.nodeID = parts[2]

return c, nil
}

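A client-side sketch of consuming the paginated GET /accounts/{accountID}/directories/{directoryID}/content route annotated above. The base URL, bearer token, account ID, and directory ID are placeholders, and the stop condition (empty or missing nextCursor) is an assumption based on the omitempty tag on listDirectoryResponse.NextCursor.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

// listDirectoryResponse mirrors the response type added in this commit.
type listDirectoryResponse struct {
	Items      []json.RawMessage `json:"items"`
	NextCursor string            `json:"nextCursor,omitempty"`
}

func main() {
	// Placeholder values; replace with a real server, token, and IDs.
	base := "http://localhost:8080"
	token := "ACCESS_TOKEN"
	accountID := "550e8400-e29b-41d4-a716-446655440000"
	directoryID := "root"

	cursor := ""
	for {
		q := url.Values{"orderBy": {"name"}, "dir": {"asc"}, "limit": {"100"}}
		if cursor != "" {
			q.Set("cursor", cursor) // opaque base64 cursor from the previous page
		}
		req, err := http.NewRequest("GET",
			fmt.Sprintf("%s/accounts/%s/directories/%s/content?%s", base, accountID, directoryID, q.Encode()), nil)
		if err != nil {
			panic(err)
		}
		req.Header.Set("Authorization", "Bearer "+token)

		resp, err := http.DefaultClient.Do(req)
		if err != nil {
			panic(err)
		}
		var page listDirectoryResponse
		if err := json.NewDecoder(resp.Body).Decode(&page); err != nil {
			panic(err)
		}
		resp.Body.Close()

		fmt.Printf("got %d items\n", len(page.Items))
		// Assumed termination: last page omits nextCursor or carries no items.
		if page.NextCursor == "" || len(page.Items) == 0 {
			break
		}
		cursor = page.NextCursor
	}
}
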
@@ -18,8 +18,8 @@ type FileInfo struct {
Kind string `json:"kind" example:"file"`
// Unique file identifier
ID string `json:"id" example:"mElnUNCm8F22"`
// ParentID is the pbulic ID of the directory this file is in
ParentID string `json:"parentId,omitempty" exmaple:"kRp2XYTq9A55"`
// ParentID is the public ID of the directory this file is in
ParentID string `json:"parentId,omitempty" example:"kRp2XYTq9A55"`
// File name
Name string `json:"name" example:"document.pdf"`
// File size in bytes
@@ -297,7 +297,6 @@ func (h *HTTPHandler) deleteFiles(c *fiber.Ctx) error {
}

return c.JSON(res)

} else {
err = h.vfs.PermanentlyDeleteFiles(c.Context(), tx, nodes)
if err != nil {

@@ -3,7 +3,9 @@ package virtualfs
import "errors"

var (
ErrNodeNotFound = errors.New("node not found")
ErrNodeConflict = errors.New("node conflict")
ErrUnsupportedOperation = errors.New("unsupported operation")
ErrNodeNotFound = errors.New("node not found")
ErrNodeConflict = errors.New("node conflict")
ErrUnsupportedOperation = errors.New("unsupported operation")
ErrCursorMismatchedOrderField = errors.New("cursor mismatched order field")
ErrCursorMismatchedDirection = errors.New("cursor mismatched direction")
)

@@ -7,6 +7,7 @@ import (
"database/sql"
"encoding/binary"
"errors"
"fmt"
"io"
"time"

@@ -19,6 +20,23 @@ import (
"github.com/uptrace/bun"
)

type ListChildrenOrder string

const (
ListChildrenOrderByName ListChildrenOrder = "name"
ListChildrenOrderByCreatedAt ListChildrenOrder = "created_at"
ListChildrenOrderByUpdatedAt ListChildrenOrder = "updated_at"
)

type ListChildrenDirection int

const (
ListChildrenDirectionAsc ListChildrenDirection = iota
ListChildrenDirectionDesc
)

const listChildrenDefaultLimit = 50

type VirtualFS struct {
blobStore blob.Store
keyResolver BlobKeyResolver
@@ -48,6 +66,19 @@ type MoveFilesResult struct {
Errors []MoveFileError
}

type ListChildrenOptions struct {
Limit int
OrderBy ListChildrenOrder
OrderDirection ListChildrenDirection
Cursor *ListChildrenCursor
}

type ListChildrenCursor struct {
Node *Node
OrderBy ListChildrenOrder
OrderDirection ListChildrenDirection
}

const RootDirectoryName = "root"

func New(blobStore blob.Store, keyResolver BlobKeyResolver) (*VirtualFS, error) {
@@ -128,26 +159,108 @@ func (vfs *VirtualFS) FindRootDirectory(ctx context.Context, db bun.IDB, account
return root, nil
}

func (vfs *VirtualFS) ListChildren(ctx context.Context, db bun.IDB, node *Node) ([]*Node, error) {
// ListChildren returns the children of a directory node with optional sorting and cursor-based pagination.
func (vfs *VirtualFS) ListChildren(ctx context.Context, db bun.IDB, node *Node, opts ListChildrenOptions) ([]*Node, *ListChildrenCursor, error) {
if !node.IsAccessible() {
return nil, ErrNodeNotFound
return nil, nil, ErrNodeNotFound
}

var nodes []*Node
err := db.NewSelect().Model(&nodes).
q := db.NewSelect().Model(&nodes).
Where("account_id = ?", node.AccountID).
Where("parent_id = ?", node.ID).
Where("status = ?", NodeStatusReady).
Where("deleted_at IS NULL").
Scan(ctx)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return make([]*Node, 0), nil
Where("deleted_at IS NULL")

var dir string
if opts.OrderBy != "" {
switch opts.OrderDirection {
default:
dir = "ASC"
case ListChildrenDirectionAsc:
dir = "ASC"
case ListChildrenDirectionDesc:
dir = "DESC"
}
return nil, err
}

return nodes, nil
// Apply sorting with directories always first, then ID as tiebreaker.
//
// Cursor-based pagination implementation notes:
// - The cursor contains the last node from the previous page along with the sort configuration
// - The WHERE clause uses tuple comparison (kind, field, id) to filter results after the cursor position
// - Directories are always ordered before files (kind ASC puts 'directory' before 'file' alphabetically)
// - ID is always sorted ASC as a tiebreaker, regardless of the main sort direction
//
// Why ID is always ASC:
// - Ensures deterministic ordering when multiple items have the same sort field value
// - Maintains consistent tiebreaker behavior across different sort directions
// - Prevents pagination inconsistencies where items with the same name/date appear in different orders
// depending on whether sorting ASC or DESC
// - The tuple comparison in the WHERE clause correctly handles the direction for the main field,
// while ID provides a stable secondary sort
switch opts.OrderBy {
case ListChildrenOrderByName:
q = q.Order("kind ASC", "name "+dir, "id ASC")
case ListChildrenOrderByCreatedAt:
q = q.Order("kind ASC", "created_at "+dir, "id ASC")
case ListChildrenOrderByUpdatedAt:
q = q.Order("kind ASC", "updated_at "+dir, "id ASC")
}

// Apply cursor filter for pagination.
// The cursor contains the last node from the previous page. We use tuple comparison
// (kind, field, id) to find all rows that come after the cursor position in the sorted order.
// Kind is included to handle pagination across the directory/file boundary correctly.
// For ASC: use > to get rows after cursor
// For DESC: use < to get rows after cursor (because "after" in descending order means lesser values)
if opts.Cursor != nil {
if opts.Cursor.OrderBy != opts.OrderBy {
return nil, nil, ErrCursorMismatchedOrderField
}
if opts.Cursor.OrderDirection != opts.OrderDirection {
return nil, nil, ErrCursorMismatchedDirection
}

var op string
switch opts.Cursor.OrderDirection {
case ListChildrenDirectionAsc:
op = ">"
case ListChildrenDirectionDesc:
op = "<"
}

// Include kind in tuple comparison to handle pagination across directory/file boundary
switch opts.Cursor.OrderBy {
case ListChildrenOrderByName:
q = q.Where("(kind, name, id) "+op+" (?, ?, ?)", opts.Cursor.Node.Kind, opts.Cursor.Node.Name, opts.Cursor.Node.ID)
case ListChildrenOrderByCreatedAt:
q = q.Where("(kind, created_at, id) "+op+" (?, ?, ?)", opts.Cursor.Node.Kind, opts.Cursor.Node.CreatedAt, opts.Cursor.Node.ID)
case ListChildrenOrderByUpdatedAt:
q = q.Where("(kind, updated_at, id) "+op+" (?, ?, ?)", opts.Cursor.Node.Kind, opts.Cursor.Node.UpdatedAt, opts.Cursor.Node.ID)
}
}

if opts.Limit > 0 {
q = q.Limit(opts.Limit)
} else {
q = q.Limit(listChildrenDefaultLimit)
}

if err := q.Scan(ctx); err != nil {
if errors.Is(err, sql.ErrNoRows) {
return make([]*Node, 0), nil, nil
}
return nil, nil, err
}

c := &ListChildrenCursor{
Node: nodes[len(nodes)-1],
OrderBy: opts.OrderBy,
OrderDirection: opts.OrderDirection,
}

return nodes, c, nil
}

func (vfs *VirtualFS) CreateFile(ctx context.Context, db bun.IDB, accountID uuid.UUID, opts CreateFileOptions) (*Node, error) {
@@ -178,7 +291,7 @@ func (vfs *VirtualFS) CreateFile(ctx context.Context, db bun.IDB, accountID uuid
}
}

_, err = db.NewInsert().Model(&node).Returning("*").Exec(ctx)
_, err = db.NewInsert().Model(&node).On("CONFLICT DO NOTHING").Returning("*").Exec(ctx)
if err != nil {
if database.IsUniqueViolation(err) {
return nil, ErrNodeConflict
@@ -355,7 +468,7 @@ func (vfs *VirtualFS) SoftDeleteNodes(ctx context.Context, db bun.IDB, nodes []*
}
}

_, err := db.NewUpdate().Model(deletableNodes).
_, err := db.NewUpdate().Model(&deletableNodes).
Where("id IN (?)", bun.In(nodeIDs)).
Where("status = ?", NodeStatusReady).
Where("deleted_at IS NULL").
@@ -363,7 +476,7 @@ func (vfs *VirtualFS) SoftDeleteNodes(ctx context.Context, db bun.IDB, nodes []*
Returning("deleted_at").
Exec(ctx)
if err != nil {
return nil, err
return nil, fmt.Errorf("failed to soft delete nodes: %w", err)
}

return deletableNodes, nil

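A self-contained sketch of the keyset ordering that ListChildren builds in SQL, using a trimmed stand-in node type: rows sort by (kind ASC, name ASC, id ASC) and the next page starts strictly after the cursor row, the in-memory analogue of the (kind, name, id) > (?, ?, ?) tuple comparison. It mirrors the idea only; the real implementation runs the comparison in the database.

package main

import (
	"fmt"
	"sort"
)

// node is a trimmed stand-in for virtualfs.Node, just enough to show the ordering.
type node struct {
	Kind string // "directory" sorts before "file" alphabetically
	Name string
	ID   int64
}

// less orders by (kind ASC, name ASC, id ASC), the same shape as the ORDER BY
// built in ListChildren for orderBy=name, dir=asc.
func less(a, b node) bool {
	if a.Kind != b.Kind {
		return a.Kind < b.Kind
	}
	if a.Name != b.Name {
		return a.Name < b.Name
	}
	return a.ID < b.ID // id is always ASC as the stable tiebreaker
}

// page returns up to limit nodes strictly after cursor in the sorted order.
func page(all []node, cursor *node, limit int) []node {
	sort.Slice(all, func(i, j int) bool { return less(all[i], all[j]) })
	out := []node{}
	for _, n := range all {
		if cursor != nil && !less(*cursor, n) {
			continue // at or before the cursor position, already returned
		}
		out = append(out, n)
		if len(out) == limit {
			break
		}
	}
	return out
}

func main() {
	items := []node{
		{"file", "a.txt", 1}, {"directory", "docs", 2},
		{"file", "b.txt", 3}, {"file", "a.txt", 4},
	}
	p1 := page(items, nil, 2)
	fmt.Println(p1) // [{directory docs 2} {file a.txt 1}]
	cursor := p1[len(p1)-1]
	fmt.Println(page(items, &cursor, 2)) // [{file a.txt 4} {file b.txt 3}]
}
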
@@ -3,7 +3,6 @@ import type { DirectoryContent, DirectoryInfoWithPath } from "@/vfs/vfs"

type DirectoryPageContextType = {
directory: DirectoryInfoWithPath
directoryContent: DirectoryContent
}

export const DirectoryPageContext = createContext<DirectoryPageContextType>(

@@ -1,4 +1,5 @@
import { Link, useNavigate } from "@tanstack/react-router"
import { useInfiniteQuery } from "@tanstack/react-query"
import { Link, useNavigate, useSearch } from "@tanstack/react-router"
import {
type ColumnDef,
flexRender,
@@ -8,7 +9,7 @@ import {
type Table as TableType,
useReactTable,
} from "@tanstack/react-table"
import { type PrimitiveAtom, useSetAtom, useStore } from "jotai"
import { type PrimitiveAtom, useAtomValue, useSetAtom, useStore } from "jotai"
import { useContext, useEffect, useMemo, useRef } from "react"
import { DirectoryIcon } from "@/components/icons/directory-icon"
import { TextFileIcon } from "@/components/icons/text-file-icon"
@@ -28,12 +29,13 @@ import {
} from "@/lib/keyboard"
import { cn } from "@/lib/utils"
import type { DirectoryInfo, DirectoryItem, FileInfo } from "@/vfs/vfs"
import { directoryContentQueryAtom } from "../../vfs/api"
import { DirectoryPageContext } from "./context"
import { DirectoryContentTableSkeleton } from "./directory-content-table-skeleton"

type DirectoryContentTableItemIdFilter = Set<string>

type DirectoryContentTableProps = {
hiddenItems: DirectoryContentTableItemIdFilter
directoryUrlFn: (directory: DirectoryInfo) => string
fileDragInfoAtom: PrimitiveAtom<FileDragInfo | null>
onContextMenu: (
@@ -138,26 +140,40 @@ function useTableColumns(
}

export function DirectoryContentTable({
hiddenItems,
directoryUrlFn,
onContextMenu,
fileDragInfoAtom,
onOpenFile,
}: DirectoryContentTableProps) {
const { directoryContent } = useContext(DirectoryPageContext)
const { directory } = useContext(DirectoryPageContext)
const search = useSearch({
from: "/_authenticated/_sidebar-layout/directories/$directoryId",
})

const directoryContentQuery = useAtomValue(
directoryContentQueryAtom({
directoryId: directory.id,
orderBy: search.orderBy,
direction: search.direction,
limit: 100,
}),
)
const { data: directoryContent, isLoading: isLoadingDirectoryContent } =
useInfiniteQuery(directoryContentQuery)

const store = useStore()
const navigate = useNavigate()

const table = useReactTable({
data: directoryContent || [],
data: useMemo(
() => directoryContent?.pages.flatMap((page) => page.items) || [],
[directoryContent],
),
columns: useTableColumns(onOpenFile, directoryUrlFn),
getCoreRowModel: getCoreRowModel(),
getFilteredRowModel: getFilteredRowModel(),
enableRowSelection: true,
enableGlobalFilter: true,
state: {
globalFilter: hiddenItems,
},
globalFilterFn: (
row,
_columnId,
@@ -180,6 +196,10 @@ export function DirectoryContentTable({
[table.setRowSelection],
)

if (isLoadingDirectoryContent) {
return <DirectoryContentTableSkeleton />
}

const handleRowContextMenu = (
row: Row<DirectoryItem>,
_event: React.MouseEvent,

@@ -32,3 +32,9 @@ if (import.meta.hot) {
// The hot module reloading API is not available in production.
createRoot(elem).render(app)
}

declare module "@tanstack/react-router" {
interface Register {
router: typeof router
}
}

@@ -1,6 +1,7 @@
import { useMutation, useQuery } from "@tanstack/react-query"
import { createFileRoute } from "@tanstack/react-router"
import type { Row, Table } from "@tanstack/react-table"
import { type } from "arktype"
import { atom, useAtom, useAtomValue, useSetAtom, useStore } from "jotai"
import {
ChevronDownIcon,
@@ -38,8 +39,10 @@ import { FilePreviewDialog } from "@/files/file-preview-dialog"
import { cutItemsAtom, inProgressFileUploadCountAtom } from "@/files/store"
import { UploadFileDialog } from "@/files/upload-file-dialog"
import type { FileDragInfo } from "@/files/use-file-drop"
import { formatError } from "@/lib/error"
import {
directoryContentQueryAtom,
DIRECTORY_CONTENT_ORDER_BY,
DIRECTORY_CONTENT_ORDER_DIRECTION,
directoryInfoQueryAtom,
moveToTrashMutationAtom,
} from "@/vfs/api"
@@ -49,11 +52,20 @@ import type {
DirectoryItem,
FileInfo,
} from "@/vfs/vfs"
import { formatError } from "../../../lib/error"

const DirectoryContentPageParams = type({
orderBy: type
.valueOf(DIRECTORY_CONTENT_ORDER_BY)
.default(DIRECTORY_CONTENT_ORDER_BY.name),
direction: type
.valueOf(DIRECTORY_CONTENT_ORDER_DIRECTION)
.default(DIRECTORY_CONTENT_ORDER_DIRECTION.asc),
})

export const Route = createFileRoute(
"/_authenticated/_sidebar-layout/directories/$directoryId",
)({
validateSearch: DirectoryContentPageParams,
component: RouteComponent,
})

@@ -87,37 +99,54 @@ const itemBeingRenamedAtom = atom<{
// MARK: page entry
function RouteComponent() {
const { directoryId } = Route.useParams()
const {
data: directoryInfo,
isLoading: isLoadingDirectoryInfo,
error: directoryInfoError,
} = useQuery(useAtomValue(directoryInfoQueryAtom(directoryId)))
const {
data: directoryContent,
isLoading: isLoadingDirectoryContent,
error: directoryContentError,
} = useQuery(useAtomValue(directoryContentQueryAtom(directoryId)))
const { data: directoryInfo, isLoading: isLoadingDirectoryInfo } = useQuery(
useAtomValue(directoryInfoQueryAtom(directoryId)),
)

const setOpenedFile = useSetAtom(openedFileAtom)
const setContextMenuTargetItems = useSetAtom(contextMenuTargetItemsAtom)

const directoryUrlById = useCallback(
(directoryId: string) => `/directories/${directoryId}`,
[],
)

console.log({ directoryInfoError, directoryContentError })
const onTableOpenFile = useCallback(
(file: FileInfo) => {
setOpenedFile(file)
},
[setOpenedFile],
)

if (isLoadingDirectoryInfo || isLoadingDirectoryContent) {
const directoryUrlFn = useCallback(
(directory: DirectoryInfo) => `/directories/${directory.id}`,
[],
)

const handleContextMenuRequest = useCallback(
(row: Row<DirectoryItem>, table: Table<DirectoryItem>) => {
if (row.getIsSelected()) {
setContextMenuTargetItems(
table.getSelectedRowModel().rows.map((row) => row.original),
)
} else {
setContextMenuTargetItems([row.original])
}
},
[setContextMenuTargetItems],
)

if (isLoadingDirectoryInfo) {
return <DirectoryPageSkeleton />
}

if (!directoryInfo || !directoryContent) {
if (!directoryInfo) {
// TODO: handle empty state/error
return null
}

return (
<DirectoryPageContext
value={{ directory: directoryInfo, directoryContent }}
>
<DirectoryPageContext value={{ directory: directoryInfo }}>
<header className="flex py-2 shrink-0 items-center gap-2 border-b px-4 w-full">
<DirectoryPathBreadcrumb
directory={directoryInfo}
@@ -134,7 +163,12 @@ function RouteComponent() {
{/* DirectoryContentContextMenu must wrap div instead of DirectoryContentTable, otherwise radix will throw "event.preventDefault is not a function" error, idk why */}
<DirectoryContentContextMenu>
<div className="w-full">
<_DirectoryContentTable />
<DirectoryContentTable
directoryUrlFn={directoryUrlFn}
fileDragInfoAtom={fileDragInfoAtom}
onContextMenu={handleContextMenuRequest}
onOpenFile={onTableOpenFile}
/>
</div>
</DirectoryContentContextMenu>

@@ -191,46 +225,6 @@ function RouteComponent() {
)
}

// MARK: directory table

function _DirectoryContentTable() {
const optimisticDeletedItems = useAtomValue(optimisticDeletedItemsAtom)
const setOpenedFile = useSetAtom(openedFileAtom)
const setContextMenuTargetItems = useSetAtom(contextMenuTargetItemsAtom)

const onTableOpenFile = (file: FileInfo) => {
setOpenedFile(file)
}

const directoryUrlFn = useCallback(
(directory: DirectoryInfo) => `/directories/${directory.id}`,
[],
)

const handleContextMenuRequest = (
row: Row<DirectoryItem>,
table: Table<DirectoryItem>,
) => {
if (row.getIsSelected()) {
setContextMenuTargetItems(
table.getSelectedRowModel().rows.map((row) => row.original),
)
} else {
setContextMenuTargetItems([row.original])
}
}

return (
<DirectoryContentTable
hiddenItems={optimisticDeletedItems}
directoryUrlFn={directoryUrlFn}
fileDragInfoAtom={fileDragInfoAtom}
onContextMenu={handleContextMenuRequest}
onOpenFile={onTableOpenFile}
/>
)
}

// ==================================
// MARK: ctx menu

@@ -1,6 +1,5 @@
import { useMutation } from "@tanstack/react-query"
import { createFileRoute, useNavigate } from "@tanstack/react-router"
import { useSetAtom } from "jotai"
import { GalleryVerticalEnd } from "lucide-react"
import { loginMutation } from "@/auth/api"
import { Button } from "@/components/ui/button"
@@ -20,7 +19,6 @@ import {
} from "@/components/ui/field"
import { Input } from "@/components/ui/input"
import { cn } from "@/lib/utils"
import { currentAccountAtom } from "../account/account"

export const Route = createFileRoute("/login")({
component: RouteComponent,

@@ -1,4 +1,10 @@
import { mutationOptions, queryOptions, skipToken } from "@tanstack/react-query"
import {
type InfiniteData,
infiniteQueryOptions,
mutationOptions,
queryOptions,
skipToken,
} from "@tanstack/react-query"
import { type } from "arktype"
import { atom } from "jotai"
import { atomFamily } from "jotai/utils"
@@ -12,6 +18,11 @@ import {
FileInfo,
} from "./vfs"

const DirectoryContentResponse = type({
items: DirectoryContent,
"nextCursor?": "string",
})

/**
* This atom derives the file url for a given file.
* It is recommended to use {@link useFileUrl} instead of using this atom directly.
@@ -58,27 +69,63 @@ export const directoryInfoQueryAtom = atomFamily((directoryId: string) =>
}),
)

export const directoryContentQueryAtom = atomFamily((directoryId: string) =>
atom((get) => {
const account = get(currentAccountAtom)
return queryOptions({
queryKey: [
"accounts",
account?.id,
"directories",
directoryId,
"content",
],
queryFn: account
? () =>
fetchApi(
"GET",
`/accounts/${account.id}/directories/${directoryId}/content`,
{ returns: DirectoryContent },
).then(([_, result]) => result)
: skipToken,
})
}),
export const DIRECTORY_CONTENT_ORDER_BY = {
name: "name",
createdAt: "createdAt",
updatedAt: "updatedAt",
} as const
export type DirectoryContentOrderBy =
(typeof DIRECTORY_CONTENT_ORDER_BY)[keyof typeof DIRECTORY_CONTENT_ORDER_BY]

export const DIRECTORY_CONTENT_ORDER_DIRECTION = {
asc: "asc",
desc: "desc",
} as const
type DirectoryContentOrderDirection =
(typeof DIRECTORY_CONTENT_ORDER_DIRECTION)[keyof typeof DIRECTORY_CONTENT_ORDER_DIRECTION]

type DirectoryContentQueryParams = {
directoryId: string
orderBy: DirectoryContentOrderBy
direction: DirectoryContentOrderDirection
limit: number
}

const directoryContentQueryKey = (
accountId: string | undefined,
directoryId: string,
) => ["accounts", accountId, "directories", directoryId, "content"]
export const directoryContentQueryAtom = atomFamily(
({ directoryId, orderBy, direction, limit }: DirectoryContentQueryParams) =>
atom((get) => {
const account = get(currentAccountAtom)
return infiniteQueryOptions({
queryKey: directoryContentQueryKey(account?.id, directoryId),
initialPageParam: {
orderBy,
direction,
limit,
cursor: "",
},
queryFn: ({ pageParam }) =>
account
? fetchApi(
"GET",
`/accounts/${account.id}/directories/${directoryId}/content?orderBy=${pageParam.orderBy}&dir=${pageParam.direction}&limit=${pageParam.limit}${pageParam.cursor ? `&cursor=${pageParam.cursor}` : ""}`,
{ returns: DirectoryContentResponse },
).then(([_, result]) => result)
: Promise.reject(new Error("No account selected")),
getNextPageParam: (lastPage, _pages, lastPageParam) => ({
...lastPageParam,
cursor: lastPage.nextCursor ?? "",
}),
})
}),
(paramsA, paramsB) =>
paramsA.directoryId === paramsB.directoryId &&
paramsA.orderBy === paramsB.orderBy &&
paramsA.direction === paramsB.direction &&
paramsA.limit === paramsB.limit,
)

export const createDirectoryMutationAtom = atom((get) => {
@@ -103,13 +150,6 @@ export const createDirectoryMutationAtom = atom((get) => {
get(directoryInfoQueryAtom(data.id)).queryKey,
data,
)
const parent = data.path.at(-2)
if (parent) {
client.setQueryData(
get(directoryContentQueryAtom(parent.id)).queryKey,
(prev) => (prev ? [...prev, data] : [data]),
)
}
},
})
})
@@ -157,13 +197,18 @@ export const moveDirectoryItemsMutationAtom = atom((get) =>
return result
},
onMutate: ({ items }, { client }) => {
const account = get(currentAccountAtom)
if (!account) {
return null
}

const movedItems = new Map<string, Set<string>>()

for (const item of items) {
if (item.parentId) {
const s = movedItems.get(item.parentId)
if (!s) {
movedItems.set(item.parentId, new Set(s))
movedItems.set(item.parentId, new Set([item.id]))
} else {
s.add(item.id)
}
@@ -172,45 +217,67 @@ export const moveDirectoryItemsMutationAtom = atom((get) =>

const prevDirContentMap = new Map<
string,
DirectoryItem[] | undefined
InfiniteData<typeof DirectoryContentResponse.infer> | undefined
>()

movedItems.forEach((s, parentId) => {
const query = get(directoryContentQueryAtom(parentId))
const prevDirContent = client.getQueryData(query.queryKey)
client.setQueryData(
query.queryKey,
(prev) => prev?.filter((it) => !s.has(it.id)) ?? prev,
)
const key = directoryContentQueryKey(account.id, parentId)
const prevDirContent =
client.getQueryData<
InfiniteData<typeof DirectoryContentResponse.infer>
>(key)
client.setQueryData<
InfiniteData<typeof DirectoryContentResponse.infer>
>(key, (prev) => {
if (!prev) return prev
return {
...prev,
pages: prev.pages.map((page) => ({
...page,
items: page.items.filter((it) => !s.has(it.id)),
})),
}
})
prevDirContentMap.set(parentId, prevDirContent)
})

return { prevDirContentMap }
},
onSuccess: (_data, { targetDirectory, items }, _result, { client }) => {
const account = get(currentAccountAtom)
if (!account) return

const dirId =
typeof targetDirectory === "string"
? targetDirectory
: targetDirectory.id
client.invalidateQueries(get(directoryContentQueryAtom(dirId)))
client.invalidateQueries({
queryKey: directoryContentQueryKey(account.id, dirId),
})
for (const item of items) {
if (item.parentId) {
client.invalidateQueries(
get(directoryContentQueryAtom(item.parentId)),
)
client.invalidateQueries({
queryKey: directoryContentQueryKey(
account.id,
item.parentId,
),
})
}
}
},
onError: (_error, _vars, context, { client }) => {
if (context) {
context.prevDirContentMap.forEach(
(prevDirContent, parentId) => {
client.setQueryData(
get(directoryContentQueryAtom(parentId)).queryKey,
prevDirContent,
)
},
)
const account = get(currentAccountAtom)
if (account) {
context.prevDirContentMap.forEach(
(prevDirContent, parentId) => {
client.setQueryData(
directoryContentQueryKey(account.id, parentId),
prevDirContent,
)
},
)
}
}
},
}),
@@ -277,12 +344,17 @@ export const moveToTrashMutationAtom = atom((get) =>
return [...deletedFiles, ...deletedDirectories]
},
onMutate: (items, { client }) => {
const account = get(currentAccountAtom)
if (!account) {
return null
}

const trashedItems = new Map<string, Set<string>>()
for (const item of items) {
if (item.parentId) {
const s = trashedItems.get(item.parentId)
if (!s) {
trashedItems.set(item.parentId, new Set(s))
trashedItems.set(item.parentId, new Set([item.id]))
} else {
s.add(item.id)
}
@@ -291,38 +363,58 @@ export const moveToTrashMutationAtom = atom((get) =>

const prevDirContentMap = new Map<
string,
DirectoryItem[] | undefined
InfiniteData<typeof DirectoryContentResponse.infer> | undefined
>()
trashedItems.forEach((s, parentId) => {
const query = get(directoryContentQueryAtom(parentId))
const prevDirContent = client.getQueryData(query.queryKey)
client.setQueryData(
query.queryKey,
(prev) => prev?.filter((it) => !s.has(it.id)) ?? prev,
)
const key = directoryContentQueryKey(account.id, parentId)
const prevDirContent =
client.getQueryData<
InfiniteData<typeof DirectoryContentResponse.infer>
>(key)
client.setQueryData<
InfiniteData<typeof DirectoryContentResponse.infer>
>(key, (prev) => {
if (!prev) return prev
return {
...prev,
pages: prev.pages.map((page) => ({
...page,
items: page.items.filter((it) => !s.has(it.id)),
})),
}
})
prevDirContentMap.set(parentId, prevDirContent)
})
return { prevDirContentMap }
},
onSuccess: (_data, items, _result, { client }) => {
for (const item of items) {
if (item.parentId) {
client.invalidateQueries(
get(directoryContentQueryAtom(item.parentId)),
)
const account = get(currentAccountAtom)
if (account) {
for (const item of items) {
if (item.parentId) {
client.invalidateQueries({
queryKey: directoryContentQueryKey(
account.id,
item.parentId,
),
})
}
}
}
},
onError: (_error, items, context, { client }) => {
onError: (_error, _items, context, { client }) => {
if (context) {
context.prevDirContentMap.forEach(
(prevDirContent, parentId) => {
client.setQueryData(
get(directoryContentQueryAtom(parentId)).queryKey,
prevDirContent,
)
},
)
const account = get(currentAccountAtom)
if (account) {
context.prevDirContentMap.forEach(
(prevDirContent, parentId) => {
client.setQueryData(
directoryContentQueryKey(account.id, parentId),
prevDirContent,
)
},
)
}
}
},
}),

@@ -1,11 +1,15 @@
import path from "node:path"
import tailwindcss from "@tailwindcss/vite"
import { TanStackRouterVite } from "@tanstack/router-plugin/vite"
import { tanstackRouter } from "@tanstack/router-plugin/vite"
import react from "@vitejs/plugin-react"
import { defineConfig } from "vite"

export default defineConfig({
plugins: [TanStackRouterVite(), react(), tailwindcss()],
plugins: [
tanstackRouter({ target: "react", autoCodeSplitting: true }),
react(),
tailwindcss(),
],
resolve: {
alias: {
"@": path.resolve(__dirname, "./src"),