.claude/skills/vercel-react-best-practices/AGENTS.md (new file, 807 lines)
@@ -0,0 +1,807 @@
# React Best Practices

**Version 1.0.0**

> **Note:**
> This document is for agents and LLMs to follow when maintaining,
> generating, or refactoring React codebases. Humans may also find it useful.

---

## Abstract

Performance optimization guide for React applications, designed for AI agents and LLMs. Contains rules across 7 categories, prioritized by impact from critical (eliminating waterfalls, reducing bundle size) to incremental (advanced patterns).

---

## Table of Contents

1. [Eliminating Waterfalls](#1-eliminating-waterfalls) — **CRITICAL**
2. [Bundle Size Optimization](#2-bundle-size-optimization) — **CRITICAL**
3. [Client-Side Data Fetching](#3-client-side-data-fetching) — **MEDIUM-HIGH**
4. [Re-render Optimization](#4-re-render-optimization) — **MEDIUM**
5. [Rendering Performance](#5-rendering-performance) — **MEDIUM**
6. [JavaScript Performance](#6-javascript-performance) — **LOW-MEDIUM**
7. [Advanced Patterns](#7-advanced-patterns) — **LOW**

---

## 1. Eliminating Waterfalls

**Impact: CRITICAL**

Waterfalls are the #1 performance killer. Each sequential await adds full network latency.

### 1.1 Defer Await Until Needed

**Impact: HIGH (avoids blocking unused code paths)**

Move `await` operations into the branches where they're actually used.

**Incorrect: blocks both branches**

```typescript
async function handleRequest(userId: string, skipProcessing: boolean) {
  const userData = await fetchUserData(userId)

  if (skipProcessing) {
    return { skipped: true }
  }

  return processUserData(userData)
}
```

**Correct: only blocks when needed**

```typescript
async function handleRequest(userId: string, skipProcessing: boolean) {
  if (skipProcessing) {
    return { skipped: true }
  }

  const userData = await fetchUserData(userId)
  return processUserData(userData)
}
```

### 1.2 Dependency-Based Parallelization

**Impact: CRITICAL (2-10× improvement)**

For operations with partial dependencies, use `better-all` to maximize parallelism.

**Incorrect: profile waits for config unnecessarily**

```typescript
const [user, config] = await Promise.all([
  fetchUser(),
  fetchConfig()
])
const profile = await fetchProfile(user.id)
```

**Correct: config and profile run in parallel**

```typescript
import { all } from 'better-all'

const { user, config, profile } = await all({
  async user() { return fetchUser() },
  async config() { return fetchConfig() },
  async profile() {
    return fetchProfile((await this.$.user).id)
  }
})
```

### 1.3 Promise.all() for Independent Operations

**Impact: CRITICAL (2-10× improvement)**

When async operations have no interdependencies, execute them concurrently.

**Incorrect: sequential execution, 3 round trips**

```typescript
const user = await fetchUser()
const posts = await fetchPosts()
const comments = await fetchComments()
```

**Correct: parallel execution, 1 round trip**

```typescript
const [user, posts, comments] = await Promise.all([
  fetchUser(),
  fetchPosts(),
  fetchComments()
])
```

---

## 2. Bundle Size Optimization

**Impact: CRITICAL**

Reducing initial bundle size improves Time to Interactive and Largest Contentful Paint.

### 2.1 Avoid Barrel File Imports

**Impact: CRITICAL (200-800ms import cost, slow builds)**

Import directly from source files instead of barrel files.

**Incorrect: imports entire library**

```tsx
import { Check, X, Menu } from 'lucide-react'
// Loads 1,583 modules
```

**Correct: imports only what you need**

```tsx
import Check from 'lucide-react/dist/esm/icons/check'
import X from 'lucide-react/dist/esm/icons/x'
import Menu from 'lucide-react/dist/esm/icons/menu'
// Loads only 3 modules
```

Libraries commonly affected: `lucide-react`, `@mui/material`, `@mui/icons-material`, `@tabler/icons-react`, `react-icons`, `@headlessui/react`, `@radix-ui/react-*`, `lodash`, `ramda`, `date-fns`, `rxjs`, `react-use`.
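
The same direct-import pattern applies to the utility libraries in that list; for example, with lodash (per-method module paths shipped by the package):

```tsx
// Direct per-method imports instead of the lodash barrel entry point
import debounce from 'lodash/debounce'   // instead of: import { debounce } from 'lodash'
import groupBy from 'lodash/groupBy'
```

On Next.js 13.5+ you can instead keep barrel-style imports and enable `optimizePackageImports`; see the bundle-barrel-imports rule file later in this diff.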

### 2.2 Conditional Module Loading

**Impact: HIGH (loads large data only when needed)**

Load large data or modules only when a feature is activated.

```tsx
function AnimationPlayer({ enabled, setEnabled }) {
  const [frames, setFrames] = useState(null)

  useEffect(() => {
    if (enabled && !frames) {
      import('./animation-frames.js')
        .then(mod => setFrames(mod.frames))
        .catch(() => setEnabled(false))
    }
  }, [enabled, frames, setEnabled])

  if (!frames) return <Skeleton />
  return <Canvas frames={frames} />
}
```

### 2.3 Preload Based on User Intent

**Impact: MEDIUM (reduces perceived latency)**

Preload heavy bundles before they're needed.

```tsx
function EditorButton({ onClick }) {
  const preload = () => {
    void import('./monaco-editor')
  }

  return (
    <button
      onMouseEnter={preload}
      onFocus={preload}
      onClick={onClick}
    >
      Open Editor
    </button>
  )
}
```

---

## 3. Client-Side Data Fetching

**Impact: MEDIUM-HIGH**

### 3.1 Deduplicate Global Event Listeners

**Impact: MEDIUM (prevents memory leaks and duplicate handlers)**

Use a singleton pattern for global event listeners.

**Incorrect: multiple listeners**

```tsx
function useWindowResize(callback) {
  useEffect(() => {
    window.addEventListener('resize', callback)
    return () => window.removeEventListener('resize', callback)
  }, [callback])
}
```

**Correct: shared listener**

```tsx
const listeners = new Set()
let isListening = false

function useWindowResize(callback) {
  useEffect(() => {
    listeners.add(callback)

    if (!isListening) {
      isListening = true
      window.addEventListener('resize', (e) => {
        listeners.forEach(fn => fn(e))
      })
    }

    return () => listeners.delete(callback)
  }, [callback])
}
```
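
One caveat with the simplified example above: the shared window listener is registered once and never removed. A hedged variant (the cleanup logic is my addition, not part of the original) that detaches the listener when the last subscriber unmounts:

```tsx
// Sketch: shared resize listener that also detaches itself when no subscribers remain.
import { useEffect } from 'react'

type ResizeHandler = (e: UIEvent) => void

const listeners = new Set<ResizeHandler>()
let sharedHandler: ((e: UIEvent) => void) | null = null

function useWindowResize(callback: ResizeHandler) {
  useEffect(() => {
    listeners.add(callback)

    if (!sharedHandler) {
      sharedHandler = (e) => listeners.forEach(fn => fn(e))
      window.addEventListener('resize', sharedHandler)
    }

    return () => {
      listeners.delete(callback)
      if (listeners.size === 0 && sharedHandler) {
        window.removeEventListener('resize', sharedHandler)
        sharedHandler = null
      }
    }
  }, [callback])
}
```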

### 3.2 Use Passive Event Listeners

**Impact: MEDIUM (improves scroll performance)**

Use passive listeners for scroll and touch events.

```tsx
useEffect(() => {
  const handler = (e) => { /* handle scroll */ }
  window.addEventListener('scroll', handler, { passive: true })
  return () => window.removeEventListener('scroll', handler)
}, [])
```

### 3.3 Use SWR for Automatic Deduplication

**Impact: HIGH (eliminates duplicate requests)**

SWR automatically deduplicates requests to the same key.

```tsx
import useSWR from 'swr'

function UserProfile({ userId }) {
  const { data } = useSWR(`/api/users/${userId}`, fetcher)
  return <div>{data?.name}</div>
}

// Multiple components using the same key = single request
```
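
The `fetcher` used throughout this guide is never defined; a minimal sketch, assuming a plain JSON API:

```tsx
// Minimal fetcher assumed by the SWR examples in this guide (my sketch, not part of the original).
const fetcher = (url: string) =>
  fetch(url).then(res => {
    if (!res.ok) throw new Error(`Request failed: ${res.status}`)
    return res.json()
  })
```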

### 3.4 Version and Minimize localStorage Data

**Impact: MEDIUM (prevents data corruption)**

Use schema versioning for localStorage.

```typescript
const STORAGE_VERSION = 2

interface StoredData {
  version: number
  data: UserPreferences
}

function loadPreferences(): UserPreferences {
  const raw = localStorage.getItem('prefs')
  if (!raw) return defaultPreferences

  const stored: StoredData = JSON.parse(raw)
  if (stored.version !== STORAGE_VERSION) {
    return migrate(stored)
  }
  return stored.data
}
```
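
`defaultPreferences` and `migrate` are referenced above but not defined; a hedged sketch of what they might look like, with old-schema field names borrowed from the localStorage rule file later in this diff:

```typescript
// Sketch of the helpers referenced above; field names are illustrative assumptions.
interface UserPreferences {
  theme: 'light' | 'dark'
  language: string
}

const defaultPreferences: UserPreferences = { theme: 'light', language: 'en' }

function migrate(stored: { version: number; data: any }): UserPreferences {
  // Example: version 1 stored a boolean darkMode instead of a theme string.
  if (stored.version === 1) {
    return {
      theme: stored.data.darkMode ? 'dark' : 'light',
      language: stored.data.lang ?? defaultPreferences.language,
    }
  }
  return defaultPreferences
}
```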

---

## 4. Re-render Optimization

**Impact: MEDIUM**

### 4.1 Defer State Reads to Usage Point

**Impact: MEDIUM (avoids unnecessary re-renders)**

Don't subscribe to state that's only used in callbacks.

**Incorrect: re-renders on every count change**

```tsx
function Counter() {
  const count = useStore(state => state.count)
  const increment = useStore(state => state.increment)

  return <button onClick={() => increment()}>+</button>
}
```

**Correct: no re-renders from count changes**

```tsx
function Counter() {
  const increment = useStore(state => state.increment)

  return <button onClick={() => increment()}>+</button>
}
```

### 4.2 Extract to Memoized Components

**Impact: MEDIUM (isolates expensive renders)**

Extract expensive computations into memoized child components.

```tsx
const ExpensiveList = memo(function ExpensiveList({ items }) {
  return items.map(item => <ExpensiveItem key={item.id} item={item} />)
})

function Parent() {
  const [filter, setFilter] = useState('')
  const items = useItems()

  return (
    <>
      <input value={filter} onChange={e => setFilter(e.target.value)} />
      <ExpensiveList items={items} />
    </>
  )
}
```

### 4.3 Narrow Effect Dependencies

**Impact: MEDIUM (reduces effect runs)**

Use primitive dependencies instead of objects.

**Incorrect: runs on every render**

```tsx
useEffect(() => {
  fetchData(options)
}, [options]) // Object reference changes every render
```

**Correct: runs only when values change**

```tsx
useEffect(() => {
  fetchData({ page, limit })
}, [page, limit])
```

### 4.4 Subscribe to Derived State

**Impact: MEDIUM (reduces re-renders)**

Subscribe to derived booleans instead of raw values.

**Incorrect: re-renders on every count change**

```tsx
function Badge() {
  const count = useStore(state => state.notifications.length)
  return count > 0 ? <span>New</span> : null
}
```

**Correct: re-renders only when visibility changes**

```tsx
function Badge() {
  const hasNotifications = useStore(state => state.notifications.length > 0)
  return hasNotifications ? <span>New</span> : null
}
```

### 4.5 Use Functional setState Updates

**Impact: MEDIUM (stable callback references)**

Use functional updates to avoid dependency on current state.

```tsx
// Incorrect: callback changes when count changes
const increment = useCallback(() => {
  setCount(count + 1)
}, [count])

// Correct: stable callback reference
const increment = useCallback(() => {
  setCount(c => c + 1)
}, [])
```

### 4.6 Use Lazy State Initialization

**Impact: LOW-MEDIUM (avoids expensive initial computation)**

Pass a function to useState for expensive initial values.

```tsx
// Incorrect: parses on every render
const [data, setData] = useState(JSON.parse(localStorage.getItem('data')))

// Correct: parses only once
const [data, setData] = useState(() => JSON.parse(localStorage.getItem('data')))
```

### 4.7 Use Transitions for Non-Urgent Updates

**Impact: MEDIUM (keeps UI responsive)**

Use startTransition for updates that can be deferred.

```tsx
import { startTransition } from 'react'

function SearchResults() {
  const [query, setQuery] = useState('')
  const [results, setResults] = useState([])

  const handleChange = (e) => {
    setQuery(e.target.value) // Urgent: update input immediately

    startTransition(() => {
      setResults(search(e.target.value)) // Non-urgent: can be interrupted
    })
  }

  return (
    <>
      <input value={query} onChange={handleChange} />
      <ResultsList results={results} />
    </>
  )
}
```

---

## 5. Rendering Performance

**Impact: MEDIUM**

### 5.1 Animate SVG Wrapper Instead of SVG Element

**Impact: MEDIUM (avoids SVG re-parsing)**

Wrap SVGs in a div and animate the wrapper.

```tsx
// Incorrect: triggers SVG re-parsing
<motion.svg animate={{ scale: 1.2 }}>...</motion.svg>

// Correct: animates wrapper only
<motion.div animate={{ scale: 1.2 }}>
  <svg>...</svg>
</motion.div>
```

### 5.2 CSS content-visibility for Long Lists

**Impact: HIGH (skips off-screen rendering)**

Use content-visibility to skip rendering off-screen items.

```css
.list-item {
  content-visibility: auto;
  contain-intrinsic-size: 0 50px;
}
```
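
A minimal sketch of how that class might be attached to a long list in React (the class name is reused from the CSS above; the row markup is an assumption):

```tsx
// Each row gets .list-item so the browser can skip layout and paint for off-screen rows.
function MessageList({ messages }: { messages: { id: string; text: string }[] }) {
  return (
    <ul>
      {messages.map(m => (
        <li key={m.id} className="list-item">{m.text}</li>
      ))}
    </ul>
  )
}
```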

### 5.3 Hoist Static JSX Elements

**Impact: LOW-MEDIUM (avoids recreation)**

Extract static JSX outside components.

```tsx
const staticIcon = <Icon name="check" />

function ListItem({ label }) {
  return (
    <div>
      {staticIcon}
      <span>{label}</span>
    </div>
  )
}
```

### 5.4 Optimize SVG Precision

**Impact: LOW (reduces SVG size)**

Reduce coordinate precision in SVGs.

```tsx
// Before: 847 bytes
<path d="M12.7071067811865 5.29289321881345..." />

// After: 324 bytes
<path d="M12.71 5.29..." />
```

### 5.5 Use Explicit Conditional Rendering

**Impact: LOW (prevents rendering bugs)**

Use ternary instead of && for conditionals.

```tsx
// Incorrect: renders "0" when count is 0
{count && <Badge count={count} />}

// Correct: renders nothing when count is 0
{count > 0 ? <Badge count={count} /> : null}
```

---

## 6. JavaScript Performance

**Impact: LOW-MEDIUM**

### 6.1 Batch DOM CSS Changes

**Impact: MEDIUM (reduces reflows)**

Group CSS changes via classes or cssText.

```typescript
// Incorrect: 3 reflows
element.style.width = '100px'
element.style.height = '100px'
element.style.margin = '10px'

// Correct: 1 reflow
element.style.cssText = 'width: 100px; height: 100px; margin: 10px;'
```

### 6.2 Build Index Maps for Repeated Lookups

**Impact: HIGH for large datasets**

Build a Map for O(1) lookups instead of O(n) array searches.

```typescript
// Incorrect: O(n) for each lookup
users.find(u => u.id === targetId)

// Correct: O(1) lookup
const userMap = new Map(users.map(u => [u.id, u]))
userMap.get(targetId)
```
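
Inside a component, the index map only pays off if it isn't rebuilt on every render; a hedged sketch using `useMemo` (component and field names are illustrative):

```tsx
// Build the Map once per `users` array, then do O(1) lookups per rendered row.
import { useMemo } from 'react'

type User = { id: string; name: string }

function OrderList({ orders, users }: { orders: { id: string; userId: string }[]; users: User[] }) {
  const userMap = useMemo(() => new Map(users.map(u => [u.id, u])), [users])

  return (
    <ul>
      {orders.map(o => (
        <li key={o.id}>{userMap.get(o.userId)?.name ?? 'Unknown user'}</li>
      ))}
    </ul>
  )
}
```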

### 6.3 Cache Property Access in Loops

**Impact: LOW-MEDIUM**

Cache object properties accessed in loops.

```typescript
// Incorrect
for (let i = 0; i < items.length; i++) {
  process(items[i])
}

// Correct
const len = items.length
for (let i = 0; i < len; i++) {
  process(items[i])
}
```

### 6.4 Cache Repeated Function Calls

**Impact: MEDIUM**

Cache expensive function results.

```typescript
const cache = new Map()

function expensiveComputation(input) {
  if (cache.has(input)) return cache.get(input)
  const result = doExpensiveWork(input) // placeholder for the actual expensive computation
  cache.set(input, result)
  return result
}
```

### 6.5 Cache Storage API Calls

**Impact: MEDIUM**

Cache localStorage/sessionStorage reads.

```typescript
let cachedTheme = null

function getTheme() {
  if (cachedTheme === null) {
    cachedTheme = localStorage.getItem('theme') || 'light'
  }
  return cachedTheme
}
```

### 6.6 Combine Multiple Array Iterations

**Impact: LOW-MEDIUM**

Combine filter/map into a single loop.

```typescript
// Incorrect: 2 iterations
const result = items
  .filter(item => item.active)
  .map(item => item.value)

// Correct: 1 iteration
const result = []
for (const item of items) {
  if (item.active) result.push(item.value)
}
```

### 6.7 Early Length Check for Array Comparisons

**Impact: LOW**

Check array length before expensive comparisons.

```typescript
function arraysEqual(a, b) {
  if (a.length !== b.length) return false
  return a.every((val, i) => val === b[i])
}
```

### 6.8 Early Return from Functions

**Impact: LOW**

Return early to avoid unnecessary work.

```typescript
function processUser(user) {
  if (!user) return null
  if (!user.active) return null

  // Process active user
}
```

### 6.9 Hoist RegExp Creation

**Impact: LOW-MEDIUM**

Create RegExp outside loops.

```typescript
// Incorrect: creates regex on each iteration
items.forEach(item => {
  if (/pattern/.test(item)) { /* ... */ }
})

// Correct: reuses regex
const pattern = /pattern/
items.forEach(item => {
  if (pattern.test(item)) { /* ... */ }
})
```

### 6.10 Use Loop for Min/Max Instead of Sort

**Impact: MEDIUM for large arrays**

Use a loop instead of sorting to find min/max.

```typescript
// Incorrect: O(n log n)
const max = items.sort((a, b) => b - a)[0]

// Correct: O(n)
const max = Math.max(...items)
// Or for very large arrays:
let max = items[0]
for (let i = 1; i < items.length; i++) {
  if (items[i] > max) max = items[i]
}
```

### 6.11 Use Set/Map for O(1) Lookups

**Impact: HIGH for repeated lookups**

Use Set for membership checks, Map for key-value lookups.

```typescript
// Incorrect: O(n)
const isValid = validIds.includes(id)

// Correct: O(1)
const validIdSet = new Set(validIds)
const isValid = validIdSet.has(id)
```

### 6.12 Use toSorted() for Immutability

**Impact: LOW**

Use toSorted() instead of sort() to avoid mutation.

```typescript
// Mutates original
const sorted = items.sort((a, b) => a - b)

// Returns new array
const sorted = items.toSorted((a, b) => a - b)
```
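
`Array.prototype.toSorted()` requires ES2023-era runtimes (roughly Node 20+ and current evergreen browsers); a fallback with the same no-mutation behavior for older targets:

```typescript
// Copy first, then sort the copy, leaving the original array untouched.
const sorted = [...items].sort((a, b) => a - b)
```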

---

## 7. Advanced Patterns

**Impact: LOW**

### 7.1 Store Event Handlers in Refs

**Impact: LOW (avoids effect re-runs)**

Store handlers in refs to avoid effect dependencies.

```tsx
function useEventListener(event, handler) {
  const handlerRef = useRef(handler)
  handlerRef.current = handler

  useEffect(() => {
    const listener = (e) => handlerRef.current(e)
    window.addEventListener(event, listener)
    return () => window.removeEventListener(event, listener)
  }, [event]) // handler not in deps
}
```

### 7.2 useLatest for Stable Callback Refs

**Impact: LOW**

Create a useLatest hook for stable references.

```tsx
function useLatest(value) {
  const ref = useRef(value)
  ref.current = value
  return ref
}

function useInterval(callback, delay) {
  const callbackRef = useLatest(callback)

  useEffect(() => {
    const id = setInterval(() => callbackRef.current(), delay)
    return () => clearInterval(id)
  }, [delay])
}
```

---

## References

1. [https://github.com/shuding/better-all](https://github.com/shuding/better-all)
2. [https://vercel.com/blog/how-we-optimized-package-imports-in-next-js](https://vercel.com/blog/how-we-optimized-package-imports-in-next-js)

.claude/skills/vercel-react-best-practices/SKILL.md (new file, 111 lines)
@@ -0,0 +1,111 @@
---
name: react-best-practices
description: React performance optimization guidelines. Use when writing, reviewing, or refactoring React code to ensure optimal performance patterns. Triggers on tasks involving React components, data fetching, bundle optimization, or performance improvements.
license: MIT
metadata:
  author: vercel
  version: "1.0.0"
---

# React Best Practices

Performance optimization guide for React applications. Contains rules across 7 categories, prioritized by impact.

## When to Apply

Reference these guidelines when:

- Writing new React components
- Implementing data fetching
- Reviewing code for performance issues
- Refactoring existing React code
- Optimizing bundle size or load times

## Rule Categories by Priority

| Priority | Category | Impact | Prefix |
|----------|----------|--------|--------|
| 1 | Eliminating Waterfalls | CRITICAL | `async-` |
| 2 | Bundle Size Optimization | CRITICAL | `bundle-` |
| 3 | Client-Side Data Fetching | MEDIUM-HIGH | `client-` |
| 4 | Re-render Optimization | MEDIUM | `rerender-` |
| 5 | Rendering Performance | MEDIUM | `rendering-` |
| 6 | JavaScript Performance | LOW-MEDIUM | `js-` |
| 7 | Advanced Patterns | LOW | `advanced-` |

## Quick Reference

### 1. Eliminating Waterfalls (CRITICAL)

- `async-defer-await` - Move await into branches where actually used
- `async-parallel` - Use Promise.all() for independent operations
- `async-dependencies` - Use better-all for partial dependencies

### 2. Bundle Size Optimization (CRITICAL)

- `bundle-barrel-imports` - Import directly, avoid barrel files
- `bundle-conditional` - Load modules only when feature is activated
- `bundle-preload` - Preload on hover/focus for perceived speed

### 3. Client-Side Data Fetching (MEDIUM-HIGH)

- `client-swr-dedup` - Use SWR for automatic request deduplication
- `client-event-listeners` - Deduplicate global event listeners
- `client-localstorage-schema` - Schema validation for localStorage
- `client-passive-event-listeners` - Use passive listeners for scroll/touch

### 4. Re-render Optimization (MEDIUM)

- `rerender-defer-reads` - Don't subscribe to state only used in callbacks
- `rerender-memo` - Extract expensive work into memoized components
- `rerender-dependencies` - Use primitive dependencies in effects
- `rerender-derived-state` - Subscribe to derived booleans, not raw values
- `rerender-functional-setstate` - Use functional setState for stable callbacks
- `rerender-lazy-state-init` - Pass function to useState for expensive values
- `rerender-transitions` - Use startTransition for non-urgent updates

### 5. Rendering Performance (MEDIUM)

- `rendering-animate-svg-wrapper` - Animate div wrapper, not SVG element
- `rendering-content-visibility` - Use content-visibility for long lists
- `rendering-hoist-jsx` - Extract static JSX outside components
- `rendering-svg-precision` - Reduce SVG coordinate precision
- `rendering-conditional-render` - Use ternary, not && for conditionals

### 6. JavaScript Performance (LOW-MEDIUM)

- `js-batch-dom-css` - Group CSS changes via classes or cssText
- `js-index-maps` - Build Map for repeated lookups
- `js-cache-property-access` - Cache object properties in loops
- `js-cache-function-results` - Cache function results in module-level Map
- `js-cache-storage` - Cache localStorage/sessionStorage reads
- `js-combine-iterations` - Combine multiple filter/map into one loop
- `js-length-check-first` - Check array length before expensive comparison
- `js-early-exit` - Return early from functions
- `js-hoist-regexp` - Hoist RegExp creation outside loops
- `js-min-max-loop` - Use loop for min/max instead of sort
- `js-set-map-lookups` - Use Set/Map for O(1) lookups
- `js-tosorted-immutable` - Use toSorted() for immutability

### 7. Advanced Patterns (LOW)

- `advanced-event-handler-refs` - Store event handlers in refs
- `advanced-use-latest` - useLatest for stable callback refs

## How to Use

Read individual rule files for detailed explanations and code examples:

```
rules/async-parallel.md
rules/bundle-barrel-imports.md
```

Each rule file contains:

- Brief explanation of why it matters
- Incorrect code example with explanation
- Correct code example with explanation
- Additional context and references

## Full Compiled Document

For the complete guide with all rules expanded: `AGENTS.md`

@@ -0,0 +1,55 @@
---
title: Store Event Handlers in Refs
impact: LOW
impactDescription: stable subscriptions
tags: advanced, hooks, refs, event-handlers, optimization
---

## Store Event Handlers in Refs

Store callbacks in refs when used in effects that shouldn't re-subscribe on callback changes.

**Incorrect (re-subscribes on every render):**

```tsx
function useWindowEvent(event: string, handler: () => void) {
  useEffect(() => {
    window.addEventListener(event, handler)
    return () => window.removeEventListener(event, handler)
  }, [event, handler])
}
```

**Correct (stable subscription):**

```tsx
function useWindowEvent(event: string, handler: () => void) {
  const handlerRef = useRef(handler)
  useEffect(() => {
    handlerRef.current = handler
  }, [handler])

  useEffect(() => {
    const listener = () => handlerRef.current()
    window.addEventListener(event, listener)
    return () => window.removeEventListener(event, listener)
  }, [event])
}
```

**Alternative: use `useEffectEvent` if you're on latest React:**

```tsx
import { useEffectEvent } from 'react'

function useWindowEvent(event: string, handler: () => void) {
  const onEvent = useEffectEvent(handler)

  useEffect(() => {
    window.addEventListener(event, onEvent)
    return () => window.removeEventListener(event, onEvent)
  }, [event])
}
```

`useEffectEvent` provides a cleaner API for the same pattern: it creates a stable function reference that always calls the latest version of the handler.

@@ -0,0 +1,49 @@
---
title: useLatest for Stable Callback Refs
impact: LOW
impactDescription: prevents effect re-runs
tags: advanced, hooks, useLatest, refs, optimization
---

## useLatest for Stable Callback Refs

Access latest values in callbacks without adding them to dependency arrays. Prevents effect re-runs while avoiding stale closures.

**Implementation:**

```typescript
function useLatest<T>(value: T) {
  const ref = useRef(value)
  useEffect(() => {
    ref.current = value
  }, [value])
  return ref
}
```

**Incorrect (effect re-runs on every callback change):**

```tsx
function SearchInput({ onSearch }: { onSearch: (q: string) => void }) {
  const [query, setQuery] = useState('')

  useEffect(() => {
    const timeout = setTimeout(() => onSearch(query), 300)
    return () => clearTimeout(timeout)
  }, [query, onSearch])
}
```

**Correct (stable effect, fresh callback):**

```tsx
function SearchInput({ onSearch }: { onSearch: (q: string) => void }) {
  const [query, setQuery] = useState('')
  const onSearchRef = useLatest(onSearch)

  useEffect(() => {
    const timeout = setTimeout(() => onSearchRef.current(query), 300)
    return () => clearTimeout(timeout)
  }, [query])
}
```

@@ -0,0 +1,80 @@
---
title: Defer Await Until Needed
impact: HIGH
impactDescription: avoids blocking unused code paths
tags: async, await, conditional, optimization
---

## Defer Await Until Needed

Move `await` operations into the branches where they're actually used to avoid blocking code paths that don't need them.

**Incorrect (blocks both branches):**

```typescript
async function handleRequest(userId: string, skipProcessing: boolean) {
  const userData = await fetchUserData(userId)

  if (skipProcessing) {
    // Returns immediately but still waited for userData
    return { skipped: true }
  }

  // Only this branch uses userData
  return processUserData(userData)
}
```

**Correct (only blocks when needed):**

```typescript
async function handleRequest(userId: string, skipProcessing: boolean) {
  if (skipProcessing) {
    // Returns immediately without waiting
    return { skipped: true }
  }

  // Fetch only when needed
  const userData = await fetchUserData(userId)
  return processUserData(userData)
}
```

**Another example (early return optimization):**

```typescript
// Incorrect: always fetches permissions
async function updateResource(resourceId: string, userId: string) {
  const permissions = await fetchPermissions(userId)
  const resource = await getResource(resourceId)

  if (!resource) {
    return { error: 'Not found' }
  }

  if (!permissions.canEdit) {
    return { error: 'Forbidden' }
  }

  return await updateResourceData(resource, permissions)
}

// Correct: fetches only when needed
async function updateResource(resourceId: string, userId: string) {
  const resource = await getResource(resourceId)

  if (!resource) {
    return { error: 'Not found' }
  }

  const permissions = await fetchPermissions(userId)

  if (!permissions.canEdit) {
    return { error: 'Forbidden' }
  }

  return await updateResourceData(resource, permissions)
}
```

This optimization is especially valuable when the skipped branch is frequently taken, or when the deferred operation is expensive.

@@ -0,0 +1,36 @@
---
title: Dependency-Based Parallelization
impact: CRITICAL
impactDescription: 2-10× improvement
tags: async, parallelization, dependencies, better-all
---

## Dependency-Based Parallelization

For operations with partial dependencies, use `better-all` to maximize parallelism. It automatically starts each task at the earliest possible moment.

**Incorrect (profile waits for config unnecessarily):**

```typescript
const [user, config] = await Promise.all([
  fetchUser(),
  fetchConfig()
])
const profile = await fetchProfile(user.id)
```

**Correct (config and profile run in parallel):**

```typescript
import { all } from 'better-all'

const { user, config, profile } = await all({
  async user() { return fetchUser() },
  async config() { return fetchConfig() },
  async profile() {
    return fetchProfile((await this.$.user).id)
  }
})
```

Reference: [https://github.com/shuding/better-all](https://github.com/shuding/better-all)

@@ -0,0 +1,28 @@
---
title: Promise.all() for Independent Operations
impact: CRITICAL
impactDescription: 2-10× improvement
tags: async, parallelization, promises, waterfalls
---

## Promise.all() for Independent Operations

When async operations have no interdependencies, execute them concurrently using `Promise.all()`.

**Incorrect (sequential execution, 3 round trips):**

```typescript
const user = await fetchUser()
const posts = await fetchPosts()
const comments = await fetchComments()
```

**Correct (parallel execution, 1 round trip):**

```typescript
const [user, posts, comments] = await Promise.all([
  fetchUser(),
  fetchPosts(),
  fetchComments()
])
```

@@ -0,0 +1,59 @@
---
title: Avoid Barrel File Imports
impact: CRITICAL
impactDescription: 200-800ms import cost, slow builds
tags: bundle, imports, tree-shaking, barrel-files, performance
---

## Avoid Barrel File Imports

Import directly from source files instead of barrel files to avoid loading thousands of unused modules. **Barrel files** are entry points that re-export multiple modules (e.g., `index.js` that does `export * from './module'`).

Popular icon and component libraries can have **up to 10,000 re-exports** in their entry file. For many React packages, **it takes 200-800ms just to import them**, affecting both development speed and production cold starts.

**Why tree-shaking doesn't help:** When a library is marked as external (not bundled), the bundler can't optimize it. If you bundle it to enable tree-shaking, builds become substantially slower because the bundler has to analyze the entire module graph.

**Incorrect (imports entire library):**

```tsx
import { Check, X, Menu } from 'lucide-react'
// Loads 1,583 modules, takes ~2.8s extra in dev
// Runtime cost: 200-800ms on every cold start

import { Button, TextField } from '@mui/material'
// Loads 2,225 modules, takes ~4.2s extra in dev
```

**Correct (imports only what you need):**

```tsx
import Check from 'lucide-react/dist/esm/icons/check'
import X from 'lucide-react/dist/esm/icons/x'
import Menu from 'lucide-react/dist/esm/icons/menu'
// Loads only 3 modules (~2KB vs ~1MB)

import Button from '@mui/material/Button'
import TextField from '@mui/material/TextField'
// Loads only what you use
```

**Alternative (Next.js 13.5+):**

```js
// next.config.js - use optimizePackageImports
module.exports = {
  experimental: {
    optimizePackageImports: ['lucide-react', '@mui/material']
  }
}

// Then you can keep the ergonomic barrel imports:
import { Check, X, Menu } from 'lucide-react'
// Automatically transformed to direct imports at build time
```

Direct imports provide 15-70% faster dev boot, 28% faster builds, 40% faster cold starts, and significantly faster HMR.

Libraries commonly affected: `lucide-react`, `@mui/material`, `@mui/icons-material`, `@tabler/icons-react`, `react-icons`, `@headlessui/react`, `@radix-ui/react-*`, `lodash`, `ramda`, `date-fns`, `rxjs`, `react-use`.

Reference: [How we optimized package imports in Next.js](https://vercel.com/blog/how-we-optimized-package-imports-in-next-js)

@@ -0,0 +1,31 @@
---
title: Conditional Module Loading
impact: HIGH
impactDescription: loads large data only when needed
tags: bundle, conditional-loading, lazy-loading
---

## Conditional Module Loading

Load large data or modules only when a feature is activated.

**Example (lazy-load animation frames):**

```tsx
function AnimationPlayer({ enabled, setEnabled }: { enabled: boolean; setEnabled: React.Dispatch<React.SetStateAction<boolean>> }) {
  const [frames, setFrames] = useState<Frame[] | null>(null)

  useEffect(() => {
    if (enabled && !frames && typeof window !== 'undefined') {
      import('./animation-frames.js')
        .then(mod => setFrames(mod.frames))
        .catch(() => setEnabled(false))
    }
  }, [enabled, frames, setEnabled])

  if (!frames) return <Skeleton />
  return <Canvas frames={frames} />
}
```

The `typeof window !== 'undefined'` check prevents bundling this module for SSR, optimizing server bundle size and build speed.

@@ -0,0 +1,50 @@
---
title: Preload Based on User Intent
impact: MEDIUM
impactDescription: reduces perceived latency
tags: bundle, preload, user-intent, hover
---

## Preload Based on User Intent

Preload heavy bundles before they're needed to reduce perceived latency.

**Example (preload on hover/focus):**

```tsx
function EditorButton({ onClick }: { onClick: () => void }) {
  const preload = () => {
    if (typeof window !== 'undefined') {
      void import('./monaco-editor')
    }
  }

  return (
    <button
      onMouseEnter={preload}
      onFocus={preload}
      onClick={onClick}
    >
      Open Editor
    </button>
  )
}
```

**Example (preload when feature flag is enabled):**

```tsx
function FlagsProvider({ children, flags }: Props) {
  useEffect(() => {
    if (flags.editorEnabled && typeof window !== 'undefined') {
      void import('./monaco-editor').then(mod => mod.init())
    }
  }, [flags.editorEnabled])

  return (
    <FlagsContext.Provider value={flags}>
      {children}
    </FlagsContext.Provider>
  )
}
```

The `typeof window !== 'undefined'` check prevents bundling preloaded modules for SSR, optimizing server bundle size and build speed.

@@ -0,0 +1,74 @@
---
title: Deduplicate Global Event Listeners
impact: LOW
impactDescription: single listener for N components
tags: client, swr, event-listeners, subscription
---

## Deduplicate Global Event Listeners

Use `useSWRSubscription()` to share global event listeners across component instances.

**Incorrect (N instances = N listeners):**

```tsx
function useKeyboardShortcut(key: string, callback: () => void) {
  useEffect(() => {
    const handler = (e: KeyboardEvent) => {
      if (e.metaKey && e.key === key) {
        callback()
      }
    }
    window.addEventListener('keydown', handler)
    return () => window.removeEventListener('keydown', handler)
  }, [key, callback])
}
```

When using the `useKeyboardShortcut` hook multiple times, each instance will register a new listener.

**Correct (N instances = 1 listener):**

```tsx
import useSWRSubscription from 'swr/subscription'

// Module-level Map to track callbacks per key
const keyCallbacks = new Map<string, Set<() => void>>()

function useKeyboardShortcut(key: string, callback: () => void) {
  // Register this callback in the Map
  useEffect(() => {
    if (!keyCallbacks.has(key)) {
      keyCallbacks.set(key, new Set())
    }
    keyCallbacks.get(key)!.add(callback)

    return () => {
      const set = keyCallbacks.get(key)
      if (set) {
        set.delete(callback)
        if (set.size === 0) {
          keyCallbacks.delete(key)
        }
      }
    }
  }, [key, callback])

  useSWRSubscription('global-keydown', () => {
    const handler = (e: KeyboardEvent) => {
      if (e.metaKey && keyCallbacks.has(e.key)) {
        keyCallbacks.get(e.key)!.forEach(cb => cb())
      }
    }
    window.addEventListener('keydown', handler)
    return () => window.removeEventListener('keydown', handler)
  })
}

function Profile() {
  // Multiple shortcuts will share the same listener
  useKeyboardShortcut('p', () => { /* ... */ })
  useKeyboardShortcut('k', () => { /* ... */ })
  // ...
}
```

@@ -0,0 +1,71 @@
---
title: Version and Minimize localStorage Data
impact: MEDIUM
impactDescription: prevents schema conflicts, reduces storage size
tags: client, localStorage, storage, versioning, data-minimization
---

## Version and Minimize localStorage Data

Add version prefix to keys and store only needed fields. Prevents schema conflicts and accidental storage of sensitive data.

**Incorrect:**

```typescript
// No version, stores everything, no error handling
localStorage.setItem('userConfig', JSON.stringify(fullUserObject))
const data = localStorage.getItem('userConfig')
```

**Correct:**

```typescript
const VERSION = 'v2'

function saveConfig(config: { theme: string; language: string }) {
  try {
    localStorage.setItem(`userConfig:${VERSION}`, JSON.stringify(config))
  } catch {
    // Throws in incognito/private browsing, quota exceeded, or disabled
  }
}

function loadConfig() {
  try {
    const data = localStorage.getItem(`userConfig:${VERSION}`)
    return data ? JSON.parse(data) : null
  } catch {
    return null
  }
}

// Migration from v1 to v2
function migrate() {
  try {
    const v1 = localStorage.getItem('userConfig:v1')
    if (v1) {
      const old = JSON.parse(v1)
      saveConfig({ theme: old.darkMode ? 'dark' : 'light', language: old.lang })
      localStorage.removeItem('userConfig:v1')
    }
  } catch {}
}
```

**Store minimal fields from server responses:**

```typescript
// User object has 20+ fields, only store what UI needs
function cachePrefs(user: FullUser) {
  try {
    localStorage.setItem('prefs:v1', JSON.stringify({
      theme: user.preferences.theme,
      notifications: user.preferences.notifications
    }))
  } catch {}
}
```

**Always wrap in try-catch:** `getItem()` and `setItem()` throw in incognito/private browsing (Safari, Firefox), when quota exceeded, or when disabled.

**Benefits:** Schema evolution via versioning, reduced storage size, prevents storing tokens/PII/internal flags.

@@ -0,0 +1,48 @@
---
title: Use Passive Event Listeners for Scrolling Performance
impact: MEDIUM
impactDescription: eliminates scroll delay caused by event listeners
tags: client, event-listeners, scrolling, performance, touch, wheel
---

## Use Passive Event Listeners for Scrolling Performance

Add `{ passive: true }` to touch and wheel event listeners to enable immediate scrolling. Browsers normally wait for listeners to finish to check if `preventDefault()` is called, causing scroll delay.

**Incorrect:**

```typescript
useEffect(() => {
  const handleTouch = (e: TouchEvent) => console.log(e.touches[0].clientX)
  const handleWheel = (e: WheelEvent) => console.log(e.deltaY)

  document.addEventListener('touchstart', handleTouch)
  document.addEventListener('wheel', handleWheel)

  return () => {
    document.removeEventListener('touchstart', handleTouch)
    document.removeEventListener('wheel', handleWheel)
  }
}, [])
```

**Correct:**

```typescript
useEffect(() => {
  const handleTouch = (e: TouchEvent) => console.log(e.touches[0].clientX)
  const handleWheel = (e: WheelEvent) => console.log(e.deltaY)

  document.addEventListener('touchstart', handleTouch, { passive: true })
  document.addEventListener('wheel', handleWheel, { passive: true })

  return () => {
    document.removeEventListener('touchstart', handleTouch)
    document.removeEventListener('wheel', handleWheel)
  }
}, [])
```

**Use passive when:** tracking/analytics, logging, any listener that doesn't call `preventDefault()`.

**Don't use passive when:** implementing custom swipe gestures, custom zoom controls, or any listener that needs `preventDefault()`.

@@ -0,0 +1,56 @@
---
title: Use SWR for Automatic Deduplication
impact: MEDIUM-HIGH
impactDescription: automatic deduplication
tags: client, swr, deduplication, data-fetching
---

## Use SWR for Automatic Deduplication

SWR enables request deduplication, caching, and revalidation across component instances.

**Incorrect (no deduplication, each instance fetches):**

```tsx
function UserList() {
  const [users, setUsers] = useState([])
  useEffect(() => {
    fetch('/api/users')
      .then(r => r.json())
      .then(setUsers)
  }, [])
}
```

**Correct (multiple instances share one request):**

```tsx
import useSWR from 'swr'

function UserList() {
  const { data: users } = useSWR('/api/users', fetcher)
}
```

**For immutable data:**

```tsx
import useSWRImmutable from 'swr/immutable'

function StaticContent() {
  const { data } = useSWRImmutable('/api/config', fetcher)
}
```

**For mutations:**

```tsx
import useSWRMutation from 'swr/mutation'

function UpdateButton() {
  const { trigger } = useSWRMutation('/api/user', updateUser)
  return <button onClick={() => trigger()}>Update</button>
}
```

Reference: [https://swr.vercel.app](https://swr.vercel.app)

@@ -0,0 +1,82 @@
---
title: Batch DOM CSS Changes
impact: MEDIUM
impactDescription: reduces reflows/repaints
tags: javascript, dom, css, performance, reflow
---

## Batch DOM CSS Changes

Avoid changing styles one property at a time. Group multiple CSS changes together via classes or `cssText` to minimize browser reflows.

**Incorrect (multiple reflows):**

```typescript
function updateElementStyles(element: HTMLElement) {
  // Each line triggers a reflow
  element.style.width = '100px'
  element.style.height = '200px'
  element.style.backgroundColor = 'blue'
  element.style.border = '1px solid black'
}
```

**Correct (add class - single reflow):**

```typescript
// CSS file
.highlighted-box {
  width: 100px;
  height: 200px;
  background-color: blue;
  border: 1px solid black;
}

// JavaScript
function updateElementStyles(element: HTMLElement) {
  element.classList.add('highlighted-box')
}
```

**Correct (change cssText - single reflow):**

```typescript
function updateElementStyles(element: HTMLElement) {
  element.style.cssText = `
    width: 100px;
    height: 200px;
    background-color: blue;
    border: 1px solid black;
  `
}
```

**React example:**

```tsx
// Incorrect: changing styles one by one
function Box({ isHighlighted }: { isHighlighted: boolean }) {
  const ref = useRef<HTMLDivElement>(null)

  useEffect(() => {
    if (ref.current && isHighlighted) {
      ref.current.style.width = '100px'
      ref.current.style.height = '200px'
      ref.current.style.backgroundColor = 'blue'
    }
  }, [isHighlighted])

  return <div ref={ref}>Content</div>
}

// Correct: toggle class
function Box({ isHighlighted }: { isHighlighted: boolean }) {
  return (
    <div className={isHighlighted ? 'highlighted-box' : ''}>
      Content
    </div>
  )
}
```

Prefer CSS classes over inline styles when possible. Classes are cached by the browser and provide better separation of concerns.

@@ -0,0 +1,80 @@
---
title: Cache Repeated Function Calls
impact: MEDIUM
impactDescription: avoid redundant computation
tags: javascript, cache, memoization, performance
---

## Cache Repeated Function Calls

Use a module-level Map to cache function results when the same function is called repeatedly with the same inputs during render.

**Incorrect (redundant computation):**

```tsx
function ProjectList({ projects }: { projects: Project[] }) {
  return (
    <div>
      {projects.map(project => {
        // slugify() called 100+ times for same project names
        const slug = slugify(project.name)

        return <ProjectCard key={project.id} slug={slug} />
      })}
    </div>
  )
}
```

**Correct (cached results):**

```tsx
// Module-level cache
const slugifyCache = new Map<string, string>()

function cachedSlugify(text: string): string {
  if (slugifyCache.has(text)) {
    return slugifyCache.get(text)!
  }
  const result = slugify(text)
  slugifyCache.set(text, result)
  return result
}

function ProjectList({ projects }: { projects: Project[] }) {
  return (
    <div>
      {projects.map(project => {
        // Computed only once per unique project name
        const slug = cachedSlugify(project.name)

        return <ProjectCard key={project.id} slug={slug} />
      })}
    </div>
  )
}
```

**Simpler pattern for single-value functions:**

```typescript
let isLoggedInCache: boolean | null = null

function isLoggedIn(): boolean {
  if (isLoggedInCache !== null) {
    return isLoggedInCache
  }

  isLoggedInCache = document.cookie.includes('auth=')
  return isLoggedInCache
}

// Clear cache when auth changes
function onAuthChange() {
  isLoggedInCache = null
}
```

Use a Map (not a hook) so it works everywhere: utilities, event handlers, not just React components.

Reference: [How we made the Vercel Dashboard twice as fast](https://vercel.com/blog/how-we-made-the-vercel-dashboard-twice-as-fast)

@@ -0,0 +1,28 @@
---
title: Cache Property Access in Loops
impact: LOW-MEDIUM
impactDescription: reduces lookups
tags: javascript, loops, optimization, caching
---

## Cache Property Access in Loops

Cache object property lookups in hot paths.

**Incorrect (3 lookups × N iterations):**

```typescript
for (let i = 0; i < arr.length; i++) {
  process(obj.config.settings.value)
}
```

**Correct (1 lookup total):**

```typescript
const value = obj.config.settings.value
const len = arr.length
for (let i = 0; i < len; i++) {
  process(value)
}
```

@@ -0,0 +1,70 @@
---
title: Cache Storage API Calls
impact: LOW-MEDIUM
impactDescription: reduces expensive I/O
tags: javascript, localStorage, storage, caching, performance
---

## Cache Storage API Calls

`localStorage`, `sessionStorage`, and `document.cookie` are synchronous and expensive. Cache reads in memory.

**Incorrect (reads storage on every call):**

```typescript
function getTheme() {
  return localStorage.getItem('theme') ?? 'light'
}
// Called 10 times = 10 storage reads
```

**Correct (Map cache):**

```typescript
const storageCache = new Map<string, string | null>()

function getLocalStorage(key: string) {
  if (!storageCache.has(key)) {
    storageCache.set(key, localStorage.getItem(key))
  }
  return storageCache.get(key)
}

function setLocalStorage(key: string, value: string) {
  localStorage.setItem(key, value)
  storageCache.set(key, value) // keep cache in sync
}
```

Use a Map (not a hook) so it works everywhere: utilities, event handlers, not just React components.

**Cookie caching:**

```typescript
let cookieCache: Record<string, string> | null = null

function getCookie(name: string) {
  if (!cookieCache) {
    cookieCache = Object.fromEntries(
      document.cookie.split('; ').map(c => c.split('='))
    )
  }
  return cookieCache[name]
}
```

**Important (invalidate on external changes):**

If storage can change externally (another tab, server-set cookies), invalidate cache:

```typescript
window.addEventListener('storage', (e) => {
  if (e.key) storageCache.delete(e.key)
})

document.addEventListener('visibilitychange', () => {
  if (document.visibilityState === 'visible') {
    storageCache.clear()
  }
})
```
@@ -0,0 +1,32 @@
|
||||
---
|
||||
title: Combine Multiple Array Iterations
|
||||
impact: LOW-MEDIUM
|
||||
impactDescription: reduces iterations
|
||||
tags: javascript, arrays, loops, performance
|
||||
---
|
||||
|
||||
## Combine Multiple Array Iterations
|
||||
|
||||
Multiple `.filter()` or `.map()` calls iterate the array multiple times. Combine into one loop.
|
||||
|
||||
**Incorrect (3 iterations):**
|
||||
|
||||
```typescript
|
||||
const admins = users.filter(u => u.isAdmin)
|
||||
const testers = users.filter(u => u.isTester)
|
||||
const inactive = users.filter(u => !u.isActive)
|
||||
```
|
||||
|
||||
**Correct (1 iteration):**
|
||||
|
||||
```typescript
|
||||
const admins: User[] = []
|
||||
const testers: User[] = []
|
||||
const inactive: User[] = []
|
||||
|
||||
for (const user of users) {
|
||||
if (user.isAdmin) admins.push(user)
|
||||
if (user.isTester) testers.push(user)
|
||||
if (!user.isActive) inactive.push(user)
|
||||
}
|
||||
```
|
||||
@@ -0,0 +1,50 @@
|
||||
---
|
||||
title: Early Return from Functions
|
||||
impact: LOW-MEDIUM
|
||||
impactDescription: avoids unnecessary computation
|
||||
tags: javascript, functions, optimization, early-return
|
||||
---
|
||||
|
||||
## Early Return from Functions
|
||||
|
||||
Return early when result is determined to skip unnecessary processing.
|
||||
|
||||
**Incorrect (processes all items even after finding answer):**
|
||||
|
||||
```typescript
|
||||
function validateUsers(users: User[]) {
|
||||
let hasError = false
|
||||
let errorMessage = ''
|
||||
|
||||
for (const user of users) {
|
||||
if (!user.email) {
|
||||
hasError = true
|
||||
errorMessage = 'Email required'
|
||||
}
|
||||
if (!user.name) {
|
||||
hasError = true
|
||||
errorMessage = 'Name required'
|
||||
}
|
||||
// Continues checking all users even after error found
|
||||
}
|
||||
|
||||
return hasError ? { valid: false, error: errorMessage } : { valid: true }
|
||||
}
|
||||
```
|
||||
|
||||
**Correct (returns immediately on first error):**
|
||||
|
||||
```typescript
|
||||
function validateUsers(users: User[]) {
|
||||
for (const user of users) {
|
||||
if (!user.email) {
|
||||
return { valid: false, error: 'Email required' }
|
||||
}
|
||||
if (!user.name) {
|
||||
return { valid: false, error: 'Name required' }
|
||||
}
|
||||
}
|
||||
|
||||
return { valid: true }
|
||||
}
|
||||
```
|
||||
@@ -0,0 +1,45 @@
|
||||
---
|
||||
title: Hoist RegExp Creation
|
||||
impact: LOW-MEDIUM
|
||||
impactDescription: avoids recreation
|
||||
tags: javascript, regexp, optimization, memoization
|
||||
---
|
||||
|
||||
## Hoist RegExp Creation
|
||||
|
||||
Don't create RegExp inside render. Hoist to module scope or memoize with `useMemo()`.
|
||||
|
||||
**Incorrect (new RegExp every render):**
|
||||
|
||||
```tsx
|
||||
function Highlighter({ text, query }: Props) {
|
||||
const regex = new RegExp(`(${query})`, 'gi')
|
||||
const parts = text.split(regex)
|
||||
return <>{parts.map((part, i) => ...)}</>
|
||||
}
|
||||
```
|
||||
|
||||
**Correct (memoize or hoist):**
|
||||
|
||||
```tsx
|
||||
const EMAIL_REGEX = /^[^\s@]+@[^\s@]+\.[^\s@]+$/
|
||||
|
||||
function Highlighter({ text, query }: Props) {
|
||||
const regex = useMemo(
|
||||
() => new RegExp(`(${escapeRegex(query)})`, 'gi'),
|
||||
[query]
|
||||
)
|
||||
const parts = text.split(regex)
|
||||
return <>{parts.map((part, i) => ...)}</>
|
||||
}
|
||||
```
|
||||
|
||||
**Warning (global regex has mutable state):**
|
||||
|
||||
Global regex (`/g`) has mutable `lastIndex` state:
|
||||
|
||||
```typescript
|
||||
const regex = /foo/g
|
||||
regex.test('foo') // true, lastIndex = 3
|
||||
regex.test('foo') // false, lastIndex = 0
|
||||
```
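If you do hoist a global regex, reset `lastIndex` before each use, or drop the `g` flag when you only need a boolean test (a minimal sketch):

```typescript
const FOO_GLOBAL = /foo/g

function containsFoo(text: string): boolean {
  FOO_GLOBAL.lastIndex = 0 // reset the mutable state before reuse
  return FOO_GLOBAL.test(text)
}

// Or omit the `g` flag entirely for plain membership tests
const FOO = /foo/
FOO.test('foo') // true
FOO.test('foo') // still true, no lastIndex state
```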
|
||||
@@ -0,0 +1,37 @@
|
||||
---
|
||||
title: Build Index Maps for Repeated Lookups
|
||||
impact: LOW-MEDIUM
|
||||
impactDescription: 1M ops to 2K ops
|
||||
tags: javascript, map, indexing, optimization, performance
|
||||
---
|
||||
|
||||
## Build Index Maps for Repeated Lookups
|
||||
|
||||
Multiple `.find()` calls by the same key should use a Map.
|
||||
|
||||
**Incorrect (O(n) per lookup):**
|
||||
|
||||
```typescript
|
||||
function processOrders(orders: Order[], users: User[]) {
|
||||
return orders.map(order => ({
|
||||
...order,
|
||||
user: users.find(u => u.id === order.userId)
|
||||
}))
|
||||
}
|
||||
```
|
||||
|
||||
**Correct (O(1) per lookup):**
|
||||
|
||||
```typescript
|
||||
function processOrders(orders: Order[], users: User[]) {
|
||||
const userById = new Map(users.map(u => [u.id, u]))
|
||||
|
||||
return orders.map(order => ({
|
||||
...order,
|
||||
user: userById.get(order.userId)
|
||||
}))
|
||||
}
|
||||
```
|
||||
|
||||
Build map once (O(n)), then all lookups are O(1).
|
||||
For 1000 orders × 1000 users: 1M ops → 2K ops.
|
||||
@@ -0,0 +1,49 @@
|
||||
---
|
||||
title: Early Length Check for Array Comparisons
|
||||
impact: MEDIUM-HIGH
|
||||
impactDescription: avoids expensive operations when lengths differ
|
||||
tags: javascript, arrays, performance, optimization, comparison
|
||||
---
|
||||
|
||||
## Early Length Check for Array Comparisons
|
||||
|
||||
When comparing arrays with expensive operations (sorting, deep equality, serialization), check lengths first. If lengths differ, the arrays cannot be equal.
|
||||
|
||||
In real-world applications, this optimization is especially valuable when the comparison runs in hot paths (event handlers, render loops).
|
||||
|
||||
**Incorrect (always runs expensive comparison):**
|
||||
|
||||
```typescript
|
||||
function hasChanges(current: string[], original: string[]) {
|
||||
// Always sorts and joins, even when lengths differ
|
||||
return current.sort().join() !== original.sort().join()
|
||||
}
|
||||
```
|
||||
|
||||
Two O(n log n) sorts run even when `current.length` is 5 and `original.length` is 100. There is also the overhead of joining the arrays and comparing the resulting strings, and `.sort()` mutates both input arrays in place.
|
||||
|
||||
**Correct (O(1) length check first):**
|
||||
|
||||
```typescript
|
||||
function hasChanges(current: string[], original: string[]) {
|
||||
// Early return if lengths differ
|
||||
if (current.length !== original.length) {
|
||||
return true
|
||||
}
|
||||
// Only sort/join when lengths match
|
||||
const currentSorted = current.toSorted()
|
||||
const originalSorted = original.toSorted()
|
||||
for (let i = 0; i < currentSorted.length; i++) {
|
||||
if (currentSorted[i] !== originalSorted[i]) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
```
|
||||
|
||||
This approach is more efficient because:
|
||||
- It avoids the overhead of sorting and joining the arrays when lengths differ
|
||||
- It avoids consuming memory for the joined strings (especially important for large arrays)
|
||||
- It avoids mutating the original arrays
|
||||
- It returns early when a difference is found
|
||||
@@ -0,0 +1,82 @@
|
||||
---
|
||||
title: Use Loop for Min/Max Instead of Sort
|
||||
impact: LOW
|
||||
impactDescription: O(n) instead of O(n log n)
|
||||
tags: javascript, arrays, performance, sorting, algorithms
|
||||
---
|
||||
|
||||
## Use Loop for Min/Max Instead of Sort
|
||||
|
||||
Finding the smallest or largest element only requires a single pass through the array. Sorting is wasteful and slower.
|
||||
|
||||
**Incorrect (O(n log n) - sort to find latest):**
|
||||
|
||||
```typescript
|
||||
interface Project {
|
||||
id: string
|
||||
name: string
|
||||
updatedAt: number
|
||||
}
|
||||
|
||||
function getLatestProject(projects: Project[]) {
|
||||
const sorted = [...projects].sort((a, b) => b.updatedAt - a.updatedAt)
|
||||
return sorted[0]
|
||||
}
|
||||
```
|
||||
|
||||
Sorts the entire array just to find the maximum value.
|
||||
|
||||
**Incorrect (O(n log n) - sort for oldest and newest):**
|
||||
|
||||
```typescript
|
||||
function getOldestAndNewest(projects: Project[]) {
|
||||
const sorted = [...projects].sort((a, b) => a.updatedAt - b.updatedAt)
|
||||
return { oldest: sorted[0], newest: sorted[sorted.length - 1] }
|
||||
}
|
||||
```
|
||||
|
||||
Still sorts unnecessarily when only min/max are needed.
|
||||
|
||||
**Correct (O(n) - single loop):**
|
||||
|
||||
```typescript
|
||||
function getLatestProject(projects: Project[]) {
|
||||
if (projects.length === 0) return null
|
||||
|
||||
let latest = projects[0]
|
||||
|
||||
for (let i = 1; i < projects.length; i++) {
|
||||
if (projects[i].updatedAt > latest.updatedAt) {
|
||||
latest = projects[i]
|
||||
}
|
||||
}
|
||||
|
||||
return latest
|
||||
}
|
||||
|
||||
function getOldestAndNewest(projects: Project[]) {
|
||||
if (projects.length === 0) return { oldest: null, newest: null }
|
||||
|
||||
let oldest = projects[0]
|
||||
let newest = projects[0]
|
||||
|
||||
for (let i = 1; i < projects.length; i++) {
|
||||
if (projects[i].updatedAt < oldest.updatedAt) oldest = projects[i]
|
||||
if (projects[i].updatedAt > newest.updatedAt) newest = projects[i]
|
||||
}
|
||||
|
||||
return { oldest, newest }
|
||||
}
|
||||
```
|
||||
|
||||
Single pass through the array, no copying, no sorting.
|
||||
|
||||
**Alternative (Math.min/Math.max for small arrays):**
|
||||
|
||||
```typescript
|
||||
const numbers = [5, 2, 8, 1, 9]
|
||||
const min = Math.min(...numbers)
|
||||
const max = Math.max(...numbers)
|
||||
```
|
||||
|
||||
This works for small arrays, but spreading passes every element as a separate argument, so very large arrays can exceed the engine's argument limit and throw a RangeError. Use the loop approach for reliability.
|
||||
@@ -0,0 +1,24 @@
|
||||
---
|
||||
title: Use Set/Map for O(1) Lookups
|
||||
impact: LOW-MEDIUM
|
||||
impactDescription: O(n) to O(1)
|
||||
tags: javascript, set, map, data-structures, performance
|
||||
---
|
||||
|
||||
## Use Set/Map for O(1) Lookups
|
||||
|
||||
Convert arrays to Set/Map for repeated membership checks.
|
||||
|
||||
**Incorrect (O(n) per check):**
|
||||
|
||||
```typescript
|
||||
const allowedIds = ['a', 'b', 'c', ...]
|
||||
items.filter(item => allowedIds.includes(item.id))
|
||||
```
|
||||
|
||||
**Correct (O(1) per check):**
|
||||
|
||||
```typescript
|
||||
const allowedIds = new Set(['a', 'b', 'c', ...])
|
||||
items.filter(item => allowedIds.has(item.id))
|
||||
```
|
||||
@@ -0,0 +1,57 @@
|
||||
---
|
||||
title: Use toSorted() Instead of sort() for Immutability
|
||||
impact: MEDIUM-HIGH
|
||||
impactDescription: prevents mutation bugs in React state
|
||||
tags: javascript, arrays, immutability, react, state, mutation
|
||||
---
|
||||
|
||||
## Use toSorted() Instead of sort() for Immutability
|
||||
|
||||
`.sort()` mutates the array in place, which can cause bugs with React state and props. Use `.toSorted()` to create a new sorted array without mutation.
|
||||
|
||||
**Incorrect (mutates original array):**
|
||||
|
||||
```typescript
|
||||
function UserList({ users }: { users: User[] }) {
|
||||
// Mutates the users prop array!
|
||||
const sorted = useMemo(
|
||||
() => users.sort((a, b) => a.name.localeCompare(b.name)),
|
||||
[users]
|
||||
)
|
||||
return <div>{sorted.map(renderUser)}</div>
|
||||
}
|
||||
```
|
||||
|
||||
**Correct (creates new array):**
|
||||
|
||||
```typescript
|
||||
function UserList({ users }: { users: User[] }) {
|
||||
// Creates new sorted array, original unchanged
|
||||
const sorted = useMemo(
|
||||
() => users.toSorted((a, b) => a.name.localeCompare(b.name)),
|
||||
[users]
|
||||
)
|
||||
return <div>{sorted.map(renderUser)}</div>
|
||||
}
|
||||
```
|
||||
|
||||
**Why this matters in React:**
|
||||
|
||||
1. Props/state mutations break React's immutability model - React expects props and state to be treated as read-only
|
||||
2. Causes stale closure bugs - Mutating arrays inside closures (callbacks, effects) can lead to unexpected behavior
|
||||
|
||||
**Browser support (fallback for older browsers):**
|
||||
|
||||
`.toSorted()` is available in all modern browsers (Chrome 110+, Safari 16+, Firefox 115+, Node.js 20+). For older environments, use spread operator:
|
||||
|
||||
```typescript
|
||||
// Fallback for older browsers
|
||||
const sorted = [...items].sort((a, b) => a.value - b.value)
|
||||
```
|
||||
|
||||
**Other immutable array methods:**
|
||||
|
||||
- `.toSorted()` - immutable sort
|
||||
- `.toReversed()` - immutable reverse
|
||||
- `.toSpliced()` - immutable splice
|
||||
- `.with()` - immutable element replacement
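A quick usage sketch of the other helpers (illustrative values only):

```typescript
const items = ['a', 'b', 'c', 'd']

const reversed = items.toReversed()   // ['d', 'c', 'b', 'a']
const spliced = items.toSpliced(1, 2) // ['a', 'd'] (removes 2 elements at index 1)
const replaced = items.with(0, 'z')   // ['z', 'b', 'c', 'd']
// `items` is still ['a', 'b', 'c', 'd']
```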
|
||||
@@ -0,0 +1,47 @@
|
||||
---
|
||||
title: Animate SVG Wrapper Instead of SVG Element
|
||||
impact: LOW
|
||||
impactDescription: enables hardware acceleration
|
||||
tags: rendering, svg, css, animation, performance
|
||||
---
|
||||
|
||||
## Animate SVG Wrapper Instead of SVG Element
|
||||
|
||||
Many browsers don't have hardware acceleration for CSS3 animations on SVG elements. Wrap SVG in a `<div>` and animate the wrapper instead.
|
||||
|
||||
**Incorrect (animating SVG directly - no hardware acceleration):**
|
||||
|
||||
```tsx
|
||||
function LoadingSpinner() {
|
||||
return (
|
||||
<svg
|
||||
className="animate-spin"
|
||||
width="24"
|
||||
height="24"
|
||||
viewBox="0 0 24 24"
|
||||
>
|
||||
<circle cx="12" cy="12" r="10" stroke="currentColor" />
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
```
|
||||
|
||||
**Correct (animating wrapper div - hardware accelerated):**
|
||||
|
||||
```tsx
|
||||
function LoadingSpinner() {
|
||||
return (
|
||||
<div className="animate-spin">
|
||||
<svg
|
||||
width="24"
|
||||
height="24"
|
||||
viewBox="0 0 24 24"
|
||||
>
|
||||
<circle cx="12" cy="12" r="10" stroke="currentColor" />
|
||||
</svg>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
```
|
||||
|
||||
This applies to all CSS transforms and transitions (`transform`, `opacity`, `translate`, `scale`, `rotate`). The wrapper div allows browsers to use GPU acceleration for smoother animations.
|
||||
@@ -0,0 +1,40 @@
|
||||
---
|
||||
title: Use Explicit Conditional Rendering
|
||||
impact: LOW
|
||||
impactDescription: prevents rendering 0 or NaN
|
||||
tags: rendering, conditional, jsx, falsy-values
|
||||
---
|
||||
|
||||
## Use Explicit Conditional Rendering
|
||||
|
||||
Use explicit ternary operators (`? :`) instead of `&&` for conditional rendering when the condition can be `0`, `NaN`, or other falsy values that render.
|
||||
|
||||
**Incorrect (renders "0" when count is 0):**
|
||||
|
||||
```tsx
|
||||
function Badge({ count }: { count: number }) {
|
||||
return (
|
||||
<div>
|
||||
{count && <span className="badge">{count}</span>}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// When count = 0, renders: <div>0</div>
|
||||
// When count = 5, renders: <div><span class="badge">5</span></div>
|
||||
```
|
||||
|
||||
**Correct (renders nothing when count is 0):**
|
||||
|
||||
```tsx
|
||||
function Badge({ count }: { count: number }) {
|
||||
return (
|
||||
<div>
|
||||
{count > 0 ? <span className="badge">{count}</span> : null}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// When count = 0, renders: <div></div>
|
||||
// When count = 5, renders: <div><span class="badge">5</span></div>
|
||||
```
|
||||
@@ -0,0 +1,38 @@
|
||||
---
|
||||
title: CSS content-visibility for Long Lists
|
||||
impact: HIGH
|
||||
impactDescription: faster initial render
|
||||
tags: rendering, css, content-visibility, long-lists
|
||||
---
|
||||
|
||||
## CSS content-visibility for Long Lists
|
||||
|
||||
Apply `content-visibility: auto` to defer off-screen rendering.
|
||||
|
||||
**CSS:**
|
||||
|
||||
```css
|
||||
.message-item {
|
||||
content-visibility: auto;
|
||||
contain-intrinsic-size: 0 80px;
|
||||
}
|
||||
```
|
||||
|
||||
**Example:**
|
||||
|
||||
```tsx
|
||||
function MessageList({ messages }: { messages: Message[] }) {
|
||||
return (
|
||||
<div className="overflow-y-auto h-screen">
|
||||
{messages.map(msg => (
|
||||
<div key={msg.id} className="message-item">
|
||||
<Avatar user={msg.author} />
|
||||
<div>{msg.content}</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
```
|
||||
|
||||
For 1000 messages, browser skips layout/paint for ~990 off-screen items (10× faster initial render).
|
||||
@@ -0,0 +1,46 @@
|
||||
---
|
||||
title: Hoist Static JSX Elements
|
||||
impact: LOW
|
||||
impactDescription: avoids re-creation
|
||||
tags: rendering, jsx, static, optimization
|
||||
---
|
||||
|
||||
## Hoist Static JSX Elements
|
||||
|
||||
Extract static JSX outside components to avoid re-creation.
|
||||
|
||||
**Incorrect (recreates element every render):**
|
||||
|
||||
```tsx
|
||||
function LoadingSkeleton() {
|
||||
return <div className="animate-pulse h-20 bg-gray-200" />
|
||||
}
|
||||
|
||||
function Container() {
|
||||
return (
|
||||
<div>
|
||||
{loading && <LoadingSkeleton />}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
```
|
||||
|
||||
**Correct (reuses same element):**
|
||||
|
||||
```tsx
|
||||
const loadingSkeleton = (
|
||||
<div className="animate-pulse h-20 bg-gray-200" />
|
||||
)
|
||||
|
||||
function Container() {
|
||||
return (
|
||||
<div>
|
||||
{loading && loadingSkeleton}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
```
|
||||
|
||||
This is especially helpful for large and static SVG nodes, which can be expensive to recreate on every render.
|
||||
|
||||
**Note:** If your project has [React Compiler](https://react.dev/learn/react-compiler) enabled, the compiler automatically hoists static JSX elements and optimizes component re-renders, making manual hoisting unnecessary.
|
||||
@@ -0,0 +1,28 @@
|
||||
---
|
||||
title: Optimize SVG Precision
|
||||
impact: LOW
|
||||
impactDescription: reduces file size
|
||||
tags: rendering, svg, optimization, svgo
|
||||
---
|
||||
|
||||
## Optimize SVG Precision
|
||||
|
||||
Reduce SVG coordinate precision to decrease file size. The optimal precision depends on the viewBox size, but for typical icon-sized viewBoxes one decimal place is visually indistinguishable from the original.
|
||||
|
||||
**Incorrect (excessive precision):**
|
||||
|
||||
```svg
|
||||
<path d="M 10.293847 20.847362 L 30.938472 40.192837" />
|
||||
```
|
||||
|
||||
**Correct (1 decimal place):**
|
||||
|
||||
```svg
|
||||
<path d="M 10.3 20.8 L 30.9 40.2" />
|
||||
```
|
||||
|
||||
**Automate with SVGO:**
|
||||
|
||||
```bash
|
||||
npx svgo --precision=1 --multipass icon.svg
|
||||
```
|
||||
@@ -0,0 +1,39 @@
|
||||
---
|
||||
title: Defer State Reads to Usage Point
|
||||
impact: MEDIUM
|
||||
impactDescription: avoids unnecessary subscriptions
|
||||
tags: rerender, searchParams, localStorage, optimization
|
||||
---
|
||||
|
||||
## Defer State Reads to Usage Point
|
||||
|
||||
Don't subscribe to dynamic state (searchParams, localStorage) if you only read it inside callbacks.
|
||||
|
||||
**Incorrect (subscribes to all searchParams changes):**
|
||||
|
||||
```tsx
|
||||
function ShareButton({ chatId }: { chatId: string }) {
|
||||
const searchParams = useSearchParams()
|
||||
|
||||
const handleShare = () => {
|
||||
const ref = searchParams.get('ref')
|
||||
shareChat(chatId, { ref })
|
||||
}
|
||||
|
||||
return <button onClick={handleShare}>Share</button>
|
||||
}
|
||||
```
|
||||
|
||||
**Correct (reads on demand, no subscription):**
|
||||
|
||||
```tsx
|
||||
function ShareButton({ chatId }: { chatId: string }) {
|
||||
const handleShare = () => {
|
||||
const params = new URLSearchParams(window.location.search)
|
||||
const ref = params.get('ref')
|
||||
shareChat(chatId, { ref })
|
||||
}
|
||||
|
||||
return <button onClick={handleShare}>Share</button>
|
||||
}
|
||||
```
|
||||
@@ -0,0 +1,45 @@
|
||||
---
|
||||
title: Narrow Effect Dependencies
|
||||
impact: LOW
|
||||
impactDescription: minimizes effect re-runs
|
||||
tags: rerender, useEffect, dependencies, optimization
|
||||
---
|
||||
|
||||
## Narrow Effect Dependencies
|
||||
|
||||
Specify primitive dependencies instead of objects to minimize effect re-runs.
|
||||
|
||||
**Incorrect (re-runs on any user field change):**
|
||||
|
||||
```tsx
|
||||
useEffect(() => {
|
||||
console.log(user.id)
|
||||
}, [user])
|
||||
```
|
||||
|
||||
**Correct (re-runs only when id changes):**
|
||||
|
||||
```tsx
|
||||
useEffect(() => {
|
||||
console.log(user.id)
|
||||
}, [user.id])
|
||||
```
|
||||
|
||||
**For derived state, compute outside effect:**
|
||||
|
||||
```tsx
|
||||
// Incorrect: runs on width=767, 766, 765...
|
||||
useEffect(() => {
|
||||
if (width < 768) {
|
||||
enableMobileMode()
|
||||
}
|
||||
}, [width])
|
||||
|
||||
// Correct: runs only on boolean transition
|
||||
const isMobile = width < 768
|
||||
useEffect(() => {
|
||||
if (isMobile) {
|
||||
enableMobileMode()
|
||||
}
|
||||
}, [isMobile])
|
||||
```
|
||||
@@ -0,0 +1,29 @@
|
||||
---
|
||||
title: Subscribe to Derived State
|
||||
impact: MEDIUM
|
||||
impactDescription: reduces re-render frequency
|
||||
tags: rerender, derived-state, media-query, optimization
|
||||
---
|
||||
|
||||
## Subscribe to Derived State
|
||||
|
||||
Subscribe to derived boolean state instead of continuous values to reduce re-render frequency.
|
||||
|
||||
**Incorrect (re-renders on every pixel change):**
|
||||
|
||||
```tsx
|
||||
function Sidebar() {
|
||||
const width = useWindowWidth() // updates continuously
|
||||
const isMobile = width < 768
|
||||
return <nav className={isMobile ? 'mobile' : 'desktop'} />
|
||||
}
|
||||
```
|
||||
|
||||
**Correct (re-renders only when boolean changes):**
|
||||
|
||||
```tsx
|
||||
function Sidebar() {
|
||||
const isMobile = useMediaQuery('(max-width: 767px)')
|
||||
return <nav className={isMobile ? 'mobile' : 'desktop'} />
|
||||
}
|
||||
```
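The correct example assumes a `useMediaQuery` hook is available. A minimal sketch using `useSyncExternalStore` (React 18+) could look like this:

```tsx
import { useCallback, useSyncExternalStore } from 'react'

function useMediaQuery(query: string): boolean {
  const subscribe = useCallback(
    (onChange: () => void) => {
      const mql = window.matchMedia(query)
      mql.addEventListener('change', onChange)
      return () => mql.removeEventListener('change', onChange)
    },
    [query]
  )

  return useSyncExternalStore(
    subscribe,
    () => window.matchMedia(query).matches,
    () => false // server snapshot: no match during SSR
  )
}
```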
|
||||
@@ -0,0 +1,74 @@
|
||||
---
|
||||
title: Use Functional setState Updates
|
||||
impact: MEDIUM
|
||||
impactDescription: prevents stale closures and unnecessary callback recreations
|
||||
tags: react, hooks, useState, useCallback, callbacks, closures
|
||||
---
|
||||
|
||||
## Use Functional setState Updates
|
||||
|
||||
When updating state based on the current state value, use the functional update form of setState instead of directly referencing the state variable. This prevents stale closures, eliminates unnecessary dependencies, and creates stable callback references.
|
||||
|
||||
**Incorrect (requires state as dependency):**
|
||||
|
||||
```tsx
|
||||
function TodoList() {
|
||||
const [items, setItems] = useState(initialItems)
|
||||
|
||||
// Callback must depend on items, recreated on every items change
|
||||
const addItems = useCallback((newItems: Item[]) => {
|
||||
setItems([...items, ...newItems])
|
||||
}, [items]) // ❌ items dependency causes recreations
|
||||
|
||||
// Risk of stale closure if dependency is forgotten
|
||||
const removeItem = useCallback((id: string) => {
|
||||
setItems(items.filter(item => item.id !== id))
|
||||
}, []) // ❌ Missing items dependency - will use stale items!
|
||||
|
||||
return <ItemsEditor items={items} onAdd={addItems} onRemove={removeItem} />
|
||||
}
|
||||
```
|
||||
|
||||
The first callback is recreated every time `items` changes, which can cause child components to re-render unnecessarily. The second callback has a stale closure bug—it will always reference the initial `items` value.
|
||||
|
||||
**Correct (stable callbacks, no stale closures):**
|
||||
|
||||
```tsx
|
||||
function TodoList() {
|
||||
const [items, setItems] = useState(initialItems)
|
||||
|
||||
// Stable callback, never recreated
|
||||
const addItems = useCallback((newItems: Item[]) => {
|
||||
setItems(curr => [...curr, ...newItems])
|
||||
}, []) // ✅ No dependencies needed
|
||||
|
||||
// Always uses latest state, no stale closure risk
|
||||
const removeItem = useCallback((id: string) => {
|
||||
setItems(curr => curr.filter(item => item.id !== id))
|
||||
}, []) // ✅ Safe and stable
|
||||
|
||||
return <ItemsEditor items={items} onAdd={addItems} onRemove={removeItem} />
|
||||
}
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
|
||||
1. **Stable callback references** - Callbacks don't need to be recreated when state changes
|
||||
2. **No stale closures** - Always operates on the latest state value
|
||||
3. **Fewer dependencies** - Simplifies dependency arrays and removes a common source of dependency-array mistakes
|
||||
4. **Prevents bugs** - Eliminates the most common source of React closure bugs
|
||||
|
||||
**When to use functional updates:**
|
||||
|
||||
- Any setState that depends on the current state value
|
||||
- Inside useCallback/useMemo when state is needed
|
||||
- Event handlers that reference state
|
||||
- Async operations that update state
|
||||
|
||||
**When direct updates are fine:**
|
||||
|
||||
- Setting state to a static value: `setCount(0)`
|
||||
- Setting state from props/arguments only: `setName(newName)`
|
||||
- State doesn't depend on previous value
|
||||
|
||||
**Note:** If your project has [React Compiler](https://react.dev/learn/react-compiler) enabled, the compiler can automatically optimize some cases, but functional updates are still recommended for correctness and to prevent stale closure bugs.
|
||||
@@ -0,0 +1,58 @@
|
||||
---
|
||||
title: Use Lazy State Initialization
|
||||
impact: MEDIUM
|
||||
impactDescription: avoids wasted computation on every render
|
||||
tags: react, hooks, useState, performance, initialization
|
||||
---
|
||||
|
||||
## Use Lazy State Initialization
|
||||
|
||||
Pass a function to `useState` for expensive initial values. Without the function form, the initializer runs on every render even though the value is only used once.
|
||||
|
||||
**Incorrect (runs on every render):**
|
||||
|
||||
```tsx
|
||||
function FilteredList({ items }: { items: Item[] }) {
|
||||
// buildSearchIndex() runs on EVERY render, even after initialization
|
||||
const [searchIndex, setSearchIndex] = useState(buildSearchIndex(items))
|
||||
const [query, setQuery] = useState('')
|
||||
|
||||
// When query changes, buildSearchIndex runs again unnecessarily
|
||||
return <SearchResults index={searchIndex} query={query} />
|
||||
}
|
||||
|
||||
function UserProfile() {
|
||||
// JSON.parse runs on every render
|
||||
const [settings, setSettings] = useState(
|
||||
JSON.parse(localStorage.getItem('settings') || '{}')
|
||||
)
|
||||
|
||||
return <SettingsForm settings={settings} onChange={setSettings} />
|
||||
}
|
||||
```
|
||||
|
||||
**Correct (runs only once):**
|
||||
|
||||
```tsx
|
||||
function FilteredList({ items }: { items: Item[] }) {
|
||||
// buildSearchIndex() runs ONLY on initial render
|
||||
const [searchIndex, setSearchIndex] = useState(() => buildSearchIndex(items))
|
||||
const [query, setQuery] = useState('')
|
||||
|
||||
return <SearchResults index={searchIndex} query={query} />
|
||||
}
|
||||
|
||||
function UserProfile() {
|
||||
// JSON.parse runs only on initial render
|
||||
const [settings, setSettings] = useState(() => {
|
||||
const stored = localStorage.getItem('settings')
|
||||
return stored ? JSON.parse(stored) : {}
|
||||
})
|
||||
|
||||
return <SettingsForm settings={settings} onChange={setSettings} />
|
||||
}
|
||||
```
|
||||
|
||||
Use lazy initialization when computing initial values from localStorage/sessionStorage, building data structures (indexes, maps), reading from the DOM, or performing heavy transformations.
|
||||
|
||||
For simple primitives (`useState(0)`), direct references (`useState(props.value)`), or cheap literals (`useState({})`), the function form is unnecessary.
|
||||
@@ -0,0 +1,44 @@
|
||||
---
|
||||
title: Extract to Memoized Components
|
||||
impact: MEDIUM
|
||||
impactDescription: enables early returns
|
||||
tags: rerender, memo, useMemo, optimization
|
||||
---
|
||||
|
||||
## Extract to Memoized Components
|
||||
|
||||
Extract expensive work into memoized components to enable early returns before computation.
|
||||
|
||||
**Incorrect (computes avatar even when loading):**
|
||||
|
||||
```tsx
|
||||
function Profile({ user, loading }: Props) {
|
||||
const avatar = useMemo(() => {
|
||||
const id = computeAvatarId(user)
|
||||
return <Avatar id={id} />
|
||||
}, [user])
|
||||
|
||||
if (loading) return <Skeleton />
|
||||
return <div>{avatar}</div>
|
||||
}
|
||||
```
|
||||
|
||||
**Correct (skips computation when loading):**
|
||||
|
||||
```tsx
|
||||
const UserAvatar = memo(function UserAvatar({ user }: { user: User }) {
|
||||
const id = useMemo(() => computeAvatarId(user), [user])
|
||||
return <Avatar id={id} />
|
||||
})
|
||||
|
||||
function Profile({ user, loading }: Props) {
|
||||
if (loading) return <Skeleton />
|
||||
return (
|
||||
<div>
|
||||
<UserAvatar user={user} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
```
|
||||
|
||||
**Note:** If your project has [React Compiler](https://react.dev/learn/react-compiler) enabled, manual memoization with `memo()` and `useMemo()` is not necessary. The compiler automatically optimizes re-renders.
|
||||
@@ -0,0 +1,40 @@
|
||||
---
|
||||
title: Use Transitions for Non-Urgent Updates
|
||||
impact: MEDIUM
|
||||
impactDescription: maintains UI responsiveness
|
||||
tags: rerender, transitions, startTransition, performance
|
||||
---
|
||||
|
||||
## Use Transitions for Non-Urgent Updates
|
||||
|
||||
Mark frequent, non-urgent state updates as transitions to maintain UI responsiveness.
|
||||
|
||||
**Incorrect (blocks UI on every scroll):**
|
||||
|
||||
```tsx
|
||||
function ScrollTracker() {
|
||||
const [scrollY, setScrollY] = useState(0)
|
||||
useEffect(() => {
|
||||
const handler = () => setScrollY(window.scrollY)
|
||||
window.addEventListener('scroll', handler, { passive: true })
|
||||
return () => window.removeEventListener('scroll', handler)
|
||||
}, [])
|
||||
}
|
||||
```
|
||||
|
||||
**Correct (non-blocking updates):**
|
||||
|
||||
```tsx
|
||||
import { startTransition } from 'react'
|
||||
|
||||
function ScrollTracker() {
|
||||
const [scrollY, setScrollY] = useState(0)
|
||||
useEffect(() => {
|
||||
const handler = () => {
|
||||
startTransition(() => setScrollY(window.scrollY))
|
||||
}
|
||||
window.addEventListener('scroll', handler, { passive: true })
|
||||
return () => window.removeEventListener('scroll', handler)
|
||||
}, [])
|
||||
}
|
||||
```
|
||||
39
.claude/skills/web-design-guidelines/SKILL.md
Normal file
@@ -0,0 +1,39 @@
|
||||
---
|
||||
name: web-design-guidelines
|
||||
description: Review UI code for Web Interface Guidelines compliance. Use when asked to "review my UI", "check accessibility", "audit design", "review UX", or "check my site against best practices".
|
||||
metadata:
|
||||
author: vercel
|
||||
version: "1.0.0"
|
||||
argument-hint: <file-or-pattern>
|
||||
---
|
||||
|
||||
# Web Interface Guidelines
|
||||
|
||||
Review files for compliance with Web Interface Guidelines.
|
||||
|
||||
## How It Works
|
||||
|
||||
1. Fetch the latest guidelines from the source URL below
|
||||
2. Read the specified files (or prompt user for files/pattern)
|
||||
3. Check against all rules in the fetched guidelines
|
||||
4. Output findings in the terse `file:line` format
|
||||
|
||||
## Guidelines Source
|
||||
|
||||
Fetch fresh guidelines before each review:
|
||||
|
||||
```
|
||||
https://raw.githubusercontent.com/vercel-labs/web-interface-guidelines/main/command.md
|
||||
```
|
||||
|
||||
Use WebFetch to retrieve the latest rules. The fetched content contains all the rules and output format instructions.
|
||||
|
||||
## Usage
|
||||
|
||||
When a user provides a file or pattern argument:
|
||||
1. Fetch guidelines from the source URL above
|
||||
2. Read the specified files
|
||||
3. Apply all rules from the fetched guidelines
|
||||
4. Output findings using the format specified in the guidelines
|
||||
|
||||
If no files specified, ask the user which files to review.
|
||||
@@ -8,5 +8,5 @@
|
||||
"ignoreCase": true,
|
||||
"newlinesBetween": true
|
||||
},
|
||||
"ignorePatterns": []
|
||||
"ignorePatterns": [".claude", "fixtures"]
|
||||
}
|
||||
|
||||
42
AGENTS.md
Normal file
@@ -0,0 +1,42 @@
|
||||
# AGENTS.md
|
||||
|
||||
## Project
|
||||
|
||||
ARIS is an AI-powered personal assistant that aggregates data from various sources into a contextual feed. Monorepo with `packages/` (shared libraries) and `apps/` (applications).
|
||||
|
||||
## Commands
|
||||
|
||||
- Install: `bun install`
|
||||
- Test: `bun test` (run in the specific package directory)
|
||||
- Lint: `bun run lint`
|
||||
- Format: `bun run format`
|
||||
- Type check: `bun tsc --noEmit`
|
||||
|
||||
Use Bun exclusively. Do not use npm or yarn.
|
||||
|
||||
## Code Style
|
||||
|
||||
- File names: kebab-case (`data-source.ts`)
|
||||
- Prefer function declarations over arrow functions
|
||||
- Never use `any` - use `unknown` and narrow types (see the sketch after this list)
|
||||
- Enums: use const objects with corresponding types:
|
||||
```typescript
|
||||
const Priority = {
|
||||
Low: "Low",
|
||||
High: "High",
|
||||
} as const
|
||||
type Priority = (typeof Priority)[keyof typeof Priority]
|
||||
```
|
||||
- File organization: types first, then primary functions, then helpers
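
Example of narrowing `unknown` instead of reaching for `any` (illustrative only, reusing the `Priority` object above):

```typescript
function isPriority(value: unknown): value is Priority {
  // keys and values of Priority are identical, so a key check is enough here
  return typeof value === "string" && value in Priority
}

function parsePriority(raw: unknown): Priority {
  if (isPriority(raw)) return raw
  throw new Error(`invalid priority: ${String(raw)}`)
}
```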
|
||||
|
||||
## Before Committing
|
||||
|
||||
1. Format: `bun run format`
|
||||
2. Test the modified package: `cd packages/<package> && bun test`
|
||||
3. Fix all type errors related to your changes
|
||||
|
||||
## Git
|
||||
|
||||
- Branch: `feat/<task>`, `fix/<task>`, `ci/<task>`, etc.
|
||||
- Commits: conventional commit format, title <= 50 chars
|
||||
- Signing: If `GPG_PRIVATE_KEY_PASSPHRASE` env var is available, use it to sign commits with `git commit -S`
|
||||
21
README.md
@@ -6,10 +6,25 @@ To install dependencies:
|
||||
bun install
|
||||
```
|
||||
|
||||
To run:
|
||||
## Packages
|
||||
|
||||
### @aris/source-tfl
|
||||
|
||||
TfL (Transport for London) feed source for tube, overground, and Elizabeth line alerts.
|
||||
|
||||
#### Testing
|
||||
|
||||
```bash
|
||||
bun run index.ts
|
||||
cd packages/aris-source-tfl
|
||||
bun run test
|
||||
```
|
||||
|
||||
This project was created using `bun init` in bun v1.3.6. [Bun](https://bun.com) is a fast all-in-one JavaScript runtime.
|
||||
#### Fixtures
|
||||
|
||||
Tests use fixture data from real TfL API responses stored in `fixtures/tfl-responses.json`.
|
||||
|
||||
To refresh fixtures:
|
||||
|
||||
```bash
|
||||
bun run fetch-fixtures
|
||||
```
|
||||
|
||||
8
apps/aris-backend/.env.example
Normal file
@@ -0,0 +1,8 @@
|
||||
# PostgreSQL connection string
|
||||
DATABASE_URL=postgresql://user:password@localhost:5432/aris
|
||||
|
||||
# BetterAuth secret (min 32 chars, generate with: openssl rand -base64 32)
|
||||
BETTER_AUTH_SECRET=
|
||||
|
||||
# Base URL of the backend
|
||||
BETTER_AUTH_URL=http://localhost:3000
|
||||
22
apps/aris-backend/package.json
Normal file
@@ -0,0 +1,22 @@
|
||||
{
|
||||
"name": "@aris/backend",
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
"main": "src/server.ts",
|
||||
"scripts": {
|
||||
"dev": "bun run --watch src/server.ts",
|
||||
"start": "bun run src/server.ts",
|
||||
"test": "bun test src/"
|
||||
},
|
||||
"dependencies": {
|
||||
"@aris/core": "workspace:*",
|
||||
"@aris/source-location": "workspace:*",
|
||||
"@aris/source-weatherkit": "workspace:*",
|
||||
"better-auth": "^1",
|
||||
"hono": "^4",
|
||||
"pg": "^8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/pg": "^8"
|
||||
}
|
||||
}
|
||||
7
apps/aris-backend/src/auth/http.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
import type { Hono } from "hono"
|
||||
|
||||
import { auth } from "./index.ts"
|
||||
|
||||
export function registerAuthHandlers(app: Hono): void {
|
||||
app.on(["POST", "GET"], "/api/auth/*", (c) => auth.handler(c.req.raw))
|
||||
}
|
||||
10
apps/aris-backend/src/auth/index.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { betterAuth } from "better-auth"
|
||||
|
||||
import { pool } from "../db.ts"
|
||||
|
||||
export const auth = betterAuth({
|
||||
database: pool,
|
||||
emailAndPassword: {
|
||||
enabled: true,
|
||||
},
|
||||
})
|
||||
54
apps/aris-backend/src/auth/session-middleware.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import type { Context, Next } from "hono"
|
||||
|
||||
import { auth } from "./index.ts"
|
||||
|
||||
type SessionUser = typeof auth.$Infer.Session.user
|
||||
type Session = typeof auth.$Infer.Session.session
|
||||
|
||||
export interface SessionVariables {
|
||||
user: SessionUser | null
|
||||
session: Session | null
|
||||
}
|
||||
|
||||
/**
|
||||
* Middleware that attaches session and user to the context.
|
||||
* Does not reject unauthenticated requests - use requireSession for that.
|
||||
*/
|
||||
export async function sessionMiddleware(c: Context, next: Next): Promise<void> {
|
||||
const session = await auth.api.getSession({ headers: c.req.raw.headers })
|
||||
|
||||
if (session) {
|
||||
c.set("user", session.user)
|
||||
c.set("session", session.session)
|
||||
} else {
|
||||
c.set("user", null)
|
||||
c.set("session", null)
|
||||
}
|
||||
|
||||
await next()
|
||||
}
|
||||
|
||||
/**
|
||||
* Middleware that requires a valid session. Returns 401 if not authenticated.
|
||||
*/
|
||||
export async function requireSession(c: Context, next: Next): Promise<Response | void> {
|
||||
const session = await auth.api.getSession({ headers: c.req.raw.headers })
|
||||
|
||||
if (!session) {
|
||||
return c.json({ error: "Unauthorized" }, 401)
|
||||
}
|
||||
|
||||
c.set("user", session.user)
|
||||
c.set("session", session.session)
|
||||
await next()
|
||||
}
|
||||
|
||||
/**
|
||||
* Get session from headers. Useful for WebSocket upgrade validation.
|
||||
*/
|
||||
export async function getSessionFromHeaders(
|
||||
headers: Headers,
|
||||
): Promise<{ user: SessionUser; session: Session } | null> {
|
||||
const session = await auth.api.getSession({ headers })
|
||||
return session
|
||||
}
|
||||
5
apps/aris-backend/src/db.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
import { Pool } from "pg"
|
||||
|
||||
export const pool = new Pool({
|
||||
connectionString: process.env.DATABASE_URL,
|
||||
})
|
||||
14
apps/aris-backend/src/server.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { Hono } from "hono"
|
||||
|
||||
import { registerAuthHandlers } from "./auth/http.ts"
|
||||
|
||||
const app = new Hono()
|
||||
|
||||
app.get("/health", (c) => c.json({ status: "ok" }))
|
||||
|
||||
registerAuthHandlers(app)
|
||||
|
||||
export default {
|
||||
port: 3000,
|
||||
fetch: app.fetch,
|
||||
}
|
||||
4
apps/aris-backend/tsconfig.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"include": ["src"]
|
||||
}
|
||||
112
bun.lock
@@ -13,6 +13,21 @@
|
||||
"typescript": "^5",
|
||||
},
|
||||
},
|
||||
"apps/aris-backend": {
|
||||
"name": "@aris/backend",
|
||||
"version": "0.0.0",
|
||||
"dependencies": {
|
||||
"@aris/core": "workspace:*",
|
||||
"@aris/source-location": "workspace:*",
|
||||
"@aris/source-weatherkit": "workspace:*",
|
||||
"better-auth": "^1",
|
||||
"hono": "^4",
|
||||
"pg": "^8",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/pg": "^8",
|
||||
},
|
||||
},
|
||||
"packages/aris-core": {
|
||||
"name": "@aris/core",
|
||||
"version": "0.0.0",
|
||||
@@ -25,16 +40,61 @@
|
||||
"arktype": "^2.1.0",
|
||||
},
|
||||
},
|
||||
"packages/aris-source-location": {
|
||||
"name": "@aris/source-location",
|
||||
"version": "0.0.0",
|
||||
"dependencies": {
|
||||
"@aris/core": "workspace:*",
|
||||
},
|
||||
},
|
||||
"packages/aris-source-tfl": {
|
||||
"name": "@aris/source-tfl",
|
||||
"version": "0.0.0",
|
||||
"dependencies": {
|
||||
"@aris/core": "workspace:*",
|
||||
"@aris/source-location": "workspace:*",
|
||||
"arktype": "^2.1.0",
|
||||
},
|
||||
},
|
||||
"packages/aris-source-weatherkit": {
|
||||
"name": "@aris/source-weatherkit",
|
||||
"version": "0.0.0",
|
||||
"dependencies": {
|
||||
"@aris/core": "workspace:*",
|
||||
"@aris/source-location": "workspace:*",
|
||||
"arktype": "^2.1.0",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"@aris/backend": ["@aris/backend@workspace:apps/aris-backend"],
|
||||
|
||||
"@aris/core": ["@aris/core@workspace:packages/aris-core"],
|
||||
|
||||
"@aris/data-source-weatherkit": ["@aris/data-source-weatherkit@workspace:packages/aris-data-source-weatherkit"],
|
||||
|
||||
"@aris/source-location": ["@aris/source-location@workspace:packages/aris-source-location"],
|
||||
|
||||
"@aris/source-tfl": ["@aris/source-tfl@workspace:packages/aris-source-tfl"],
|
||||
|
||||
"@aris/source-weatherkit": ["@aris/source-weatherkit@workspace:packages/aris-source-weatherkit"],
|
||||
|
||||
"@ark/schema": ["@ark/schema@0.56.0", "", { "dependencies": { "@ark/util": "0.56.0" } }, "sha512-ECg3hox/6Z/nLajxXqNhgPtNdHWC9zNsDyskwO28WinoFEnWow4IsERNz9AnXRhTZJnYIlAJ4uGn3nlLk65vZA=="],
|
||||
|
||||
"@ark/util": ["@ark/util@0.56.0", "", {}, "sha512-BghfRC8b9pNs3vBoDJhcta0/c1J1rsoS1+HgVUreMFPdhz/CRAKReAu57YEllNaSy98rWAdY1gE+gFup7OXpgA=="],
|
||||
|
||||
"@better-auth/core": ["@better-auth/core@1.4.17", "", { "dependencies": { "@standard-schema/spec": "^1.0.0", "zod": "^4.3.5" }, "peerDependencies": { "@better-auth/utils": "0.3.0", "@better-fetch/fetch": "1.1.21", "better-call": "1.1.8", "jose": "^6.1.0", "kysely": "^0.28.5", "nanostores": "^1.0.1" } }, "sha512-WSaEQDdUO6B1CzAmissN6j0lx9fM9lcslEYzlApB5UzFaBeAOHNUONTdglSyUs6/idiZBoRvt0t/qMXCgIU8ug=="],
|
||||
|
||||
"@better-auth/telemetry": ["@better-auth/telemetry@1.4.17", "", { "dependencies": { "@better-auth/utils": "0.3.0", "@better-fetch/fetch": "1.1.21" }, "peerDependencies": { "@better-auth/core": "1.4.17" } }, "sha512-R1BC4e/bNjQbXu7lG6ubpgmsPj7IMqky5DvMlzAtnAJWJhh99pMh/n6w5gOHa0cqDZgEAuj75IPTxv+q3YiInA=="],
|
||||
|
||||
"@better-auth/utils": ["@better-auth/utils@0.3.0", "", {}, "sha512-W+Adw6ZA6mgvnSnhOki270rwJ42t4XzSK6YWGF//BbVXL6SwCLWfyzBc1lN2m/4RM28KubdBKQ4X5VMoLRNPQw=="],
|
||||
|
||||
"@better-fetch/fetch": ["@better-fetch/fetch@1.1.21", "", {}, "sha512-/ImESw0sskqlVR94jB+5+Pxjf+xBwDZF/N5+y2/q4EqD7IARUTSpPfIo8uf39SYpCxyOCtbyYpUrZ3F/k0zT4A=="],
|
||||
|
||||
"@noble/ciphers": ["@noble/ciphers@2.1.1", "", {}, "sha512-bysYuiVfhxNJuldNXlFEitTVdNnYUc+XNJZd7Qm2a5j1vZHgY+fazadNFWFaMK/2vye0JVlxV3gHmC0WDfAOQw=="],
|
||||
|
||||
"@noble/hashes": ["@noble/hashes@2.0.1", "", {}, "sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw=="],
|
||||
|
||||
"@oxfmt/darwin-arm64": ["@oxfmt/darwin-arm64@0.24.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-aYXuGf/yq8nsyEcHindGhiz9I+GEqLkVq8CfPbd+6VE259CpPEH+CaGHEO1j6vIOmNr8KHRq+IAjeRO2uJpb8A=="],
|
||||
|
||||
"@oxfmt/darwin-x64": ["@oxfmt/darwin-x64@0.24.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-vs3b8Bs53hbiNvcNeBilzE/+IhDTWKjSBB3v/ztr664nZk65j0xr+5IHMBNz3CFppmX7o/aBta2PxY+t+4KoPg=="],
|
||||
@@ -67,24 +127,76 @@
|
||||
|
||||
"@oxlint/win32-x64": ["@oxlint/win32-x64@1.39.0", "", { "os": "win32", "cpu": "x64" }, "sha512-sbi25lfj74hH+6qQtb7s1wEvd1j8OQbTaH8v3xTcDjrwm579Cyh0HBv1YSZ2+gsnVwfVDiCTL1D0JsNqYXszVA=="],
|
||||
|
||||
"@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="],
|
||||
|
||||
"@types/bun": ["@types/bun@1.3.6", "", { "dependencies": { "bun-types": "1.3.6" } }, "sha512-uWCv6FO/8LcpREhenN1d1b6fcspAB+cefwD7uti8C8VffIv0Um08TKMn98FynpTiU38+y2dUO55T11NgDt8VAA=="],
|
||||
|
||||
"@types/node": ["@types/node@25.0.9", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-/rpCXHlCWeqClNBwUhDcusJxXYDjZTyE8v5oTO7WbL8eij2nKhUeU89/6xgjU7N4/Vh3He0BtyhJdQbDyhiXAw=="],
|
||||
|
||||
"@types/pg": ["@types/pg@8.16.0", "", { "dependencies": { "@types/node": "*", "pg-protocol": "*", "pg-types": "^2.2.0" } }, "sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ=="],
|
||||
|
||||
"arkregex": ["arkregex@0.0.5", "", { "dependencies": { "@ark/util": "0.56.0" } }, "sha512-ncYjBdLlh5/QnVsAA8De16Tc9EqmYM7y/WU9j+236KcyYNUXogpz3sC4ATIZYzzLxwI+0sEOaQLEmLmRleaEXw=="],
|
||||
|
||||
"arktype": ["arktype@2.1.29", "", { "dependencies": { "@ark/schema": "0.56.0", "@ark/util": "0.56.0", "arkregex": "0.0.5" } }, "sha512-jyfKk4xIOzvYNayqnD8ZJQqOwcrTOUbIU4293yrzAjA3O1dWh61j71ArMQ6tS/u4pD7vabSPe7nG3RCyoXW6RQ=="],
|
||||
|
||||
"better-auth": ["better-auth@1.4.17", "", { "dependencies": { "@better-auth/core": "1.4.17", "@better-auth/telemetry": "1.4.17", "@better-auth/utils": "0.3.0", "@better-fetch/fetch": "1.1.21", "@noble/ciphers": "^2.0.0", "@noble/hashes": "^2.0.0", "better-call": "1.1.8", "defu": "^6.1.4", "jose": "^6.1.0", "kysely": "^0.28.5", "nanostores": "^1.0.1", "zod": "^4.3.5" }, "peerDependencies": { "@lynx-js/react": "*", "@prisma/client": "^5.0.0 || ^6.0.0 || ^7.0.0", "@sveltejs/kit": "^2.0.0", "@tanstack/react-start": "^1.0.0", "@tanstack/solid-start": "^1.0.0", "better-sqlite3": "^12.0.0", "drizzle-kit": ">=0.31.4", "drizzle-orm": ">=0.41.0", "mongodb": "^6.0.0 || ^7.0.0", "mysql2": "^3.0.0", "next": "^14.0.0 || ^15.0.0 || ^16.0.0", "pg": "^8.0.0", "prisma": "^5.0.0 || ^6.0.0 || ^7.0.0", "react": "^18.0.0 || ^19.0.0", "react-dom": "^18.0.0 || ^19.0.0", "solid-js": "^1.0.0", "svelte": "^4.0.0 || ^5.0.0", "vitest": "^2.0.0 || ^3.0.0 || ^4.0.0", "vue": "^3.0.0" }, "optionalPeers": ["@lynx-js/react", "@prisma/client", "@sveltejs/kit", "@tanstack/react-start", "@tanstack/solid-start", "better-sqlite3", "drizzle-kit", "drizzle-orm", "mongodb", "mysql2", "next", "pg", "prisma", "react", "react-dom", "solid-js", "svelte", "vitest", "vue"] }, "sha512-VmHGQyKsEahkEs37qguROKg/6ypYpNF13D7v/lkbO7w7Aivz0Bv2h+VyUkH4NzrGY0QBKXi1577mGhDCVwp0ew=="],
|
||||
|
||||
"better-call": ["better-call@1.1.8", "", { "dependencies": { "@better-auth/utils": "^0.3.0", "@better-fetch/fetch": "^1.1.4", "rou3": "^0.7.10", "set-cookie-parser": "^2.7.1" }, "peerDependencies": { "zod": "^4.0.0" }, "optionalPeers": ["zod"] }, "sha512-XMQ2rs6FNXasGNfMjzbyroSwKwYbZ/T3IxruSS6U2MJRsSYh3wYtG3o6H00ZlKZ/C/UPOAD97tqgQJNsxyeTXw=="],
|
||||
|
||||
"bun-types": ["bun-types@1.3.6", "", { "dependencies": { "@types/node": "*" } }, "sha512-OlFwHcnNV99r//9v5IIOgQ9Uk37gZqrNMCcqEaExdkVq3Avwqok1bJFmvGMCkCE0FqzdY8VMOZpfpR3lwI+CsQ=="],
|
||||
|
||||
"defu": ["defu@6.1.4", "", {}, "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg=="],
|
||||
|
||||
"hono": ["hono@4.11.5", "", {}, "sha512-WemPi9/WfyMwZs+ZUXdiwcCh9Y+m7L+8vki9MzDw3jJ+W9Lc+12HGsd368Qc1vZi1xwW8BWMMsnK5efYKPdt4g=="],
|
||||
|
||||
"jose": ["jose@6.1.3", "", {}, "sha512-0TpaTfihd4QMNwrz/ob2Bp7X04yuxJkjRGi4aKmOqwhov54i6u79oCv7T+C7lo70MKH6BesI3vscD1yb/yzKXQ=="],
|
||||
|
||||
"kysely": ["kysely@0.28.10", "", {}, "sha512-ksNxfzIW77OcZ+QWSAPC7yDqUSaIVwkTWnTPNiIy//vifNbwsSgQ57OkkncHxxpcBHM3LRfLAZVEh7kjq5twVA=="],
|
||||
|
||||
"nanostores": ["nanostores@1.1.0", "", {}, "sha512-yJBmDJr18xy47dbNVlHcgdPrulSn1nhSE6Ns9vTG+Nx9VPT6iV1MD6aQFp/t52zpf82FhLLTXAXr30NuCnxvwA=="],
|
||||
|
||||
"oxfmt": ["oxfmt@0.24.0", "", { "dependencies": { "tinypool": "2.0.0" }, "optionalDependencies": { "@oxfmt/darwin-arm64": "0.24.0", "@oxfmt/darwin-x64": "0.24.0", "@oxfmt/linux-arm64-gnu": "0.24.0", "@oxfmt/linux-arm64-musl": "0.24.0", "@oxfmt/linux-x64-gnu": "0.24.0", "@oxfmt/linux-x64-musl": "0.24.0", "@oxfmt/win32-arm64": "0.24.0", "@oxfmt/win32-x64": "0.24.0" }, "bin": { "oxfmt": "bin/oxfmt" } }, "sha512-UjeM3Peez8Tl7IJ9s5UwAoZSiDRMww7BEc21gDYxLq3S3/KqJnM3mjNxsoSHgmBvSeX6RBhoVc2MfC/+96RdSw=="],
|
||||
|
||||
"oxlint": ["oxlint@1.39.0", "", { "optionalDependencies": { "@oxlint/darwin-arm64": "1.39.0", "@oxlint/darwin-x64": "1.39.0", "@oxlint/linux-arm64-gnu": "1.39.0", "@oxlint/linux-arm64-musl": "1.39.0", "@oxlint/linux-x64-gnu": "1.39.0", "@oxlint/linux-x64-musl": "1.39.0", "@oxlint/win32-arm64": "1.39.0", "@oxlint/win32-x64": "1.39.0" }, "peerDependencies": { "oxlint-tsgolint": ">=0.10.0" }, "optionalPeers": ["oxlint-tsgolint"], "bin": { "oxlint": "bin/oxlint" } }, "sha512-wSiLr0wjG+KTU6c1LpVoQk7JZ7l8HCKlAkVDVTJKWmCGazsNxexxnOXl7dsar92mQcRnzko5g077ggP3RINSjA=="],
|
||||
|
||||
"pg": ["pg@8.17.2", "", { "dependencies": { "pg-connection-string": "^2.10.1", "pg-pool": "^3.11.0", "pg-protocol": "^1.11.0", "pg-types": "2.2.0", "pgpass": "1.0.5" }, "optionalDependencies": { "pg-cloudflare": "^1.3.0" }, "peerDependencies": { "pg-native": ">=3.0.1" }, "optionalPeers": ["pg-native"] }, "sha512-vjbKdiBJRqzcYw1fNU5KuHyYvdJ1qpcQg1CeBrHFqV1pWgHeVR6j/+kX0E1AAXfyuLUGY1ICrN2ELKA/z2HWzw=="],
|
||||
|
||||
"pg-cloudflare": ["pg-cloudflare@1.3.0", "", {}, "sha512-6lswVVSztmHiRtD6I8hw4qP/nDm1EJbKMRhf3HCYaqud7frGysPv7FYJ5noZQdhQtN2xJnimfMtvQq21pdbzyQ=="],
|
||||
|
||||
"pg-connection-string": ["pg-connection-string@2.10.1", "", {}, "sha512-iNzslsoeSH2/gmDDKiyMqF64DATUCWj3YJ0wP14kqcsf2TUklwimd+66yYojKwZCA7h2yRNLGug71hCBA2a4sw=="],
|
||||
|
||||
"pg-int8": ["pg-int8@1.0.1", "", {}, "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="],
|
||||
|
||||
"pg-pool": ["pg-pool@3.11.0", "", { "peerDependencies": { "pg": ">=8.0" } }, "sha512-MJYfvHwtGp870aeusDh+hg9apvOe2zmpZJpyt+BMtzUWlVqbhFmMK6bOBXLBUPd7iRtIF9fZplDc7KrPN3PN7w=="],
|
||||
|
||||
"pg-protocol": ["pg-protocol@1.11.0", "", {}, "sha512-pfsxk2M9M3BuGgDOfuy37VNRRX3jmKgMjcvAcWqNDpZSf4cUmv8HSOl5ViRQFsfARFn0KuUQTgLxVMbNq5NW3g=="],
|
||||
|
||||
"pg-types": ["pg-types@2.2.0", "", { "dependencies": { "pg-int8": "1.0.1", "postgres-array": "~2.0.0", "postgres-bytea": "~1.0.0", "postgres-date": "~1.0.4", "postgres-interval": "^1.1.0" } }, "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA=="],
|
||||
|
||||
"pgpass": ["pgpass@1.0.5", "", { "dependencies": { "split2": "^4.1.0" } }, "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug=="],
|
||||
|
||||
"postgres-array": ["postgres-array@2.0.0", "", {}, "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA=="],
|
||||
|
||||
"postgres-bytea": ["postgres-bytea@1.0.1", "", {}, "sha512-5+5HqXnsZPE65IJZSMkZtURARZelel2oXUEO8rH83VS/hxH5vv1uHquPg5wZs8yMAfdv971IU+kcPUczi7NVBQ=="],
|
||||
|
||||
"postgres-date": ["postgres-date@1.0.7", "", {}, "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q=="],
|
||||
|
||||
"postgres-interval": ["postgres-interval@1.2.0", "", { "dependencies": { "xtend": "^4.0.0" } }, "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ=="],
|
||||
|
||||
"rou3": ["rou3@0.7.12", "", {}, "sha512-iFE4hLDuloSWcD7mjdCDhx2bKcIsYbtOTpfH5MHHLSKMOUyjqQXTeZVa289uuwEGEKFoE/BAPbhaU4B774nceg=="],
|
||||
|
||||
"set-cookie-parser": ["set-cookie-parser@2.7.2", "", {}, "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw=="],
|
||||
|
||||
"split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="],
|
||||
|
||||
"tinypool": ["tinypool@2.0.0", "", {}, "sha512-/RX9RzeH2xU5ADE7n2Ykvmi9ED3FBGPAjw9u3zucrNNaEBIO0HPSYgL0NT7+3p147ojeSdaVu08F6hjpv31HJg=="],
|
||||
|
||||
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
|
||||
|
||||
"undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
|
||||
|
||||
"xtend": ["xtend@4.0.2", "", {}, "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="],
|
||||
|
||||
"zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="],
|
||||
}
|
||||
}
|
||||
|
||||
178
docs/backend-spec.md
Normal file
@@ -0,0 +1,178 @@
|
||||
# ARIS Backend Specification
|
||||
|
||||
## Problem Statement
|
||||
|
||||
ARIS needs a backend service that manages per-user FeedEngine instances and delivers real-time feed updates to clients. The backend must handle authentication, maintain WebSocket connections for live updates, and accept context updates (like location) that trigger feed recalculations.
|
||||
|
||||
## Requirements
|
||||
|
||||
### Authentication
|
||||
- Email/password authentication using BetterAuth
|
||||
- PostgreSQL for session and user storage
|
||||
- Session tokens validated via `Authorization: Bearer <token>` header
|
||||
- Auth endpoints exposed via BetterAuth's built-in routes
|
||||
|
||||
### FeedEngine Management
|
||||
- Each authenticated user gets their own FeedEngine instance
|
||||
- Instances are cached in memory with a 30-minute TTL
|
||||
- TTL resets on any activity (WebSocket message, location update)
|
||||
- Default sources registered for each user: `LocationSource`, `WeatherSource`, `TflSource`
|
||||
- Source configuration is hardcoded initially (customization deferred)
|
||||
|
||||
### WebSocket Connection
|
||||
- Single endpoint: `GET /ws` (upgrades to WebSocket)
|
||||
- Authentication via `Authorization: Bearer <token>` header on upgrade request
|
||||
- Rejected before upgrade if token is invalid
|
||||
- Multiple connections per user allowed (e.g., multiple devices)
|
||||
- All connections for a user receive the same feed updates
|
||||
- On connect: immediately send current feed state
|
||||
|
||||
### JSON-RPC Protocol
|
||||
All WebSocket communication uses JSON-RPC 2.0.
|
||||
|
||||
**Client → Server (Requests):**
|
||||
```json
|
||||
{ "jsonrpc": "2.0", "method": "location.update", "params": { "lat": 51.5, "lng": -0.1, "accuracy": 10, "timestamp": "2025-01-01T12:00:00Z" }, "id": 1 }
|
||||
{ "jsonrpc": "2.0", "method": "feed.refresh", "params": {}, "id": 2 }
|
||||
```
|
||||
|
||||
**Server → Client (Responses):**
|
||||
```json
|
||||
{ "jsonrpc": "2.0", "result": { "ok": true }, "id": 1 }
|
||||
```
|
||||
|
||||
**Server → Client (Notifications - no id):**
|
||||
```json
|
||||
{ "jsonrpc": "2.0", "method": "feed.update", "params": { "items": [...], "errors": [...] } }
|
||||
```
|
||||
|
||||
### JSON-RPC Methods
|
||||
|
||||
| Method | Params | Description |
|
||||
|--------|--------|-------------|
|
||||
| `location.update` | `{ lat, lng, accuracy, timestamp }` | Push location update, triggers feed refresh |
|
||||
| `feed.refresh` | `{}` | Force manual feed refresh |
|
||||
|
||||
### Server Notifications
|
||||
|
||||
| Method | Params | Description |
|
||||
|--------|--------|-------------|
|
||||
| `feed.update` | `{ context, items, errors }` | Feed state changed |
|
||||
| `error` | `{ code, message, data? }` | Source or system error |
|
||||
|
||||
### Error Handling
|
||||
- Source failures during refresh are reported via `error` notification
|
||||
- Format: `{ "jsonrpc": "2.0", "method": "error", "params": { "code": -32000, "message": "...", "data": { "sourceId": "weather" } } }`
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
1. **Auth Flow**
|
||||
- [ ] User can sign up with email/password via `POST /api/auth/sign-up`
|
||||
- [ ] User can sign in via `POST /api/auth/sign-in` and receive session token
|
||||
- [ ] Invalid credentials return 401
|
||||
|
||||
2. **WebSocket Connection**
|
||||
- [ ] `GET /ws` with valid `Authorization` header upgrades to WebSocket
|
||||
- [ ] `GET /ws` without valid token returns 401 (no upgrade)
|
||||
- [ ] On successful connect, client receives `feed.update` notification with current state
|
||||
- [ ] Multiple connections from same user all receive updates
|
||||
|
||||
3. **FeedEngine Lifecycle**
|
||||
- [ ] First connection for a user creates FeedEngine with default sources
|
||||
- [ ] Subsequent connections reuse the same FeedEngine
|
||||
- [ ] FeedEngine is destroyed after 30 minutes of inactivity
|
||||
- [ ] Activity (any WebSocket message) resets the TTL
|
||||
|
||||
4. **JSON-RPC Methods**
|
||||
- [ ] `location.update` updates LocationSource and triggers feed refresh
|
||||
- [ ] `feed.refresh` triggers manual refresh
|
||||
- [ ] Both return `{ "ok": true }` on success
|
||||
- [ ] Invalid method returns JSON-RPC error
|
||||
|
||||
5. **Feed Updates**
|
||||
- [ ] FeedEngine subscription pushes updates to all user's WebSocket connections
|
||||
- [ ] Updates include `context`, `items`, and `errors`
|
||||
|
||||
## Implementation Approach

### Phase 1: Project Setup

1. Create `apps/aris-backend` with Hono
2. Configure TypeScript, add dependencies (hono, better-auth, postgres driver)
3. Set up database connection and BetterAuth

### Phase 2: Authentication

4. Configure BetterAuth with email/password provider
5. Mount BetterAuth routes at `/api/auth/*`
6. Create session validation helper for extracting user from token

### Phase 3: FeedEngine Manager

7. Create `FeedEngineManager` class (a sketch follows this phase):
   - `getOrCreate(userId): FeedEngine` - returns cached or creates new
   - `touch(userId)` - resets TTL
   - `destroy(userId)` - manual cleanup
   - Internal TTL cleanup loop
8. Factory function to create FeedEngine with default sources

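A minimal sketch of the manager described in step 7, assuming the factory from step 8 and that `FeedEngine` is exported from `@aris/core`; the method names follow the spec, the body is illustrative.

```ts
import type { FeedEngine } from "@aris/core"

const TTL_MS = 30 * 60 * 1000 // destroy after 30 minutes of inactivity

interface Entry {
  engine: FeedEngine
  lastActive: number
}

class FeedEngineManager {
  private entries = new Map<string, Entry>()

  constructor(private createDefaultEngine: (userId: string) => FeedEngine) {
    // Periodically sweep idle engines.
    setInterval(() => this.sweep(), 60_000)
  }

  getOrCreate(userId: string): FeedEngine {
    let entry = this.entries.get(userId)
    if (!entry) {
      entry = { engine: this.createDefaultEngine(userId), lastActive: Date.now() }
      entry.engine.start()
      this.entries.set(userId, entry)
    }
    entry.lastActive = Date.now()
    return entry.engine
  }

  touch(userId: string): void {
    const entry = this.entries.get(userId)
    if (entry) entry.lastActive = Date.now()
  }

  destroy(userId: string): void {
    const entry = this.entries.get(userId)
    if (entry) {
      entry.engine.stop()
      this.entries.delete(userId)
    }
  }

  private sweep(): void {
    const now = Date.now()
    for (const [userId, entry] of this.entries) {
      if (now - entry.lastActive > TTL_MS) this.destroy(userId)
    }
  }
}
```

On every WebSocket message the handler would call `touch(userId)`, so active users are never evicted.
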
### Phase 4: WebSocket Handler

9. Create WebSocket upgrade endpoint at `/ws`
10. Validate `Authorization` header before upgrade
11. On connect: register connection, send initial feed state
12. On disconnect: unregister connection

### Phase 5: JSON-RPC Handler

13. Create JSON-RPC message parser and dispatcher
14. Implement `location.update` method
15. Implement `feed.refresh` method
16. Wire FeedEngine subscription to broadcast `feed.update` to all user connections (see the sketch after this list)

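For steps 14 and 15, a sketch of how the two method handlers might be wired to a user's engine. `engine.refresh()` is the real `FeedEngine` API; the `setLocation` setter on the server-side LocationSource is an assumption for illustration, and param validation is omitted.

```ts
import type { FeedEngine } from "@aris/core"

// Illustrative wiring of the two JSON-RPC methods to a user's FeedEngine.
function createMethodHandlers(
  engine: FeedEngine,
  // Assumed interface: the server-side LocationSource exposes a setter that
  // pushes the new location into the source graph.
  locationSource: {
    setLocation(loc: { lat: number; lng: number; accuracy: number; timestamp: string }): void
  },
) {
  return {
    "location.update": async (params: unknown) => {
      const loc = params as { lat: number; lng: number; accuracy: number; timestamp: string }
      locationSource.setLocation(loc) // triggers a refresh through the source graph
      return { ok: true }
    },
    "feed.refresh": async () => {
      await engine.refresh()
      return { ok: true }
    },
  }
}
```
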
### Phase 6: Connection Manager

17. Create `ConnectionManager` to track WebSocket connections per user
18. Broadcast helper to send to all connections for a user (sketched below)

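A sketch of the connection registry and broadcast helper, also covering the step 16 wiring from the previous phase. The minimal `send`-only socket interface keeps the sketch framework-agnostic, and `FeedResult` is assumed to be re-exported from `@aris/core`.

```ts
import type { FeedEngine, FeedResult } from "@aris/core"

// Anything with a send() works here (Bun's ServerWebSocket, ws, etc.).
interface SocketLike {
  send(data: string): void
}

class ConnectionManager {
  private byUser = new Map<string, Set<SocketLike>>()

  add(userId: string, socket: SocketLike): void {
    const set = this.byUser.get(userId) ?? new Set<SocketLike>()
    set.add(socket)
    this.byUser.set(userId, set)
  }

  remove(userId: string, socket: SocketLike): void {
    this.byUser.get(userId)?.delete(socket)
  }

  broadcast(userId: string, method: string, params: unknown): void {
    const message = JSON.stringify({ jsonrpc: "2.0", method, params })
    for (const socket of this.byUser.get(userId) ?? []) {
      socket.send(message)
    }
  }
}

// Step 16: forward FeedEngine updates to every connection the user has open.
function wireFeedUpdates(userId: string, engine: FeedEngine, connections: ConnectionManager): () => void {
  return engine.subscribe((result: FeedResult) => {
    connections.broadcast(userId, "feed.update", {
      context: result.context,
      items: result.items,
      errors: result.errors,
    })
  })
}
```
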
### Phase 7: Integration & Testing

19. Integration test: auth → connect → location update → receive feed
20. Test multiple connections receive same updates
21. Test TTL cleanup

## Package Structure

```
apps/aris-backend/
├── package.json
├── src/
│   ├── index.ts           # Entry point, Hono app
│   ├── auth.ts            # BetterAuth configuration
│   ├── db.ts              # Database connection
│   ├── ws/
│   │   ├── handler.ts     # WebSocket upgrade & message handling
│   │   ├── jsonrpc.ts     # JSON-RPC parser & types
│   │   └── methods.ts     # Method implementations
│   ├── feed/
│   │   ├── manager.ts     # FeedEngineManager (TTL cache)
│   │   ├── factory.ts     # Creates FeedEngine with default sources
│   │   └── connections.ts # ConnectionManager (user → WebSocket[])
│   └── types.ts           # Shared types
```

## Dependencies

```json
{
  "dependencies": {
    "hono": "^4",
    "better-auth": "^1",
    "postgres": "^3",
    "@aris/core": "workspace:*",
    "@aris/source-location": "workspace:*",
    "@aris/source-weatherkit": "workspace:*",
    "@aris/data-source-tfl": "workspace:*"
  }
}
```

## Open Questions (Deferred)

- User source configuration storage (database schema)
- Rate limiting on WebSocket methods
- Reconnection handling (client-side concern)
- Horizontal scaling (would need Redis for shared state)

packages/aris-core/README.md (new file, 186 lines)
@@ -0,0 +1,186 @@

# @aris/core

Core orchestration layer for ARIS feed reconciliation.

## Overview

```mermaid
flowchart TB
    subgraph Sources["Feed Sources (Graph)"]
        LS[Location Source]
        WS[Weather Source]
        TS[TFL Source]
        CS[Calendar Source]
    end

    LS --> WS
    LS --> TS

    subgraph Controller["FeedController"]
        direction TB
        C1[Holds context]
        C2[Manages source graph]
        C3[Reconciles on update]
        C4[Notifies subscribers]
    end

    Sources --> Controller
    Controller --> Sub[Subscribers]
```

## Concepts

### FeedSource

A unified interface for sources that provide context and/or feed items. Sources form a dependency graph.

```ts
interface FeedSource<TItem extends FeedItem = FeedItem> {
  readonly id: string
  readonly dependencies?: readonly string[]

  // Context production (optional)
  onContextUpdate?(
    callback: (update: Partial<Context>) => void,
    getContext: () => Context,
  ): () => void
  fetchContext?(context: Context): Promise<Partial<Context>>

  // Feed item production (optional)
  onItemsUpdate?(callback: (items: TItem[]) => void, getContext: () => Context): () => void
  fetchItems?(context: Context): Promise<TItem[]>
}
```

A source may:

- Provide context for other sources (implement `fetchContext`/`onContextUpdate`)
- Produce feed items (implement `fetchItems`/`onItemsUpdate`)
- Both

### Context Keys

Each package exports typed context keys for type-safe access:

```ts
import { contextKey, type ContextKey } from "@aris/core"

interface Location {
  lat: number
  lng: number
}

export const LocationKey: ContextKey<Location> = contextKey("location")
```

## Usage

### Define a Context-Only Source

```ts
import type { FeedSource } from "@aris/core"

const locationSource: FeedSource = {
  id: "location",

  onContextUpdate(callback, _getContext) {
    const watchId = navigator.geolocation.watchPosition((pos) => {
      callback({
        [LocationKey]: { lat: pos.coords.latitude, lng: pos.coords.longitude },
      })
    })
    return () => navigator.geolocation.clearWatch(watchId)
  },

  async fetchContext() {
    const pos = await getCurrentPosition()
    return {
      [LocationKey]: { lat: pos.coords.latitude, lng: pos.coords.longitude },
    }
  },
}
```

### Define a Source with Dependencies

```ts
import type { FeedSource, FeedItem } from "@aris/core"
import { contextValue } from "@aris/core"

type WeatherItem = FeedItem<"weather", { temp: number; condition: string }>

const weatherSource: FeedSource<WeatherItem> = {
  id: "weather",
  dependencies: ["location"],

  async fetchContext(context) {
    const location = contextValue(context, LocationKey)
    if (!location) return {}

    const weather = await fetchWeatherApi(location)
    return { [WeatherKey]: weather }
  },

  async fetchItems(context) {
    const weather = contextValue(context, WeatherKey)
    if (!weather) return []

    return [
      {
        id: `weather-${Date.now()}`,
        type: "weather",
        priority: 0.5,
        timestamp: new Date(),
        data: { temp: weather.temp, condition: weather.condition },
      },
    ]
  },
}
```

### Graph Behavior

The source graph:

1. Validates all dependencies exist
2. Detects circular dependencies
3. Topologically sorts sources

On refresh:

1. `fetchContext` runs in dependency order
2. `fetchItems` runs on all sources
3. Combined items returned to subscribers

On reactive update:

1. Source pushes context update via `onContextUpdate` callback
2. Dependent sources re-run `fetchContext`
3. Affected sources re-run `fetchItems`
4. Subscribers notified

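Putting the pieces together with the `FeedEngine` orchestrator (a brief sketch using the `locationSource` and `weatherSource` defined above, and assuming `FeedEngine` is exported from the package root):

```ts
import { FeedEngine } from "@aris/core"

const engine = new FeedEngine()
  .register(locationSource)
  .register(weatherSource)

// Pull-based refresh: runs fetchContext in dependency order, then fetchItems.
const { context, items, errors } = await engine.refresh()

// Reactive updates: dependents re-run when a source pushes new context.
const unsubscribe = engine.subscribe((result) => {
  console.log(result.items)
})
engine.start()

// Cleanup
unsubscribe()
engine.stop()
```
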
## API

### Context

| Export                       | Description                             |
| ---------------------------- | --------------------------------------- |
| `ContextKey<T>`              | Branded type for type-safe context keys |
| `contextKey<T>(key)`         | Creates a typed context key             |
| `contextValue(context, key)` | Type-safe context value accessor        |
| `Context`                    | Time + arbitrary key-value bag          |

### Feed

| Export                   | Description              |
| ------------------------ | ------------------------ |
| `FeedSource<TItem>`      | Unified source interface |
| `FeedItem<TType, TData>` | Single item in the feed  |

### Legacy (deprecated)

| Export                       | Description              |
| ---------------------------- | ------------------------ |
| `DataSource<TItem, TConfig>` | Use `FeedSource` instead |
| `ContextProvider<T>`         | Use `FeedSource` instead |
| `ContextBridge`              | Use source graph instead |

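For migration, a legacy `DataSource` can be wrapped as a `FeedSource` by forwarding `query` to `fetchItems`. The adapter below is a sketch, not an exported helper.

```ts
import type { Context, DataSource, FeedItem, FeedSource } from "@aris/core"

// Wraps a deprecated DataSource so it can participate in the source graph.
function fromDataSource<TItem extends FeedItem, TConfig>(
  source: DataSource<TItem, TConfig>,
  config: TConfig,
  dependencies: readonly string[] = [],
): FeedSource<TItem> {
  return {
    id: source.type,
    dependencies,
    async fetchItems(context: Context) {
      return source.query(context, config)
    },
  }
}
```
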
@@ -1,10 +0,0 @@
|
||||
export interface Location {
|
||||
lat: number
|
||||
lng: number
|
||||
accuracy: number
|
||||
}
|
||||
|
||||
export interface Context {
|
||||
time: Date
|
||||
location?: Location
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
import type { Context } from "./context"
|
||||
import type { FeedItem } from "./feed"
|
||||
|
||||
export interface DataSource<TItem extends FeedItem = FeedItem, TConfig = unknown> {
|
||||
readonly type: TItem["type"]
|
||||
query(context: Context, config: TConfig): Promise<TItem[]>
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
export interface FeedItem<
|
||||
TType extends string = string,
|
||||
TData extends Record<string, unknown> = Record<string, unknown>,
|
||||
> {
|
||||
id: string
|
||||
type: TType
|
||||
priority: number
|
||||
timestamp: Date
|
||||
data: TData
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
export type { Context, Location } from "./context"
|
||||
export type { FeedItem } from "./feed"
|
||||
export type { DataSource } from "./data-source"
|
||||
export type { ReconcilerConfig, ReconcileResult, SourceError } from "./reconciler"
|
||||
export { Reconciler } from "./reconciler"
|
||||
@@ -2,8 +2,8 @@
|
||||
"name": "@aris/core",
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
"main": "index.ts",
|
||||
"types": "index.ts",
|
||||
"main": "src/index.ts",
|
||||
"types": "src/index.ts",
|
||||
"scripts": {
|
||||
"test": "bun test ."
|
||||
}
|
||||
|
||||
@@ -1,240 +0,0 @@
|
||||
import { describe, expect, test } from "bun:test"
|
||||
|
||||
import type { Context } from "./context"
|
||||
import type { DataSource } from "./data-source"
|
||||
import type { FeedItem } from "./feed"
|
||||
|
||||
import { Reconciler } from "./reconciler"
|
||||
|
||||
type WeatherData = { temp: number }
|
||||
type WeatherItem = FeedItem<"weather", WeatherData>
|
||||
|
||||
type CalendarData = { title: string }
|
||||
type CalendarItem = FeedItem<"calendar", CalendarData>
|
||||
|
||||
const createMockContext = (): Context => ({
|
||||
time: new Date("2026-01-15T12:00:00Z"),
|
||||
})
|
||||
|
||||
const createWeatherSource = (items: WeatherItem[], delay = 0): DataSource<WeatherItem> => ({
|
||||
type: "weather",
|
||||
async query() {
|
||||
if (delay > 0) {
|
||||
await new Promise((resolve) => setTimeout(resolve, delay))
|
||||
}
|
||||
return items
|
||||
},
|
||||
})
|
||||
|
||||
const createCalendarSource = (items: CalendarItem[]): DataSource<CalendarItem> => ({
|
||||
type: "calendar",
|
||||
async query() {
|
||||
return items
|
||||
},
|
||||
})
|
||||
|
||||
const createFailingSource = (type: string, error: Error): DataSource<FeedItem> => ({
|
||||
type,
|
||||
async query() {
|
||||
throw error
|
||||
},
|
||||
})
|
||||
|
||||
describe("Reconciler", () => {
|
||||
test("returns empty result when no sources registered", async () => {
|
||||
const reconciler = new Reconciler()
|
||||
const result = await reconciler.reconcile(createMockContext())
|
||||
|
||||
expect(result.items).toEqual([])
|
||||
expect(result.errors).toEqual([])
|
||||
})
|
||||
|
||||
test("collects items from single source", async () => {
|
||||
const items: WeatherItem[] = [
|
||||
{
|
||||
id: "weather-1",
|
||||
type: "weather",
|
||||
priority: 0.5,
|
||||
timestamp: new Date(),
|
||||
data: { temp: 20 },
|
||||
},
|
||||
]
|
||||
|
||||
const reconciler = new Reconciler().register(createWeatherSource(items))
|
||||
const result = await reconciler.reconcile(createMockContext())
|
||||
|
||||
expect(result.items).toEqual(items)
|
||||
expect(result.errors).toEqual([])
|
||||
})
|
||||
|
||||
test("collects items from multiple sources", async () => {
|
||||
const weatherItems: WeatherItem[] = [
|
||||
{
|
||||
id: "weather-1",
|
||||
type: "weather",
|
||||
priority: 0.5,
|
||||
timestamp: new Date(),
|
||||
data: { temp: 20 },
|
||||
},
|
||||
]
|
||||
|
||||
const calendarItems: CalendarItem[] = [
|
||||
{
|
||||
id: "calendar-1",
|
||||
type: "calendar",
|
||||
priority: 0.8,
|
||||
timestamp: new Date(),
|
||||
data: { title: "Meeting" },
|
||||
},
|
||||
]
|
||||
|
||||
const reconciler = new Reconciler()
|
||||
.register(createWeatherSource(weatherItems))
|
||||
.register(createCalendarSource(calendarItems))
|
||||
|
||||
const result = await reconciler.reconcile(createMockContext())
|
||||
|
||||
expect(result.items).toHaveLength(2)
|
||||
expect(result.errors).toEqual([])
|
||||
})
|
||||
|
||||
test("sorts items by priority descending", async () => {
|
||||
const weatherItems: WeatherItem[] = [
|
||||
{
|
||||
id: "weather-1",
|
||||
type: "weather",
|
||||
priority: 0.2,
|
||||
timestamp: new Date(),
|
||||
data: { temp: 20 },
|
||||
},
|
||||
]
|
||||
|
||||
const calendarItems: CalendarItem[] = [
|
||||
{
|
||||
id: "calendar-1",
|
||||
type: "calendar",
|
||||
priority: 0.9,
|
||||
timestamp: new Date(),
|
||||
data: { title: "Meeting" },
|
||||
},
|
||||
]
|
||||
|
||||
const reconciler = new Reconciler()
|
||||
.register(createWeatherSource(weatherItems))
|
||||
.register(createCalendarSource(calendarItems))
|
||||
|
||||
const result = await reconciler.reconcile(createMockContext())
|
||||
|
||||
expect(result.items[0]?.id).toBe("calendar-1")
|
||||
expect(result.items[1]?.id).toBe("weather-1")
|
||||
})
|
||||
|
||||
test("captures errors from failing sources", async () => {
|
||||
const error = new Error("Source failed")
|
||||
|
||||
const reconciler = new Reconciler().register(createFailingSource("failing", error))
|
||||
|
||||
const result = await reconciler.reconcile(createMockContext())
|
||||
|
||||
expect(result.items).toEqual([])
|
||||
expect(result.errors).toHaveLength(1)
|
||||
expect(result.errors[0]?.sourceType).toBe("failing")
|
||||
expect(result.errors[0]?.error.message).toBe("Source failed")
|
||||
})
|
||||
|
||||
test("returns partial results when some sources fail", async () => {
|
||||
const items: WeatherItem[] = [
|
||||
{
|
||||
id: "weather-1",
|
||||
type: "weather",
|
||||
priority: 0.5,
|
||||
timestamp: new Date(),
|
||||
data: { temp: 20 },
|
||||
},
|
||||
]
|
||||
|
||||
const reconciler = new Reconciler()
|
||||
.register(createWeatherSource(items))
|
||||
.register(createFailingSource("failing", new Error("Failed")))
|
||||
|
||||
const result = await reconciler.reconcile(createMockContext())
|
||||
|
||||
expect(result.items).toHaveLength(1)
|
||||
expect(result.errors).toHaveLength(1)
|
||||
})
|
||||
|
||||
test("times out slow sources", async () => {
|
||||
const items: WeatherItem[] = [
|
||||
{
|
||||
id: "weather-1",
|
||||
type: "weather",
|
||||
priority: 0.5,
|
||||
timestamp: new Date(),
|
||||
data: { temp: 20 },
|
||||
},
|
||||
]
|
||||
|
||||
const reconciler = new Reconciler({ timeout: 50 }).register(createWeatherSource(items, 100))
|
||||
|
||||
const result = await reconciler.reconcile(createMockContext())
|
||||
|
||||
expect(result.items).toEqual([])
|
||||
expect(result.errors).toHaveLength(1)
|
||||
expect(result.errors[0]?.sourceType).toBe("weather")
|
||||
expect(result.errors[0]?.error.message).toContain("timed out")
|
||||
})
|
||||
|
||||
test("unregister removes source", async () => {
|
||||
const items: WeatherItem[] = [
|
||||
{
|
||||
id: "weather-1",
|
||||
type: "weather",
|
||||
priority: 0.5,
|
||||
timestamp: new Date(),
|
||||
data: { temp: 20 },
|
||||
},
|
||||
]
|
||||
|
||||
const reconciler = new Reconciler().register(createWeatherSource(items)).unregister("weather")
|
||||
|
||||
const result = await reconciler.reconcile(createMockContext())
|
||||
expect(result.items).toEqual([])
|
||||
})
|
||||
|
||||
test("infers discriminated union type from chained registers", async () => {
|
||||
const weatherItems: WeatherItem[] = [
|
||||
{
|
||||
id: "weather-1",
|
||||
type: "weather",
|
||||
priority: 0.5,
|
||||
timestamp: new Date(),
|
||||
data: { temp: 20 },
|
||||
},
|
||||
]
|
||||
|
||||
const calendarItems: CalendarItem[] = [
|
||||
{
|
||||
id: "calendar-1",
|
||||
type: "calendar",
|
||||
priority: 0.8,
|
||||
timestamp: new Date(),
|
||||
data: { title: "Meeting" },
|
||||
},
|
||||
]
|
||||
|
||||
const reconciler = new Reconciler()
|
||||
.register(createWeatherSource(weatherItems))
|
||||
.register(createCalendarSource(calendarItems))
|
||||
|
||||
const { items } = await reconciler.reconcile(createMockContext())
|
||||
|
||||
// Type narrowing should work
|
||||
for (const item of items) {
|
||||
if (item.type === "weather") {
|
||||
expect(typeof item.data.temp).toBe("number")
|
||||
} else if (item.type === "calendar") {
|
||||
expect(typeof item.data.title).toBe("string")
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
102
packages/aris-core/src/context-bridge.ts
Normal file
102
packages/aris-core/src/context-bridge.ts
Normal file
@@ -0,0 +1,102 @@
|
||||
import type { Context } from "./context"
|
||||
import type { ContextProvider } from "./context-provider"
|
||||
|
||||
interface ContextUpdatable {
|
||||
pushContextUpdate(update: Partial<Context>): void
|
||||
}
|
||||
|
||||
export interface ProviderError {
|
||||
key: string
|
||||
error: Error
|
||||
}
|
||||
|
||||
export interface RefreshResult {
|
||||
errors: ProviderError[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Bridges context providers to a feed controller.
|
||||
*
|
||||
* Subscribes to provider updates and forwards them to the controller.
|
||||
* Supports manual refresh to gather current values from all providers.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const controller = new FeedController()
|
||||
* .addDataSource(new WeatherDataSource())
|
||||
* .addDataSource(new TflDataSource())
|
||||
*
|
||||
* const bridge = new ContextBridge(controller)
|
||||
* .addProvider(new LocationProvider())
|
||||
* .addProvider(new MusicProvider())
|
||||
*
|
||||
* // Manual refresh gathers from all providers
|
||||
* await bridge.refresh()
|
||||
*
|
||||
* // Cleanup
|
||||
* bridge.stop()
|
||||
* controller.stop()
|
||||
* ```
|
||||
*/
|
||||
export class ContextBridge {
|
||||
private controller: ContextUpdatable
|
||||
private providers = new Map<string, ContextProvider>()
|
||||
private cleanups: Array<() => void> = []
|
||||
|
||||
constructor(controller: ContextUpdatable) {
|
||||
this.controller = controller
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a context provider. Immediately subscribes to updates.
|
||||
*/
|
||||
addProvider<T>(provider: ContextProvider<T>): this {
|
||||
this.providers.set(provider.key, provider as ContextProvider)
|
||||
|
||||
const cleanup = provider.onUpdate((value) => {
|
||||
this.controller.pushContextUpdate({ [provider.key]: value })
|
||||
})
|
||||
this.cleanups.push(cleanup)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* Gathers current values from all providers and pushes to controller.
|
||||
* Use for manual refresh when user pulls to refresh.
|
||||
* Returns errors from providers that failed to fetch.
|
||||
*/
|
||||
async refresh(): Promise<RefreshResult> {
|
||||
const updates: Partial<Context> = {}
|
||||
const errors: ProviderError[] = []
|
||||
|
||||
const entries = Array.from(this.providers.entries())
|
||||
const results = await Promise.allSettled(
|
||||
entries.map(([_, provider]) => provider.fetchCurrentValue()),
|
||||
)
|
||||
|
||||
entries.forEach(([key], i) => {
|
||||
const result = results[i]
|
||||
if (result?.status === "fulfilled") {
|
||||
updates[key] = result.value
|
||||
} else if (result?.status === "rejected") {
|
||||
errors.push({
|
||||
key,
|
||||
error: result.reason instanceof Error ? result.reason : new Error(String(result.reason)),
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
this.controller.pushContextUpdate(updates)
|
||||
|
||||
return { errors }
|
||||
}
|
||||
|
||||
/**
|
||||
* Unsubscribes from all providers.
|
||||
*/
|
||||
stop(): void {
|
||||
this.cleanups.forEach((cleanup) => cleanup())
|
||||
this.cleanups = []
|
||||
}
|
||||
}
|
||||
35
packages/aris-core/src/context-provider.ts
Normal file
35
packages/aris-core/src/context-provider.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
/**
|
||||
* Provides context values reactively and on-demand.
|
||||
*
|
||||
* Implementations push updates when values change (reactive) and
|
||||
* return current values when requested (for manual refresh).
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* class LocationProvider implements ContextProvider<Location> {
|
||||
* readonly key = LocationKey
|
||||
*
|
||||
* onUpdate(callback: (value: Location) => void): () => void {
|
||||
* const watchId = navigator.geolocation.watchPosition(pos => {
|
||||
* callback({ lat: pos.coords.latitude, lng: pos.coords.longitude, accuracy: pos.coords.accuracy })
|
||||
* })
|
||||
* return () => navigator.geolocation.clearWatch(watchId)
|
||||
* }
|
||||
*
|
||||
* async fetchCurrentValue(): Promise<Location> {
|
||||
* const pos = await getCurrentPosition()
|
||||
* return { lat: pos.coords.latitude, lng: pos.coords.longitude, accuracy: pos.coords.accuracy }
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export interface ContextProvider<T = unknown> {
|
||||
/** The context key this provider populates */
|
||||
readonly key: string
|
||||
|
||||
/** Subscribe to value changes. Returns cleanup function. */
|
||||
onUpdate(callback: (value: T) => void): () => void
|
||||
|
||||
/** Fetch current value on-demand (used for manual refresh). */
|
||||
fetchCurrentValue(): Promise<T>
|
||||
}
|
||||
46
packages/aris-core/src/context.ts
Normal file
46
packages/aris-core/src/context.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
/**
|
||||
* Branded type for type-safe context keys.
|
||||
*
|
||||
* Each package defines its own keys with associated value types:
|
||||
* ```ts
|
||||
* const LocationKey: ContextKey<Location> = contextKey("location")
|
||||
* ```
|
||||
*/
|
||||
export type ContextKey<T> = string & { __contextValue?: T }
|
||||
|
||||
/**
|
||||
* Creates a typed context key.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* interface Location { lat: number; lng: number; accuracy: number }
|
||||
* const LocationKey: ContextKey<Location> = contextKey("location")
|
||||
* ```
|
||||
*/
|
||||
export function contextKey<T>(key: string): ContextKey<T> {
|
||||
return key as ContextKey<T>
|
||||
}
|
||||
|
||||
/**
|
||||
* Type-safe accessor for context values.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const location = contextValue(context, LocationKey)
|
||||
* if (location) {
|
||||
* console.log(location.lat, location.lng)
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export function contextValue<T>(context: Context, key: ContextKey<T>): T | undefined {
|
||||
return context[key] as T | undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Arbitrary key-value bag representing the current state.
|
||||
* Always includes `time`. Other keys are added by context providers.
|
||||
*/
|
||||
export interface Context {
|
||||
time: Date
|
||||
[key: string]: unknown
|
||||
}
|
||||
35
packages/aris-core/src/data-source.ts
Normal file
35
packages/aris-core/src/data-source.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import type { Context } from "./context"
|
||||
import type { FeedItem } from "./feed"
|
||||
|
||||
/**
|
||||
* Produces feed items from an external source.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* type WeatherItem = FeedItem<"weather", { temp: number }>
|
||||
*
|
||||
* class WeatherDataSource implements DataSource<WeatherItem> {
|
||||
* readonly type = "weather"
|
||||
*
|
||||
* async query(context: Context): Promise<WeatherItem[]> {
|
||||
* const location = contextValue(context, LocationKey)
|
||||
* if (!location) return []
|
||||
* const data = await fetchWeather(location)
|
||||
* return [{
|
||||
* id: `weather-${Date.now()}`,
|
||||
* type: this.type,
|
||||
* priority: 0.5,
|
||||
* timestamp: context.time,
|
||||
* data: { temp: data.temperature },
|
||||
* }]
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export interface DataSource<TItem extends FeedItem = FeedItem, TConfig = unknown> {
|
||||
/** Unique identifier for this source type */
|
||||
readonly type: TItem["type"]
|
||||
|
||||
/** Queries the source and returns feed items */
|
||||
query(context: Context, config: TConfig): Promise<TItem[]>
|
||||
}
|
||||
161
packages/aris-core/src/feed-controller.ts
Normal file
161
packages/aris-core/src/feed-controller.ts
Normal file
@@ -0,0 +1,161 @@
|
||||
import type { Context } from "./context"
|
||||
import type { DataSource } from "./data-source"
|
||||
import type { FeedItem } from "./feed"
|
||||
import type { ReconcileResult } from "./reconciler"
|
||||
|
||||
import { Reconciler } from "./reconciler"
|
||||
|
||||
export interface FeedControllerConfig {
|
||||
/** Timeout for each data source query in milliseconds */
|
||||
timeout?: number
|
||||
/** Debounce window for batching context updates (default: 100ms) */
|
||||
debounceMs?: number
|
||||
/** Initial context state */
|
||||
initialContext?: Context
|
||||
}
|
||||
|
||||
export type FeedSubscriber<TItems extends FeedItem> = (result: ReconcileResult<TItems>) => void
|
||||
|
||||
interface RegisteredSource {
|
||||
source: DataSource<FeedItem, unknown>
|
||||
config: unknown
|
||||
}
|
||||
|
||||
const DEFAULT_DEBOUNCE_MS = 100
|
||||
|
||||
/**
|
||||
* Orchestrates feed reconciliation in response to context updates.
|
||||
*
|
||||
* Holds context state, debounces updates, queries data sources, and
|
||||
* notifies subscribers. Each user should have their own instance.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const controller = new FeedController({ debounceMs: 100 })
|
||||
* .addDataSource(new WeatherDataSource())
|
||||
* .addDataSource(new TflDataSource())
|
||||
*
|
||||
* controller.subscribe((result) => {
|
||||
* console.log(result.items)
|
||||
* })
|
||||
*
|
||||
* // Context update triggers debounced reconcile
|
||||
* controller.pushContextUpdate({ [LocationKey]: location })
|
||||
*
|
||||
* // Direct reconcile (no debounce)
|
||||
* const result = await controller.reconcile()
|
||||
*
|
||||
* // Cleanup
|
||||
* controller.stop()
|
||||
* ```
|
||||
*/
|
||||
export class FeedController<TItems extends FeedItem = never> {
|
||||
private sources = new Map<string, RegisteredSource>()
|
||||
private subscribers = new Set<FeedSubscriber<TItems>>()
|
||||
private context: Context
|
||||
private debounceMs: number
|
||||
private timeout: number | undefined
|
||||
private pendingTimeout: ReturnType<typeof setTimeout> | null = null
|
||||
private stopped = false
|
||||
|
||||
constructor(config?: FeedControllerConfig) {
|
||||
this.context = config?.initialContext ?? { time: new Date() }
|
||||
this.debounceMs = config?.debounceMs ?? DEFAULT_DEBOUNCE_MS
|
||||
this.timeout = config?.timeout
|
||||
}
|
||||
|
||||
/** Registers a data source. */
|
||||
addDataSource<TItem extends FeedItem, TConfig>(
|
||||
source: DataSource<TItem, TConfig>,
|
||||
config?: TConfig,
|
||||
): FeedController<TItems | TItem> {
|
||||
this.sources.set(source.type, {
|
||||
source: source as DataSource<FeedItem, unknown>,
|
||||
config,
|
||||
})
|
||||
return this as FeedController<TItems | TItem>
|
||||
}
|
||||
|
||||
/** Removes a data source by type. */
|
||||
removeDataSource<T extends TItems["type"]>(
|
||||
sourceType: T,
|
||||
): FeedController<Exclude<TItems, { type: T }>> {
|
||||
this.sources.delete(sourceType)
|
||||
return this as unknown as FeedController<Exclude<TItems, { type: T }>>
|
||||
}
|
||||
|
||||
/** Stops the controller and cancels pending reconciles. */
|
||||
stop(): void {
|
||||
this.stopped = true
|
||||
|
||||
if (this.pendingTimeout) {
|
||||
clearTimeout(this.pendingTimeout)
|
||||
this.pendingTimeout = null
|
||||
}
|
||||
}
|
||||
|
||||
/** Merges update into context and schedules a debounced reconcile. */
|
||||
pushContextUpdate(update: Partial<Context>): void {
|
||||
this.context = { ...this.context, ...update, time: new Date() }
|
||||
this.scheduleReconcile()
|
||||
}
|
||||
|
||||
/** Subscribes to feed updates. Returns unsubscribe function. */
|
||||
subscribe(callback: FeedSubscriber<TItems>): () => void {
|
||||
this.subscribers.add(callback)
|
||||
|
||||
return () => {
|
||||
this.subscribers.delete(callback)
|
||||
}
|
||||
}
|
||||
|
||||
/** Immediately reconciles with current or provided context. */
|
||||
async reconcile(context?: Context): Promise<ReconcileResult<TItems>> {
|
||||
const ctx = context ?? this.context
|
||||
const reconciler = this.createReconciler()
|
||||
return reconciler.reconcile(ctx)
|
||||
}
|
||||
|
||||
/** Returns current context. */
|
||||
getContext(): Context {
|
||||
return this.context
|
||||
}
|
||||
|
||||
private scheduleReconcile(): void {
|
||||
if (this.pendingTimeout) return
|
||||
|
||||
this.pendingTimeout = setTimeout(() => {
|
||||
this.flushPending()
|
||||
}, this.debounceMs)
|
||||
}
|
||||
|
||||
private async flushPending(): Promise<void> {
|
||||
this.pendingTimeout = null
|
||||
|
||||
if (this.stopped) return
|
||||
if (this.sources.size === 0) return
|
||||
|
||||
const reconciler = this.createReconciler()
|
||||
const result = await reconciler.reconcile(this.context)
|
||||
|
||||
this.notifySubscribers(result)
|
||||
}
|
||||
|
||||
private createReconciler(): Reconciler<TItems> {
|
||||
const reconciler = new Reconciler<TItems>({ timeout: this.timeout })
|
||||
Array.from(this.sources.values()).forEach(({ source, config }) => {
|
||||
reconciler.register(source, config)
|
||||
})
|
||||
return reconciler as Reconciler<TItems>
|
||||
}
|
||||
|
||||
private notifySubscribers(result: ReconcileResult<TItems>): void {
|
||||
this.subscribers.forEach((callback) => {
|
||||
try {
|
||||
callback(result)
|
||||
} catch {
|
||||
// Subscriber errors shouldn't break other subscribers
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
458
packages/aris-core/src/feed-engine.test.ts
Normal file
458
packages/aris-core/src/feed-engine.test.ts
Normal file
@@ -0,0 +1,458 @@
|
||||
import { describe, expect, test } from "bun:test"
|
||||
|
||||
import type { Context, ContextKey, FeedItem, FeedSource } from "./index"
|
||||
|
||||
import { FeedEngine } from "./feed-engine"
|
||||
import { contextKey, contextValue } from "./index"
|
||||
|
||||
// =============================================================================
|
||||
// CONTEXT KEYS
|
||||
// =============================================================================
|
||||
|
||||
interface Location {
|
||||
lat: number
|
||||
lng: number
|
||||
}
|
||||
|
||||
interface Weather {
|
||||
temperature: number
|
||||
condition: string
|
||||
}
|
||||
|
||||
const LocationKey: ContextKey<Location> = contextKey("location")
|
||||
const WeatherKey: ContextKey<Weather> = contextKey("weather")
|
||||
|
||||
// =============================================================================
|
||||
// FEED ITEMS
|
||||
// =============================================================================
|
||||
|
||||
type WeatherFeedItem = FeedItem<"weather", { temperature: number; condition: string }>
|
||||
type AlertFeedItem = FeedItem<"alert", { message: string }>
|
||||
|
||||
// =============================================================================
|
||||
// TEST HELPERS
|
||||
// =============================================================================
|
||||
|
||||
interface SimulatedLocationSource extends FeedSource {
|
||||
simulateUpdate(location: Location): void
|
||||
}
|
||||
|
||||
function createLocationSource(): SimulatedLocationSource {
|
||||
let callback: ((update: Partial<Context>) => void) | null = null
|
||||
let currentLocation: Location = { lat: 0, lng: 0 }
|
||||
|
||||
return {
|
||||
id: "location",
|
||||
|
||||
onContextUpdate(cb) {
|
||||
callback = cb
|
||||
return () => {
|
||||
callback = null
|
||||
}
|
||||
},
|
||||
|
||||
async fetchContext() {
|
||||
return { [LocationKey]: currentLocation }
|
||||
},
|
||||
|
||||
simulateUpdate(location: Location) {
|
||||
currentLocation = location
|
||||
callback?.({ [LocationKey]: location })
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
function createWeatherSource(
|
||||
fetchWeather: (location: Location) => Promise<Weather> = async () => ({
|
||||
temperature: 20,
|
||||
condition: "sunny",
|
||||
}),
|
||||
): FeedSource<WeatherFeedItem> {
|
||||
return {
|
||||
id: "weather",
|
||||
dependencies: ["location"],
|
||||
|
||||
async fetchContext(context) {
|
||||
const location = contextValue(context, LocationKey)
|
||||
if (!location) return {}
|
||||
|
||||
const weather = await fetchWeather(location)
|
||||
return { [WeatherKey]: weather }
|
||||
},
|
||||
|
||||
async fetchItems(context) {
|
||||
const weather = contextValue(context, WeatherKey)
|
||||
if (!weather) return []
|
||||
|
||||
return [
|
||||
{
|
||||
id: `weather-${Date.now()}`,
|
||||
type: "weather",
|
||||
priority: 0.5,
|
||||
timestamp: new Date(),
|
||||
data: {
|
||||
temperature: weather.temperature,
|
||||
condition: weather.condition,
|
||||
},
|
||||
},
|
||||
]
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
function createAlertSource(): FeedSource<AlertFeedItem> {
|
||||
return {
|
||||
id: "alert",
|
||||
dependencies: ["weather"],
|
||||
|
||||
async fetchItems(context) {
|
||||
const weather = contextValue(context, WeatherKey)
|
||||
if (!weather) return []
|
||||
|
||||
if (weather.condition === "storm") {
|
||||
return [
|
||||
{
|
||||
id: "alert-storm",
|
||||
type: "alert",
|
||||
priority: 1.0,
|
||||
timestamp: new Date(),
|
||||
data: { message: "Storm warning!" },
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
return []
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// TESTS
|
||||
// =============================================================================
|
||||
|
||||
describe("FeedEngine", () => {
|
||||
describe("registration", () => {
|
||||
test("registers sources", () => {
|
||||
const engine = new FeedEngine()
|
||||
const location = createLocationSource()
|
||||
|
||||
engine.register(location)
|
||||
|
||||
// Can refresh without error
|
||||
expect(engine.refresh()).resolves.toBeDefined()
|
||||
})
|
||||
|
||||
test("unregisters sources", async () => {
|
||||
const engine = new FeedEngine()
|
||||
const location = createLocationSource()
|
||||
|
||||
engine.register(location)
|
||||
engine.unregister("location")
|
||||
|
||||
const result = await engine.refresh()
|
||||
expect(result.items).toHaveLength(0)
|
||||
})
|
||||
|
||||
test("allows chained registration", () => {
|
||||
const engine = new FeedEngine()
|
||||
.register(createLocationSource())
|
||||
.register(createWeatherSource())
|
||||
.register(createAlertSource())
|
||||
|
||||
expect(engine.refresh()).resolves.toBeDefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe("graph validation", () => {
|
||||
test("throws on missing dependency", () => {
|
||||
const engine = new FeedEngine()
|
||||
const orphan: FeedSource = {
|
||||
id: "orphan",
|
||||
dependencies: ["nonexistent"],
|
||||
}
|
||||
|
||||
engine.register(orphan)
|
||||
|
||||
expect(engine.refresh()).rejects.toThrow(
|
||||
'Source "orphan" depends on "nonexistent" which is not registered',
|
||||
)
|
||||
})
|
||||
|
||||
test("throws on circular dependency", () => {
|
||||
const engine = new FeedEngine()
|
||||
const a: FeedSource = { id: "a", dependencies: ["b"] }
|
||||
const b: FeedSource = { id: "b", dependencies: ["a"] }
|
||||
|
||||
engine.register(a).register(b)
|
||||
|
||||
expect(engine.refresh()).rejects.toThrow("Circular dependency detected: a → b → a")
|
||||
})
|
||||
|
||||
test("throws on longer cycles", () => {
|
||||
const engine = new FeedEngine()
|
||||
const a: FeedSource = { id: "a", dependencies: ["c"] }
|
||||
const b: FeedSource = { id: "b", dependencies: ["a"] }
|
||||
const c: FeedSource = { id: "c", dependencies: ["b"] }
|
||||
|
||||
engine.register(a).register(b).register(c)
|
||||
|
||||
expect(engine.refresh()).rejects.toThrow("Circular dependency detected")
|
||||
})
|
||||
})
|
||||
|
||||
describe("refresh", () => {
|
||||
test("runs fetchContext in dependency order", async () => {
|
||||
const order: string[] = []
|
||||
|
||||
const location: FeedSource = {
|
||||
id: "location",
|
||||
async fetchContext() {
|
||||
order.push("location")
|
||||
return { [LocationKey]: { lat: 51.5, lng: -0.1 } }
|
||||
},
|
||||
}
|
||||
|
||||
const weather: FeedSource = {
|
||||
id: "weather",
|
||||
dependencies: ["location"],
|
||||
async fetchContext(ctx) {
|
||||
order.push("weather")
|
||||
const loc = contextValue(ctx, LocationKey)
|
||||
expect(loc).toBeDefined()
|
||||
return { [WeatherKey]: { temperature: 20, condition: "sunny" } }
|
||||
},
|
||||
}
|
||||
|
||||
const engine = new FeedEngine().register(weather).register(location)
|
||||
|
||||
await engine.refresh()
|
||||
|
||||
expect(order).toEqual(["location", "weather"])
|
||||
})
|
||||
|
||||
test("accumulates context across sources", async () => {
|
||||
const location = createLocationSource()
|
||||
location.simulateUpdate({ lat: 51.5, lng: -0.1 })
|
||||
|
||||
const weather = createWeatherSource()
|
||||
|
||||
const engine = new FeedEngine().register(location).register(weather)
|
||||
|
||||
const { context } = await engine.refresh()
|
||||
|
||||
expect(contextValue(context, LocationKey)).toEqual({ lat: 51.5, lng: -0.1 })
|
||||
expect(contextValue(context, WeatherKey)).toEqual({ temperature: 20, condition: "sunny" })
|
||||
})
|
||||
|
||||
test("collects items from all sources", async () => {
|
||||
const location = createLocationSource()
|
||||
location.simulateUpdate({ lat: 51.5, lng: -0.1 })
|
||||
|
||||
const weather = createWeatherSource()
|
||||
|
||||
const engine = new FeedEngine().register(location).register(weather)
|
||||
|
||||
const { items } = await engine.refresh()
|
||||
|
||||
expect(items).toHaveLength(1)
|
||||
expect(items[0]!.type).toBe("weather")
|
||||
})
|
||||
|
||||
test("sorts items by priority descending", async () => {
|
||||
const location = createLocationSource()
|
||||
location.simulateUpdate({ lat: 51.5, lng: -0.1 })
|
||||
|
||||
const weather = createWeatherSource(async () => ({
|
||||
temperature: 15,
|
||||
condition: "storm",
|
||||
}))
|
||||
|
||||
const alert = createAlertSource()
|
||||
|
||||
const engine = new FeedEngine().register(location).register(weather).register(alert)
|
||||
|
||||
const { items } = await engine.refresh()
|
||||
|
||||
expect(items).toHaveLength(2)
|
||||
expect(items[0]!.type).toBe("alert") // priority 1.0
|
||||
expect(items[1]!.type).toBe("weather") // priority 0.5
|
||||
})
|
||||
|
||||
test("handles missing upstream context gracefully", async () => {
|
||||
const location: FeedSource = {
|
||||
id: "location",
|
||||
async fetchContext() {
|
||||
return {} // No location available
|
||||
},
|
||||
}
|
||||
|
||||
const weather = createWeatherSource()
|
||||
|
||||
const engine = new FeedEngine().register(location).register(weather)
|
||||
|
||||
const { context, items } = await engine.refresh()
|
||||
|
||||
expect(contextValue(context, WeatherKey)).toBeUndefined()
|
||||
expect(items).toHaveLength(0)
|
||||
})
|
||||
|
||||
test("captures errors from fetchContext", async () => {
|
||||
const failing: FeedSource = {
|
||||
id: "failing",
|
||||
async fetchContext() {
|
||||
throw new Error("Context fetch failed")
|
||||
},
|
||||
}
|
||||
|
||||
const engine = new FeedEngine().register(failing)
|
||||
|
||||
const { errors } = await engine.refresh()
|
||||
|
||||
expect(errors).toHaveLength(1)
|
||||
expect(errors[0]!.sourceId).toBe("failing")
|
||||
expect(errors[0]!.error.message).toBe("Context fetch failed")
|
||||
})
|
||||
|
||||
test("captures errors from fetchItems", async () => {
|
||||
const failing: FeedSource = {
|
||||
id: "failing",
|
||||
async fetchItems() {
|
||||
throw new Error("Items fetch failed")
|
||||
},
|
||||
}
|
||||
|
||||
const engine = new FeedEngine().register(failing)
|
||||
|
||||
const { errors } = await engine.refresh()
|
||||
|
||||
expect(errors).toHaveLength(1)
|
||||
expect(errors[0]!.sourceId).toBe("failing")
|
||||
expect(errors[0]!.error.message).toBe("Items fetch failed")
|
||||
})
|
||||
|
||||
test("continues after source error", async () => {
|
||||
const failing: FeedSource = {
|
||||
id: "failing",
|
||||
async fetchContext() {
|
||||
throw new Error("Failed")
|
||||
},
|
||||
}
|
||||
|
||||
const working: FeedSource = {
|
||||
id: "working",
|
||||
async fetchItems() {
|
||||
return [
|
||||
{
|
||||
id: "item-1",
|
||||
type: "test",
|
||||
priority: 0.5,
|
||||
timestamp: new Date(),
|
||||
data: {},
|
||||
},
|
||||
]
|
||||
},
|
||||
}
|
||||
|
||||
const engine = new FeedEngine().register(failing).register(working)
|
||||
|
||||
const { items, errors } = await engine.refresh()
|
||||
|
||||
expect(errors).toHaveLength(1)
|
||||
expect(items).toHaveLength(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe("currentContext", () => {
|
||||
test("returns initial context before refresh", () => {
|
||||
const engine = new FeedEngine()
|
||||
|
||||
const context = engine.currentContext()
|
||||
|
||||
expect(context.time).toBeInstanceOf(Date)
|
||||
})
|
||||
|
||||
test("returns accumulated context after refresh", async () => {
|
||||
const location = createLocationSource()
|
||||
location.simulateUpdate({ lat: 51.5, lng: -0.1 })
|
||||
|
||||
const engine = new FeedEngine().register(location)
|
||||
|
||||
await engine.refresh()
|
||||
|
||||
const context = engine.currentContext()
|
||||
expect(contextValue(context, LocationKey)).toEqual({ lat: 51.5, lng: -0.1 })
|
||||
})
|
||||
})
|
||||
|
||||
describe("subscribe", () => {
|
||||
test("returns unsubscribe function", () => {
|
||||
const engine = new FeedEngine()
|
||||
let callCount = 0
|
||||
|
||||
const unsubscribe = engine.subscribe(() => {
|
||||
callCount++
|
||||
})
|
||||
|
||||
unsubscribe()
|
||||
|
||||
// Subscriber should not be called after unsubscribe
|
||||
expect(callCount).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe("reactive updates", () => {
|
||||
test("start subscribes to onContextUpdate", async () => {
|
||||
const location = createLocationSource()
|
||||
const weather = createWeatherSource()
|
||||
|
||||
const engine = new FeedEngine().register(location).register(weather)
|
||||
|
||||
const results: Array<{ items: FeedItem[] }> = []
|
||||
engine.subscribe((result) => {
|
||||
results.push({ items: result.items })
|
||||
})
|
||||
|
||||
engine.start()
|
||||
|
||||
// Simulate location update
|
||||
location.simulateUpdate({ lat: 51.5, lng: -0.1 })
|
||||
|
||||
// Wait for async refresh
|
||||
await new Promise((resolve) => setTimeout(resolve, 50))
|
||||
|
||||
expect(results.length).toBeGreaterThan(0)
|
||||
expect(results[0]!.items[0]!.type).toBe("weather")
|
||||
})
|
||||
|
||||
test("stop unsubscribes from all sources", async () => {
|
||||
const location = createLocationSource()
|
||||
|
||||
const engine = new FeedEngine().register(location)
|
||||
|
||||
let callCount = 0
|
||||
engine.subscribe(() => {
|
||||
callCount++
|
||||
})
|
||||
|
||||
engine.start()
|
||||
engine.stop()
|
||||
|
||||
// Simulate update after stop
|
||||
location.simulateUpdate({ lat: 1, lng: 1 })
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 50))
|
||||
|
||||
expect(callCount).toBe(0)
|
||||
})
|
||||
|
||||
test("start is idempotent", () => {
|
||||
const location = createLocationSource()
|
||||
const engine = new FeedEngine().register(location)
|
||||
|
||||
// Should not throw or double-subscribe
|
||||
engine.start()
|
||||
engine.start()
|
||||
engine.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
335
packages/aris-core/src/feed-engine.ts
Normal file
335
packages/aris-core/src/feed-engine.ts
Normal file
@@ -0,0 +1,335 @@
|
||||
import type { Context } from "./context"
|
||||
import type { FeedItem } from "./feed"
|
||||
import type { FeedSource } from "./feed-source"
|
||||
|
||||
export interface SourceError {
|
||||
sourceId: string
|
||||
error: Error
|
||||
}
|
||||
|
||||
export interface FeedResult<TItem extends FeedItem = FeedItem> {
|
||||
context: Context
|
||||
items: TItem[]
|
||||
errors: SourceError[]
|
||||
}
|
||||
|
||||
export type FeedSubscriber<TItem extends FeedItem = FeedItem> = (result: FeedResult<TItem>) => void
|
||||
|
||||
interface SourceGraph {
|
||||
sources: Map<string, FeedSource>
|
||||
sorted: FeedSource[]
|
||||
dependents: Map<string, string[]>
|
||||
}
|
||||
|
||||
/**
|
||||
* Orchestrates FeedSources, managing the dependency graph and context flow.
|
||||
*
|
||||
* Sources declare dependencies on other sources. The engine:
|
||||
* - Validates the dependency graph (no missing deps, no cycles)
|
||||
* - Runs fetchContext() in topological order during refresh
|
||||
* - Runs fetchItems() on all sources with accumulated context
|
||||
* - Subscribes to reactive updates via onContextUpdate/onItemsUpdate
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const engine = new FeedEngine()
|
||||
* .register(locationSource)
|
||||
* .register(weatherSource)
|
||||
* .register(alertSource)
|
||||
*
|
||||
* // Pull-based refresh
|
||||
* const { context, items, errors } = await engine.refresh()
|
||||
*
|
||||
* // Reactive updates
|
||||
* engine.subscribe((result) => {
|
||||
* console.log(result.items)
|
||||
* })
|
||||
* engine.start()
|
||||
*
|
||||
* // Cleanup
|
||||
* engine.stop()
|
||||
* ```
|
||||
*/
|
||||
export class FeedEngine<TItems extends FeedItem = FeedItem> {
|
||||
private sources = new Map<string, FeedSource>()
|
||||
private graph: SourceGraph | null = null
|
||||
private context: Context = { time: new Date() }
|
||||
private subscribers = new Set<FeedSubscriber<TItems>>()
|
||||
private cleanups: Array<() => void> = []
|
||||
private started = false
|
||||
|
||||
/**
|
||||
* Registers a FeedSource. Invalidates the cached graph.
|
||||
*/
|
||||
register<TItem extends FeedItem>(source: FeedSource<TItem>): FeedEngine<TItems | TItem> {
|
||||
this.sources.set(source.id, source)
|
||||
this.graph = null
|
||||
return this as FeedEngine<TItems | TItem>
|
||||
}
|
||||
|
||||
/**
|
||||
* Unregisters a FeedSource by ID. Invalidates the cached graph.
|
||||
*/
|
||||
unregister(sourceId: string): this {
|
||||
this.sources.delete(sourceId)
|
||||
this.graph = null
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* Refreshes the feed by running all sources in dependency order.
|
||||
* Calls fetchContext() then fetchItems() on each source.
|
||||
*/
|
||||
async refresh(): Promise<FeedResult<TItems>> {
|
||||
const graph = this.ensureGraph()
|
||||
const errors: SourceError[] = []
|
||||
|
||||
// Reset context with fresh time
|
||||
let context: Context = { time: new Date() }
|
||||
|
||||
// Run fetchContext in topological order
|
||||
for (const source of graph.sorted) {
|
||||
if (source.fetchContext) {
|
||||
try {
|
||||
const update = await source.fetchContext(context)
|
||||
context = { ...context, ...update }
|
||||
} catch (err) {
|
||||
errors.push({
|
||||
sourceId: source.id,
|
||||
error: err instanceof Error ? err : new Error(String(err)),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Run fetchItems on all sources
|
||||
const items: FeedItem[] = []
|
||||
for (const source of graph.sorted) {
|
||||
if (source.fetchItems) {
|
||||
try {
|
||||
const sourceItems = await source.fetchItems(context)
|
||||
items.push(...sourceItems)
|
||||
} catch (err) {
|
||||
errors.push({
|
||||
sourceId: source.id,
|
||||
error: err instanceof Error ? err : new Error(String(err)),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by priority descending
|
||||
items.sort((a, b) => b.priority - a.priority)
|
||||
|
||||
this.context = context
|
||||
|
||||
return { context, items: items as TItems[], errors }
|
||||
}
|
||||
|
||||
/**
|
||||
* Subscribes to feed updates. Returns unsubscribe function.
|
||||
*/
|
||||
subscribe(callback: FeedSubscriber<TItems>): () => void {
|
||||
this.subscribers.add(callback)
|
||||
return () => {
|
||||
this.subscribers.delete(callback)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts reactive subscriptions on all sources.
|
||||
* Sources with onContextUpdate will trigger re-computation of dependents.
|
||||
*/
|
||||
start(): void {
|
||||
if (this.started) return
|
||||
|
||||
this.started = true
|
||||
const graph = this.ensureGraph()
|
||||
|
||||
for (const source of graph.sorted) {
|
||||
if (source.onContextUpdate) {
|
||||
const cleanup = source.onContextUpdate(
|
||||
(update) => {
|
||||
this.handleContextUpdate(source.id, update)
|
||||
},
|
||||
() => this.context,
|
||||
)
|
||||
this.cleanups.push(cleanup)
|
||||
}
|
||||
|
||||
if (source.onItemsUpdate) {
|
||||
const cleanup = source.onItemsUpdate(
|
||||
() => {
|
||||
this.scheduleRefresh()
|
||||
},
|
||||
() => this.context,
|
||||
)
|
||||
this.cleanups.push(cleanup)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stops all reactive subscriptions.
|
||||
*/
|
||||
stop(): void {
|
||||
this.started = false
|
||||
for (const cleanup of this.cleanups) {
|
||||
cleanup()
|
||||
}
|
||||
this.cleanups = []
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current accumulated context.
|
||||
*/
|
||||
currentContext(): Context {
|
||||
return this.context
|
||||
}
|
||||
|
||||
private ensureGraph(): SourceGraph {
|
||||
if (!this.graph) {
|
||||
this.graph = buildGraph(Array.from(this.sources.values()))
|
||||
}
|
||||
return this.graph
|
||||
}
|
||||
|
||||
private handleContextUpdate(sourceId: string, update: Partial<Context>): void {
|
||||
this.context = { ...this.context, ...update, time: new Date() }
|
||||
|
||||
// Re-run dependents and notify
|
||||
this.refreshDependents(sourceId)
|
||||
}
|
||||
|
||||
private async refreshDependents(sourceId: string): Promise<void> {
|
||||
const graph = this.ensureGraph()
|
||||
const toRefresh = this.collectDependents(sourceId, graph)
|
||||
|
||||
// Re-run fetchContext for dependents in order
|
||||
for (const id of toRefresh) {
|
||||
const source = graph.sources.get(id)
|
||||
if (source?.fetchContext) {
|
||||
try {
|
||||
const update = await source.fetchContext(this.context)
|
||||
this.context = { ...this.context, ...update }
|
||||
} catch {
|
||||
// Errors during reactive updates are logged but don't stop propagation
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Collect items from all sources
|
||||
const items: FeedItem[] = []
|
||||
const errors: SourceError[] = []
|
||||
|
||||
for (const source of graph.sorted) {
|
||||
if (source.fetchItems) {
|
||||
try {
|
||||
const sourceItems = await source.fetchItems(this.context)
|
||||
items.push(...sourceItems)
|
||||
} catch (err) {
|
||||
errors.push({
|
||||
sourceId: source.id,
|
||||
error: err instanceof Error ? err : new Error(String(err)),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
items.sort((a, b) => b.priority - a.priority)
|
||||
|
||||
this.notifySubscribers({ context: this.context, items: items as TItems[], errors })
|
||||
}
|
||||
|
||||
private collectDependents(sourceId: string, graph: SourceGraph): string[] {
|
||||
const result: string[] = []
|
||||
const visited = new Set<string>()
|
||||
|
||||
const collect = (id: string): void => {
|
||||
const deps = graph.dependents.get(id) ?? []
|
||||
for (const dep of deps) {
|
||||
if (!visited.has(dep)) {
|
||||
visited.add(dep)
|
||||
result.push(dep)
|
||||
collect(dep)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
collect(sourceId)
|
||||
|
||||
// Return in topological order
|
||||
return graph.sorted.filter((s) => result.includes(s.id)).map((s) => s.id)
|
||||
}
|
||||
|
||||
private scheduleRefresh(): void {
|
||||
// Simple immediate refresh for now - could add debouncing later
|
||||
this.refresh().then((result) => {
|
||||
this.notifySubscribers(result)
|
||||
})
|
||||
}
|
||||
|
||||
private notifySubscribers(result: FeedResult<TItems>): void {
|
||||
this.subscribers.forEach((callback) => {
|
||||
try {
|
||||
callback(result)
|
||||
} catch {
|
||||
// Subscriber errors shouldn't break other subscribers
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function buildGraph(sources: FeedSource[]): SourceGraph {
|
||||
const byId = new Map<string, FeedSource>()
|
||||
for (const source of sources) {
|
||||
byId.set(source.id, source)
|
||||
}
|
||||
|
||||
// Validate dependencies exist
|
||||
for (const source of sources) {
|
||||
for (const dep of source.dependencies ?? []) {
|
||||
if (!byId.has(dep)) {
|
||||
throw new Error(`Source "${source.id}" depends on "${dep}" which is not registered`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check for cycles and topologically sort
|
||||
const visited = new Set<string>()
|
||||
const visiting = new Set<string>()
|
||||
const sorted: FeedSource[] = []
|
||||
|
||||
function visit(id: string, path: string[]): void {
|
||||
if (visiting.has(id)) {
|
||||
const cycle = [...path.slice(path.indexOf(id)), id].join(" → ")
|
||||
throw new Error(`Circular dependency detected: ${cycle}`)
|
||||
}
|
||||
if (visited.has(id)) return
|
||||
|
||||
visiting.add(id)
|
||||
const source = byId.get(id)!
|
||||
for (const dep of source.dependencies ?? []) {
|
||||
visit(dep, [...path, id])
|
||||
}
|
||||
visiting.delete(id)
|
||||
visited.add(id)
|
||||
sorted.push(source)
|
||||
}
|
||||
|
||||
for (const source of sources) {
|
||||
visit(source.id, [])
|
||||
}
|
||||
|
||||
// Build reverse dependency map
|
||||
const dependents = new Map<string, string[]>()
|
||||
for (const source of sources) {
|
||||
for (const dep of source.dependencies ?? []) {
|
||||
const list = dependents.get(dep) ?? []
|
||||
list.push(source.id)
|
||||
dependents.set(dep, list)
|
||||
}
|
||||
}
|
||||
|
||||
return { sources: byId, sorted, dependents }
|
||||
}
|
||||
422
packages/aris-core/src/feed-source.test.ts
Normal file
422
packages/aris-core/src/feed-source.test.ts
Normal file
@@ -0,0 +1,422 @@
|
||||
import { describe, expect, test } from "bun:test"
|
||||
|
||||
import type { Context, ContextKey, FeedItem, FeedSource } from "./index"
|
||||
|
||||
import { contextKey, contextValue } from "./index"
|
||||
|
||||
// =============================================================================
|
||||
// CONTEXT KEYS
|
||||
// =============================================================================
|
||||
|
||||
interface Location {
|
||||
lat: number
|
||||
lng: number
|
||||
}
|
||||
|
||||
interface Weather {
|
||||
temperature: number
|
||||
condition: string
|
||||
}
|
||||
|
||||
const LocationKey: ContextKey<Location> = contextKey("location")
|
||||
const WeatherKey: ContextKey<Weather> = contextKey("weather")
|
||||
|
||||
// =============================================================================
|
||||
// FEED ITEMS
|
||||
// =============================================================================
|
||||
|
||||
type WeatherFeedItem = FeedItem<"weather", { temperature: number; condition: string }>
|
||||
type AlertFeedItem = FeedItem<"alert", { message: string }>
|
||||
|
||||
// =============================================================================
|
||||
// TEST HELPERS
|
||||
// =============================================================================
|
||||
|
||||
interface SimulatedLocationSource extends FeedSource {
|
||||
simulateUpdate(location: Location): void
|
||||
}
|
||||
|
||||
function createLocationSource(): SimulatedLocationSource {
|
||||
let callback: ((update: Partial<Context>) => void) | null = null
|
||||
let currentLocation: Location = { lat: 0, lng: 0 }
|
||||
|
||||
return {
|
||||
id: "location",
|
||||
|
||||
onContextUpdate(cb) {
|
||||
callback = cb
|
||||
return () => {
|
||||
callback = null
|
||||
}
|
||||
},
|
||||
|
||||
async fetchContext() {
|
||||
return { [LocationKey]: currentLocation }
|
||||
},
|
||||
|
||||
simulateUpdate(location: Location) {
|
||||
currentLocation = location
|
||||
callback?.({ [LocationKey]: location })
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
function createWeatherSource(
|
||||
fetchWeather: (location: Location) => Promise<Weather> = async () => ({
|
||||
temperature: 20,
|
||||
condition: "sunny",
|
||||
}),
|
||||
): FeedSource<WeatherFeedItem> {
|
||||
return {
|
||||
id: "weather",
|
||||
dependencies: ["location"],
|
||||
|
||||
async fetchContext(context) {
|
||||
const location = contextValue(context, LocationKey)
|
||||
if (!location) return {}
|
||||
|
||||
const weather = await fetchWeather(location)
|
||||
return { [WeatherKey]: weather }
|
||||
},
|
||||
|
||||
async fetchItems(context) {
|
||||
const weather = contextValue(context, WeatherKey)
|
||||
if (!weather) return []
|
||||
|
||||
return [
|
||||
{
|
||||
id: `weather-${Date.now()}`,
|
||||
type: "weather",
|
||||
priority: 0.5,
|
||||
timestamp: new Date(),
|
||||
data: {
|
||||
temperature: weather.temperature,
|
||||
condition: weather.condition,
|
||||
},
|
||||
},
|
||||
]
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
function createAlertSource(): FeedSource<AlertFeedItem> {
|
||||
return {
|
||||
id: "alert",
|
||||
dependencies: ["weather"],
|
||||
|
||||
async fetchItems(context) {
|
||||
const weather = contextValue(context, WeatherKey)
|
||||
if (!weather) return []
|
||||
|
||||
if (weather.condition === "storm") {
|
||||
return [
|
||||
{
|
||||
id: "alert-storm",
|
||||
type: "alert",
|
||||
priority: 1.0,
|
||||
timestamp: new Date(),
|
||||
data: { message: "Storm warning!" },
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
return []
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// GRAPH SIMULATION (until FeedController is updated)
|
||||
// =============================================================================
|
||||
|
||||
interface SourceGraph {
|
||||
sources: Map<string, FeedSource>
|
||||
sorted: FeedSource[]
|
||||
dependents: Map<string, string[]>
|
||||
}
|
||||
|
||||
function buildGraph(sources: FeedSource[]): SourceGraph {
|
||||
const byId = new Map<string, FeedSource>()
|
||||
for (const source of sources) {
|
||||
byId.set(source.id, source)
|
||||
}
|
||||
|
||||
// Validate dependencies exist
|
||||
for (const source of sources) {
|
||||
for (const dep of source.dependencies ?? []) {
|
||||
if (!byId.has(dep)) {
|
||||
throw new Error(`Source "${source.id}" depends on "${dep}" which is not registered`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check for cycles and topologically sort
|
||||
const visited = new Set<string>()
|
||||
const visiting = new Set<string>()
|
||||
const sorted: FeedSource[] = []
|
||||
|
||||
function visit(id: string, path: string[]): void {
|
||||
if (visiting.has(id)) {
|
||||
const cycle = [...path.slice(path.indexOf(id)), id].join(" → ")
|
||||
throw new Error(`Circular dependency detected: ${cycle}`)
|
||||
}
|
||||
if (visited.has(id)) return
|
||||
|
||||
visiting.add(id)
|
||||
const source = byId.get(id)!
|
||||
for (const dep of source.dependencies ?? []) {
|
||||
visit(dep, [...path, id])
|
||||
}
|
||||
visiting.delete(id)
|
||||
visited.add(id)
|
||||
sorted.push(source)
|
||||
}
|
||||
|
||||
for (const source of sources) {
|
||||
visit(source.id, [])
|
||||
}
|
||||
|
||||
// Build reverse dependency map
|
||||
const dependents = new Map<string, string[]>()
|
||||
for (const source of sources) {
|
||||
for (const dep of source.dependencies ?? []) {
|
||||
const list = dependents.get(dep) ?? []
|
||||
list.push(source.id)
|
||||
dependents.set(dep, list)
|
||||
}
|
||||
}
|
||||
|
||||
return { sources: byId, sorted, dependents }
|
||||
}
|
||||
|
||||
async function refreshGraph(graph: SourceGraph): Promise<{ context: Context; items: FeedItem[] }> {
|
||||
let context: Context = { time: new Date() }
|
||||
|
||||
// Run fetchContext in topological order
|
||||
for (const source of graph.sorted) {
|
||||
if (source.fetchContext) {
|
||||
const update = await source.fetchContext(context)
|
||||
context = { ...context, ...update }
|
||||
}
|
||||
}
|
||||
|
||||
// Run fetchItems on all sources
|
||||
const items: FeedItem[] = []
|
||||
for (const source of graph.sorted) {
|
||||
if (source.fetchItems) {
|
||||
const sourceItems = await source.fetchItems(context)
|
||||
items.push(...sourceItems)
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by priority descending
|
||||
items.sort((a, b) => b.priority - a.priority)
|
||||
|
||||
return { context, items }
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// TESTS
|
||||
// =============================================================================
|
||||
|
||||
describe("FeedSource", () => {
|
||||
describe("interface", () => {
|
||||
test("source with only context production", () => {
|
||||
const source = createLocationSource()
|
||||
|
||||
expect(source.id).toBe("location")
|
||||
expect(source.dependencies).toBeUndefined()
|
||||
expect(source.fetchContext).toBeDefined()
|
||||
expect(source.onContextUpdate).toBeDefined()
|
||||
expect(source.fetchItems).toBeUndefined()
|
||||
})
|
||||
|
||||
test("source with dependencies and both context and items", () => {
|
||||
const source = createWeatherSource()
|
||||
|
||||
expect(source.id).toBe("weather")
|
||||
expect(source.dependencies).toEqual(["location"])
|
||||
expect(source.fetchContext).toBeDefined()
|
||||
expect(source.fetchItems).toBeDefined()
|
||||
})
|
||||
|
||||
test("source with only item production", () => {
|
||||
const source = createAlertSource()
|
||||
|
||||
expect(source.id).toBe("alert")
|
||||
expect(source.dependencies).toEqual(["weather"])
|
||||
expect(source.fetchContext).toBeUndefined()
|
||||
expect(source.fetchItems).toBeDefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe("graph validation", () => {
|
||||
test("validates all dependencies exist", () => {
|
||||
const orphan: FeedSource = {
|
||||
id: "orphan",
|
||||
dependencies: ["nonexistent"],
|
||||
}
|
||||
|
||||
expect(() => buildGraph([orphan])).toThrow(
|
||||
'Source "orphan" depends on "nonexistent" which is not registered',
|
||||
)
|
||||
})
|
||||
|
||||
test("detects circular dependencies", () => {
|
||||
const a: FeedSource = { id: "a", dependencies: ["b"] }
|
||||
const b: FeedSource = { id: "b", dependencies: ["a"] }
|
||||
|
||||
expect(() => buildGraph([a, b])).toThrow("Circular dependency detected: a → b → a")
|
||||
})
|
||||
|
||||
test("detects longer cycles", () => {
|
||||
const a: FeedSource = { id: "a", dependencies: ["c"] }
|
||||
const b: FeedSource = { id: "b", dependencies: ["a"] }
|
||||
const c: FeedSource = { id: "c", dependencies: ["b"] }
|
||||
|
||||
expect(() => buildGraph([a, b, c])).toThrow("Circular dependency detected")
|
||||
})
|
||||
|
||||
test("topologically sorts sources", () => {
|
||||
const location = createLocationSource()
|
||||
const weather = createWeatherSource()
|
||||
const alert = createAlertSource()
|
||||
|
||||
// Register in wrong order
|
||||
const graph = buildGraph([alert, weather, location])
|
||||
|
||||
expect(graph.sorted.map((s) => s.id)).toEqual(["location", "weather", "alert"])
|
||||
})
|
||||
|
||||
test("builds reverse dependency map", () => {
|
||||
const location = createLocationSource()
|
||||
const weather = createWeatherSource()
|
||||
const alert = createAlertSource()
|
||||
|
||||
const graph = buildGraph([location, weather, alert])
|
||||
|
||||
expect(graph.dependents.get("location")).toEqual(["weather"])
|
||||
expect(graph.dependents.get("weather")).toEqual(["alert"])
|
||||
expect(graph.dependents.get("alert")).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe("graph refresh", () => {
|
||||
test("runs fetchContext in dependency order", async () => {
|
||||
const order: string[] = []
|
||||
|
||||
const location: FeedSource = {
|
||||
id: "location",
|
||||
async fetchContext() {
|
||||
order.push("location")
|
||||
return { [LocationKey]: { lat: 51.5, lng: -0.1 } }
|
||||
},
|
||||
}
|
||||
|
||||
const weather: FeedSource = {
|
||||
id: "weather",
|
||||
dependencies: ["location"],
|
||||
async fetchContext(ctx) {
|
||||
order.push("weather")
|
||||
const loc = contextValue(ctx, LocationKey)
|
||||
expect(loc).toBeDefined()
|
||||
return { [WeatherKey]: { temperature: 20, condition: "sunny" } }
|
||||
},
|
||||
}
|
||||
|
||||
const graph = buildGraph([weather, location])
|
||||
await refreshGraph(graph)
|
||||
|
||||
expect(order).toEqual(["location", "weather"])
|
||||
})
|
||||
|
||||
test("accumulates context across sources", async () => {
|
||||
const location = createLocationSource()
|
||||
location.simulateUpdate({ lat: 51.5, lng: -0.1 })
|
||||
|
||||
const weather = createWeatherSource()
|
||||
|
||||
const graph = buildGraph([location, weather])
|
||||
const { context } = await refreshGraph(graph)
|
||||
|
||||
expect(contextValue(context, LocationKey)).toEqual({ lat: 51.5, lng: -0.1 })
|
||||
expect(contextValue(context, WeatherKey)).toEqual({ temperature: 20, condition: "sunny" })
|
||||
})
|
||||
|
||||
test("collects items from all sources", async () => {
|
||||
const location = createLocationSource()
|
||||
location.simulateUpdate({ lat: 51.5, lng: -0.1 })
|
||||
|
||||
const weather = createWeatherSource()
|
||||
|
||||
const graph = buildGraph([location, weather])
|
||||
const { items } = await refreshGraph(graph)
|
||||
|
||||
expect(items).toHaveLength(1)
|
||||
expect(items[0]!.type).toBe("weather")
|
||||
})
|
||||
|
||||
test("downstream source receives upstream context", async () => {
|
||||
const location = createLocationSource()
|
||||
location.simulateUpdate({ lat: 51.5, lng: -0.1 })
|
||||
|
||||
const weather = createWeatherSource(async () => ({
|
||||
temperature: 15,
|
||||
condition: "storm",
|
||||
}))
|
||||
|
||||
const alert = createAlertSource()
|
||||
|
||||
const graph = buildGraph([location, weather, alert])
|
||||
const { items } = await refreshGraph(graph)
|
||||
|
||||
expect(items).toHaveLength(2)
|
||||
expect(items[0]!.type).toBe("alert") // priority 1.0
|
||||
expect(items[1]!.type).toBe("weather") // priority 0.5
|
||||
})
|
||||
|
||||
test("source without location context returns empty items", async () => {
|
||||
// Location source exists but hasn't been updated (returns default 0,0)
|
||||
const location: FeedSource = {
|
||||
id: "location",
|
||||
async fetchContext() {
|
||||
// Simulate no location available
|
||||
return {}
|
||||
},
|
||||
}
|
||||
|
||||
const weather = createWeatherSource()
|
||||
|
||||
const graph = buildGraph([location, weather])
|
||||
const { context, items } = await refreshGraph(graph)
|
||||
|
||||
expect(contextValue(context, WeatherKey)).toBeUndefined()
|
||||
expect(items).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe("reactive updates", () => {
|
||||
test("onContextUpdate receives callback and returns cleanup", () => {
|
||||
const location = createLocationSource()
|
||||
let updateCount = 0
|
||||
|
||||
const cleanup = location.onContextUpdate!(
|
||||
() => {
|
||||
updateCount++
|
||||
},
|
||||
() => ({ time: new Date() }),
|
||||
)
|
||||
|
||||
location.simulateUpdate({ lat: 1, lng: 1 })
|
||||
expect(updateCount).toBe(1)
|
||||
|
||||
location.simulateUpdate({ lat: 2, lng: 2 })
|
||||
expect(updateCount).toBe(2)
|
||||
|
||||
cleanup()
|
||||
|
||||
location.simulateUpdate({ lat: 3, lng: 3 })
|
||||
expect(updateCount).toBe(2) // no more updates after cleanup
|
||||
})
|
||||
})
|
||||
})
|
||||
76
packages/aris-core/src/feed-source.ts
Normal file
@@ -0,0 +1,76 @@
import type { Context } from "./context"
import type { FeedItem } from "./feed"

/**
 * Unified interface for sources that provide context and/or feed items.
 *
 * Sources form a dependency graph - a source declares which other sources
 * it depends on, and the graph ensures dependencies are resolved before
 * dependents run.
 *
 * A source may:
 * - Provide context for other sources (implement fetchContext/onContextUpdate)
 * - Produce feed items (implement fetchItems/onItemsUpdate)
 * - Both
 *
 * @example
 * ```ts
 * // Location source - provides context only
 * const locationSource: FeedSource = {
 *   id: "location",
 *   fetchContext: async () => {
 *     const pos = await getCurrentPosition()
 *     return { location: { lat: pos.coords.latitude, lng: pos.coords.longitude } }
 *   },
 * }
 *
 * // Weather source - depends on location, provides both context and items
 * const weatherSource: FeedSource<WeatherFeedItem> = {
 *   id: "weather",
 *   dependencies: ["location"],
 *   fetchContext: async (ctx) => {
 *     const weather = await fetchWeather(ctx.location)
 *     return { weather }
 *   },
 *   fetchItems: async (ctx) => {
 *     return createWeatherFeedItems(ctx.weather)
 *   },
 * }
 * ```
 */
export interface FeedSource<TItem extends FeedItem = FeedItem> {
  /** Unique identifier for this source */
  readonly id: string

  /** IDs of sources this source depends on */
  readonly dependencies?: readonly string[]

  /**
   * Subscribe to reactive context updates.
   * Called when the source can push context changes proactively.
   * Returns cleanup function.
   */
  onContextUpdate?(
    callback: (update: Partial<Context>) => void,
    getContext: () => Context,
  ): () => void

  /**
   * Fetch context on-demand.
   * Called during manual refresh or initial load.
   */
  fetchContext?(context: Context): Promise<Partial<Context>>

  /**
   * Subscribe to reactive feed item updates.
   * Called when the source can push item changes proactively.
   * Returns cleanup function.
   */
  onItemsUpdate?(callback: (items: TItem[]) => void, getContext: () => Context): () => void

  /**
   * Fetch feed items on-demand.
   * Called during manual refresh or when dependencies update.
   */
  fetchItems?(context: Context): Promise<TItem[]>
}
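The interface leaves reactive item production to the implementer. A rough sketch (not code from this commit) of a polling source that wires `onItemsUpdate` to a timer; `fetchHeadlines` is a made-up stand-in for a real API call:

```ts
import type { FeedItem, FeedSource } from "@aris/core"

type NewsItem = FeedItem<"news", { headline: string }>

// Hypothetical stand-in for a real API call.
async function fetchHeadlines(): Promise<NewsItem[]> {
  return [
    {
      id: `news-${Date.now()}`,
      type: "news",
      priority: 0.3,
      timestamp: new Date(),
      data: { headline: "Example headline" },
    },
  ]
}

const newsSource: FeedSource<NewsItem> = {
  id: "news",

  // On-demand path, used by manual refresh.
  async fetchItems() {
    return fetchHeadlines()
  },

  // Reactive path: push fresh items once a minute until cleaned up.
  onItemsUpdate(callback) {
    const timer = setInterval(async () => {
      callback(await fetchHeadlines())
    }, 60_000)
    return () => clearInterval(timer)
  },
}
```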
31
packages/aris-core/src/feed.ts
Normal file
@@ -0,0 +1,31 @@
/**
 * A single item in the feed.
 *
 * @example
 * ```ts
 * type WeatherItem = FeedItem<"weather", { temp: number; condition: string }>
 *
 * const item: WeatherItem = {
 *   id: "weather-123",
 *   type: "weather",
 *   priority: 0.5,
 *   timestamp: new Date(),
 *   data: { temp: 18, condition: "cloudy" },
 * }
 * ```
 */
export interface FeedItem<
  TType extends string = string,
  TData extends Record<string, unknown> = Record<string, unknown>,
> {
  /** Unique identifier */
  id: string
  /** Item type, matches the data source type */
  type: TType
  /** Sort priority (higher = more important, shown first) */
  priority: number
  /** When this item was generated */
  timestamp: Date
  /** Type-specific payload */
  data: TData
}
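Because `type` is the discriminant, consumers can narrow a mixed feed with an ordinary type predicate. A small sketch (not part of this commit), reusing the `WeatherItem` shape from the doc comment above:

```ts
import type { FeedItem } from "@aris/core"

type WeatherItem = FeedItem<"weather", { temp: number; condition: string }>

function isWeatherItem(item: FeedItem): item is WeatherItem {
  return item.type === "weather"
}

// Keep only the weather entries from a mixed feed.
const weatherOnly = (items: FeedItem[]) => items.filter(isWeatherItem)
```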
42
packages/aris-core/src/index.ts
Normal file
@@ -0,0 +1,42 @@
// Context
export type { Context, ContextKey } from "./context"
export { contextKey, contextValue } from "./context"

// Feed
export type { FeedItem } from "./feed"

// Feed Source
export type { FeedSource } from "./feed-source"

// Feed Engine
export type { FeedResult, FeedSubscriber, SourceError } from "./feed-engine"
export { FeedEngine } from "./feed-engine"

// =============================================================================
// DEPRECATED - Use FeedSource + FeedEngine instead
// =============================================================================

// Data Source (deprecated - use FeedSource)
export type { DataSource } from "./data-source"

// Context Provider (deprecated - use FeedSource)
export type { ContextProvider } from "./context-provider"

// Context Bridge (deprecated - use FeedEngine)
export type { ProviderError, RefreshResult } from "./context-bridge"
export { ContextBridge } from "./context-bridge"

// Reconciler (deprecated - use FeedEngine)
export type {
  ReconcileResult,
  ReconcilerConfig,
  SourceError as ReconcilerSourceError,
} from "./reconciler"
export { Reconciler } from "./reconciler"

// Feed Controller (deprecated - use FeedEngine)
export type {
  FeedControllerConfig,
  FeedSubscriber as FeedControllerSubscriber,
} from "./feed-controller"
export { FeedController } from "./feed-controller"
112
packages/aris-source-location/README.md
Normal file
@@ -0,0 +1,112 @@
# @aris/source-location

A FeedSource that provides location context to the ARIS feed graph.

## Overview

This source accepts external location pushes and does not query location itself. It provides location context to downstream sources (e.g., weather, transit) but does not produce feed items.

## Installation

```bash
bun add @aris/source-location
```

## Usage

```ts
import { LocationSource, LocationKey, type Location } from "@aris/source-location"
import { contextValue } from "@aris/core"

// Create source with default history size (1)
const locationSource = new LocationSource()

// Or keep last 10 locations
const locationSource = new LocationSource({ historySize: 10 })

// Push location from external provider (GPS, network, etc.)
locationSource.pushLocation({
  lat: 37.7749,
  lng: -122.4194,
  accuracy: 10,
  timestamp: new Date(),
})

// Access current location
locationSource.lastLocation // { lat, lng, accuracy, timestamp } | null

// Access location history (oldest first)
locationSource.locationHistory // readonly Location[]
```

### With FeedController

```ts
import { FeedController } from "@aris/core"
import { LocationSource } from "@aris/source-location"

const locationSource = new LocationSource()

const controller = new FeedController({
  sources: [locationSource, weatherSource, transitSource],
})

// Push location updates - downstream sources will re-fetch
locationSource.pushLocation({
  lat: 37.7749,
  lng: -122.4194,
  accuracy: 10,
  timestamp: new Date(),
})
```

### Reading Location in Downstream Sources

```ts
import { contextValue, type FeedSource } from "@aris/core"
import { LocationKey } from "@aris/source-location"

const weatherSource: FeedSource = {
  id: "weather",
  dependencies: ["location"],

  async fetchContext(context) {
    const location = contextValue(context, LocationKey)
    if (!location) return {}

    const weather = await fetchWeather(location.lat, location.lng)
    return { [WeatherKey]: weather }
  },
}
```

## API

### `LocationSource`

| Member                   | Type                  | Description                           |
| ------------------------ | --------------------- | ------------------------------------- |
| `id`                     | `"location"`          | Source identifier                     |
| `constructor(options?)`  |                       | Create with optional `historySize`    |
| `pushLocation(location)` | `void`                | Push new location, notifies listeners |
| `lastLocation`           | `Location \| null`    | Most recent location                  |
| `locationHistory`        | `readonly Location[]` | All retained locations, oldest first  |

### `Location`

```ts
interface Location {
  lat: number
  lng: number
  accuracy: number // meters
  timestamp: Date
}
```

### `LocationKey`

Typed context key for accessing location in downstream sources:

```ts
const location = contextValue(context, LocationKey)
```
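One way to drive this source in a browser (illustrative only, not part of the package) is to bridge the Geolocation API into `pushLocation`:

```ts
import { LocationSource } from "@aris/source-location"

const locationSource = new LocationSource({ historySize: 5 })

// Forward browser geolocation updates into the source.
const watchId = navigator.geolocation.watchPosition((pos) => {
  locationSource.pushLocation({
    lat: pos.coords.latitude,
    lng: pos.coords.longitude,
    accuracy: pos.coords.accuracy,
    timestamp: new Date(pos.timestamp),
  })
})

// Stop forwarding when done.
navigator.geolocation.clearWatch(watchId)
```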
13
packages/aris-source-location/package.json
Normal file
@@ -0,0 +1,13 @@
{
  "name": "@aris/source-location",
  "version": "0.0.0",
  "type": "module",
  "main": "src/index.ts",
  "types": "src/index.ts",
  "scripts": {
    "test": "bun test src/"
  },
  "dependencies": {
    "@aris/core": "workspace:*"
  }
}
6
packages/aris-source-location/src/index.ts
Normal file
@@ -0,0 +1,6 @@
export {
  LocationSource,
  LocationKey,
  type Location,
  type LocationSourceOptions,
} from "./location-source.ts"
150
packages/aris-source-location/src/location-source.test.ts
Normal file
@@ -0,0 +1,150 @@
|
||||
import { describe, expect, mock, test } from "bun:test"
|
||||
|
||||
import { LocationKey, LocationSource, type Location } from "./location-source.ts"
|
||||
|
||||
function createLocation(overrides: Partial<Location> = {}): Location {
|
||||
return {
|
||||
lat: 37.7749,
|
||||
lng: -122.4194,
|
||||
accuracy: 10,
|
||||
timestamp: new Date(),
|
||||
...overrides,
|
||||
}
|
||||
}
|
||||
|
||||
describe("LocationSource", () => {
|
||||
describe("FeedSource interface", () => {
|
||||
test("has correct id", () => {
|
||||
const source = new LocationSource()
|
||||
expect(source.id).toBe("location")
|
||||
})
|
||||
|
||||
test("fetchItems always returns empty array", async () => {
|
||||
const source = new LocationSource()
|
||||
source.pushLocation(createLocation())
|
||||
|
||||
const items = await source.fetchItems()
|
||||
expect(items).toEqual([])
|
||||
})
|
||||
|
||||
test("fetchContext returns empty when no location", async () => {
|
||||
const source = new LocationSource()
|
||||
|
||||
const context = await source.fetchContext()
|
||||
expect(context).toEqual({})
|
||||
})
|
||||
|
||||
test("fetchContext returns location when available", async () => {
|
||||
const source = new LocationSource()
|
||||
const location = createLocation()
|
||||
source.pushLocation(location)
|
||||
|
||||
const context = await source.fetchContext()
|
||||
expect(context).toEqual({ [LocationKey]: location })
|
||||
})
|
||||
})
|
||||
|
||||
describe("pushLocation", () => {
|
||||
test("updates lastLocation", () => {
|
||||
const source = new LocationSource()
|
||||
expect(source.lastLocation).toBeNull()
|
||||
|
||||
const location = createLocation()
|
||||
source.pushLocation(location)
|
||||
|
||||
expect(source.lastLocation).toEqual(location)
|
||||
})
|
||||
|
||||
test("notifies listeners", () => {
|
||||
const source = new LocationSource()
|
||||
const listener = mock()
|
||||
|
||||
source.onContextUpdate(listener)
|
||||
|
||||
const location = createLocation()
|
||||
source.pushLocation(location)
|
||||
|
||||
expect(listener).toHaveBeenCalledTimes(1)
|
||||
expect(listener).toHaveBeenCalledWith({ [LocationKey]: location })
|
||||
})
|
||||
})
|
||||
|
||||
describe("history", () => {
|
||||
test("default historySize is 1", () => {
|
||||
const source = new LocationSource()
|
||||
|
||||
source.pushLocation(createLocation({ lat: 1 }))
|
||||
source.pushLocation(createLocation({ lat: 2 }))
|
||||
|
||||
expect(source.locationHistory).toHaveLength(1)
|
||||
expect(source.lastLocation?.lat).toBe(2)
|
||||
})
|
||||
|
||||
test("respects configured historySize", () => {
|
||||
const source = new LocationSource({ historySize: 3 })
|
||||
|
||||
const loc1 = createLocation({ lat: 1 })
|
||||
const loc2 = createLocation({ lat: 2 })
|
||||
const loc3 = createLocation({ lat: 3 })
|
||||
|
||||
source.pushLocation(loc1)
|
||||
source.pushLocation(loc2)
|
||||
source.pushLocation(loc3)
|
||||
|
||||
expect(source.locationHistory).toEqual([loc1, loc2, loc3])
|
||||
})
|
||||
|
||||
test("evicts oldest when exceeding historySize", () => {
|
||||
const source = new LocationSource({ historySize: 2 })
|
||||
|
||||
const loc1 = createLocation({ lat: 1 })
|
||||
const loc2 = createLocation({ lat: 2 })
|
||||
const loc3 = createLocation({ lat: 3 })
|
||||
|
||||
source.pushLocation(loc1)
|
||||
source.pushLocation(loc2)
|
||||
source.pushLocation(loc3)
|
||||
|
||||
expect(source.locationHistory).toEqual([loc2, loc3])
|
||||
})
|
||||
|
||||
test("locationHistory is readonly", () => {
|
||||
const source = new LocationSource({ historySize: 3 })
|
||||
source.pushLocation(createLocation())
|
||||
|
||||
const history = source.locationHistory
|
||||
expect(Array.isArray(history)).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe("onContextUpdate", () => {
|
||||
test("returns cleanup function", () => {
|
||||
const source = new LocationSource()
|
||||
const listener = mock()
|
||||
|
||||
const cleanup = source.onContextUpdate(listener)
|
||||
|
||||
source.pushLocation(createLocation({ lat: 1 }))
|
||||
expect(listener).toHaveBeenCalledTimes(1)
|
||||
|
||||
cleanup()
|
||||
|
||||
source.pushLocation(createLocation({ lat: 2 }))
|
||||
expect(listener).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
test("supports multiple listeners", () => {
|
||||
const source = new LocationSource()
|
||||
const listener1 = mock()
|
||||
const listener2 = mock()
|
||||
|
||||
source.onContextUpdate(listener1)
|
||||
source.onContextUpdate(listener2)
|
||||
|
||||
source.pushLocation(createLocation())
|
||||
|
||||
expect(listener1).toHaveBeenCalledTimes(1)
|
||||
expect(listener2).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
})
|
||||
86
packages/aris-source-location/src/location-source.ts
Normal file
@@ -0,0 +1,86 @@
import type { Context, FeedSource } from "@aris/core"

import { contextKey, type ContextKey } from "@aris/core"

/**
 * Geographic coordinates with accuracy and timestamp.
 */
export interface Location {
  lat: number
  lng: number
  /** Accuracy in meters */
  accuracy: number
  timestamp: Date
}

export interface LocationSourceOptions {
  /** Number of locations to retain in history. Defaults to 1. */
  historySize?: number
}

export const LocationKey: ContextKey<Location> = contextKey("location")

/**
 * A FeedSource that provides location context.
 *
 * This source accepts external location pushes and does not query location itself.
 * Use `pushLocation` to update the location from an external provider (e.g., GPS, network).
 *
 * Does not produce feed items - always returns empty array from `fetchItems`.
 */
export class LocationSource implements FeedSource {
  readonly id = "location"

  private readonly historySize: number
  private locations: Location[] = []
  private listeners = new Set<(update: Partial<Context>) => void>()

  constructor(options: LocationSourceOptions = {}) {
    this.historySize = options.historySize ?? 1
  }

  /**
   * Push a new location update. Notifies all context listeners.
   */
  pushLocation(location: Location): void {
    this.locations.push(location)
    if (this.locations.length > this.historySize) {
      this.locations.shift()
    }
    this.listeners.forEach((listener) => {
      listener({ [LocationKey]: location })
    })
  }

  /**
   * Most recent location, or null if none pushed.
   */
  get lastLocation(): Location | null {
    return this.locations[this.locations.length - 1] ?? null
  }

  /**
   * Location history, oldest first. Length limited by `historySize`.
   */
  get locationHistory(): readonly Location[] {
    return this.locations
  }

  onContextUpdate(callback: (update: Partial<Context>) => void): () => void {
    this.listeners.add(callback)
    return () => {
      this.listeners.delete(callback)
    }
  }

  async fetchContext(): Promise<Partial<Context>> {
    if (this.lastLocation) {
      return { [LocationKey]: this.lastLocation }
    }
    return {}
  }

  async fetchItems(): Promise<[]> {
    return []
  }
}
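For orientation, a rough consumer-side sketch (not from this commit, and assuming `Context` accepts the spread shown here) of pairing `fetchContext` with `contextValue`:

```ts
import { contextValue } from "@aris/core"
import { LocationKey, LocationSource } from "@aris/source-location"

const source = new LocationSource({ historySize: 3 })
source.pushLocation({ lat: 51.5, lng: -0.12, accuracy: 8, timestamp: new Date() })

// fetchContext returns a partial context keyed by LocationKey.
const context = { time: new Date(), ...(await source.fetchContext()) }
console.log(contextValue(context, LocationKey)?.lat) // 51.5
```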
1
packages/aris-source-tfl/fixtures/tfl-responses.json
Normal file
File diff suppressed because one or more lines are too long
16
packages/aris-source-tfl/package.json
Normal file
@@ -0,0 +1,16 @@
{
  "name": "@aris/source-tfl",
  "version": "0.0.0",
  "type": "module",
  "main": "src/index.ts",
  "types": "src/index.ts",
  "scripts": {
    "test": "bun test src/",
    "fetch-fixtures": "bun run scripts/fetch-fixtures.ts"
  },
  "dependencies": {
    "@aris/core": "workspace:*",
    "@aris/source-location": "workspace:*",
    "arktype": "^2.1.0"
  }
}
35
packages/aris-source-tfl/scripts/fetch-fixtures.ts
Normal file
@@ -0,0 +1,35 @@
// Fetches real TfL API responses and saves them as test fixtures

const TEST_LINES = ["northern", "central", "elizabeth"]
const BASE_URL = "https://api.tfl.gov.uk"

async function fetchFixtures() {
  console.log("Fetching line statuses...")
  const statusRes = await fetch(`${BASE_URL}/Line/${TEST_LINES.join(",")}/Status`)
  const lineStatuses = await statusRes.json()

  console.log("Fetching stop points...")
  const stopPoints: Record<string, unknown> = {}
  for (const lineId of TEST_LINES) {
    console.log(`  Fetching ${lineId}...`)
    const res = await fetch(`${BASE_URL}/Line/${lineId}/StopPoints`)
    stopPoints[lineId] = await res.json()
  }

  const fixtures = {
    fetchedAt: new Date().toISOString(),
    lineStatuses,
    stopPoints,
  }

  const path = new URL("../fixtures/tfl-responses.json", import.meta.url)
  await Bun.write(path, JSON.stringify(fixtures))

  console.log(`\nFixtures saved to fixtures/tfl-responses.json`)
  console.log(`  Line statuses: ${(lineStatuses as unknown[]).length} lines`)
  for (const [lineId, stops] of Object.entries(stopPoints)) {
    console.log(`  ${lineId} stops: ${(stops as unknown[]).length}`)
  }
}

fetchFixtures().catch(console.error)
11
packages/aris-source-tfl/src/index.ts
Normal file
@@ -0,0 +1,11 @@
export { TflSource } from "./tfl-source.ts"
export { TflApi } from "./tfl-api.ts"
export type { TflLineId } from "./tfl-api.ts"
export type {
  StationLocation,
  TflAlertData,
  TflAlertFeedItem,
  TflAlertSeverity,
  TflLineStatus,
  TflSourceOptions,
} from "./types.ts"
172
packages/aris-source-tfl/src/tfl-api.ts
Normal file
@@ -0,0 +1,172 @@
|
||||
import { type } from "arktype"
|
||||
|
||||
import type { StationLocation, TflAlertSeverity, TflLineStatus } from "./types.ts"
|
||||
|
||||
const TFL_API_BASE = "https://api.tfl.gov.uk"
|
||||
|
||||
const ALL_LINE_IDS: TflLineId[] = [
|
||||
"bakerloo",
|
||||
"central",
|
||||
"circle",
|
||||
"district",
|
||||
"hammersmith-city",
|
||||
"jubilee",
|
||||
"metropolitan",
|
||||
"northern",
|
||||
"piccadilly",
|
||||
"victoria",
|
||||
"waterloo-city",
|
||||
"lioness",
|
||||
"mildmay",
|
||||
"windrush",
|
||||
"weaver",
|
||||
"suffragette",
|
||||
"liberty",
|
||||
"elizabeth",
|
||||
]
|
||||
|
||||
// TfL severity codes: https://api.tfl.gov.uk/Line/Meta/Severity
|
||||
// 0 = Special Service, 1 = Closed, 6 = Severe Delays, 9 = Minor Delays, 10 = Good Service
|
||||
const SEVERITY_MAP: Record<number, TflAlertSeverity | null> = {
|
||||
1: "closure",
|
||||
2: "closure", // Suspended
|
||||
3: "closure", // Part Suspended
|
||||
4: "closure", // Planned Closure
|
||||
5: "closure", // Part Closure
|
||||
6: "major-delays", // Severe Delays
|
||||
7: "major-delays", // Reduced Service
|
||||
8: "major-delays", // Bus Service
|
||||
9: "minor-delays", // Minor Delays
|
||||
10: null, // Good Service
|
||||
11: null, // Part Closed
|
||||
12: null, // Exit Only
|
||||
13: null, // No Step Free Access
|
||||
14: null, // Change of frequency
|
||||
15: null, // Diverted
|
||||
16: null, // Not Running
|
||||
17: null, // Issues Reported
|
||||
18: null, // No Issues
|
||||
19: null, // Information
|
||||
20: null, // Service Closed
|
||||
}
|
||||
|
||||
export class TflApi {
|
||||
private apiKey: string
|
||||
private stationsCache: StationLocation[] | null = null
|
||||
|
||||
constructor(apiKey: string) {
|
||||
this.apiKey = apiKey
|
||||
}
|
||||
|
||||
private async fetch<T>(path: string): Promise<T> {
|
||||
const url = new URL(path, TFL_API_BASE)
|
||||
url.searchParams.set("app_key", this.apiKey)
|
||||
const response = await fetch(url.toString())
|
||||
if (!response.ok) {
|
||||
throw new Error(`TfL API error: ${response.status} ${response.statusText}`)
|
||||
}
|
||||
return response.json() as Promise<T>
|
||||
}
|
||||
|
||||
async fetchLineStatuses(lines?: TflLineId[]): Promise<TflLineStatus[]> {
|
||||
const lineIds = lines ?? ALL_LINE_IDS
|
||||
const data = await this.fetch<unknown>(`/Line/${lineIds.join(",")}/Status`)
|
||||
|
||||
const parsed = lineResponseArray(data)
|
||||
if (parsed instanceof type.errors) {
|
||||
throw new Error(`Invalid TfL API response: ${parsed.summary}`)
|
||||
}
|
||||
|
||||
const statuses: TflLineStatus[] = []
|
||||
|
||||
for (const line of parsed) {
|
||||
for (const status of line.lineStatuses) {
|
||||
const severity = SEVERITY_MAP[status.statusSeverity]
|
||||
if (severity) {
|
||||
statuses.push({
|
||||
lineId: line.id,
|
||||
lineName: line.name,
|
||||
severity,
|
||||
description: status.reason ?? status.statusSeverityDescription,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return statuses
|
||||
}
|
||||
|
||||
async fetchStations(): Promise<StationLocation[]> {
|
||||
if (this.stationsCache) {
|
||||
return this.stationsCache
|
||||
}
|
||||
|
||||
// Fetch stations for all lines in parallel
|
||||
const responses = await Promise.all(
|
||||
ALL_LINE_IDS.map(async (id) => {
|
||||
const data = await this.fetch<unknown>(`/Line/${id}/StopPoints`)
|
||||
const parsed = lineStopPointsArray(data)
|
||||
if (parsed instanceof type.errors) {
|
||||
throw new Error(`Invalid TfL API response for line ${id}: ${parsed.summary}`)
|
||||
}
|
||||
return { lineId: id, stops: parsed }
|
||||
}),
|
||||
)
|
||||
|
||||
// Merge stations, combining lines for shared stations
|
||||
const stationMap = new Map<string, StationLocation>()
|
||||
|
||||
for (const { lineId: currentLineId, stops } of responses) {
|
||||
for (const stop of stops) {
|
||||
const existing = stationMap.get(stop.naptanId)
|
||||
if (existing) {
|
||||
if (!existing.lines.includes(currentLineId)) {
|
||||
existing.lines.push(currentLineId)
|
||||
}
|
||||
} else {
|
||||
stationMap.set(stop.naptanId, {
|
||||
id: stop.naptanId,
|
||||
name: stop.commonName,
|
||||
lat: stop.lat,
|
||||
lng: stop.lon,
|
||||
lines: [currentLineId],
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.stationsCache = Array.from(stationMap.values())
|
||||
return this.stationsCache
|
||||
}
|
||||
}
|
||||
|
||||
// Schemas
|
||||
|
||||
const lineId = type(
|
||||
"'bakerloo' | 'central' | 'circle' | 'district' | 'hammersmith-city' | 'jubilee' | 'metropolitan' | 'northern' | 'piccadilly' | 'victoria' | 'waterloo-city' | 'lioness' | 'mildmay' | 'windrush' | 'weaver' | 'suffragette' | 'liberty' | 'elizabeth'",
|
||||
)
|
||||
|
||||
export type TflLineId = typeof lineId.infer
|
||||
|
||||
const lineStatus = type({
|
||||
statusSeverity: "number",
|
||||
statusSeverityDescription: "string",
|
||||
"reason?": "string",
|
||||
})
|
||||
|
||||
const lineResponse = type({
|
||||
id: lineId,
|
||||
name: "string",
|
||||
lineStatuses: lineStatus.array(),
|
||||
})
|
||||
|
||||
const lineResponseArray = lineResponse.array()
|
||||
|
||||
const lineStopPoint = type({
|
||||
naptanId: "string",
|
||||
commonName: "string",
|
||||
lat: "number",
|
||||
lon: "number",
|
||||
})
|
||||
|
||||
const lineStopPointsArray = lineStopPoint.array()
|
||||
243
packages/aris-source-tfl/src/tfl-source.test.ts
Normal file
@@ -0,0 +1,243 @@
|
||||
import type { Context } from "@aris/core"
|
||||
|
||||
import { LocationKey, type Location } from "@aris/source-location"
|
||||
import { describe, expect, test } from "bun:test"
|
||||
|
||||
import type {
|
||||
ITflApi,
|
||||
StationLocation,
|
||||
TflAlertSeverity,
|
||||
TflLineId,
|
||||
TflLineStatus,
|
||||
} from "./types.ts"
|
||||
|
||||
import fixtures from "../fixtures/tfl-responses.json"
|
||||
import { TflSource } from "./tfl-source.ts"
|
||||
|
||||
// Mock API that returns fixture data
|
||||
class FixtureTflApi implements ITflApi {
|
||||
async fetchLineStatuses(_lines?: TflLineId[]): Promise<TflLineStatus[]> {
|
||||
const statuses: TflLineStatus[] = []
|
||||
|
||||
for (const line of fixtures.lineStatuses as Record<string, unknown>[]) {
|
||||
for (const status of line.lineStatuses as Record<string, unknown>[]) {
|
||||
const severityCode = status.statusSeverity as number
|
||||
const severity = this.mapSeverity(severityCode)
|
||||
if (severity) {
|
||||
statuses.push({
|
||||
lineId: line.id as TflLineId,
|
||||
lineName: line.name as string,
|
||||
severity,
|
||||
description: (status.reason as string) ?? (status.statusSeverityDescription as string),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return statuses
|
||||
}
|
||||
|
||||
async fetchStations(): Promise<StationLocation[]> {
|
||||
const stationMap = new Map<string, StationLocation>()
|
||||
|
||||
for (const [lineId, stops] of Object.entries(fixtures.stopPoints)) {
|
||||
for (const stop of stops as Record<string, unknown>[]) {
|
||||
const id = stop.naptanId as string
|
||||
const existing = stationMap.get(id)
|
||||
if (existing) {
|
||||
if (!existing.lines.includes(lineId as TflLineId)) {
|
||||
existing.lines.push(lineId as TflLineId)
|
||||
}
|
||||
} else {
|
||||
stationMap.set(id, {
|
||||
id,
|
||||
name: stop.commonName as string,
|
||||
lat: stop.lat as number,
|
||||
lng: stop.lon as number,
|
||||
lines: [lineId as TflLineId],
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(stationMap.values())
|
||||
}
|
||||
|
||||
private mapSeverity(code: number): TflAlertSeverity | null {
|
||||
const map: Record<number, TflAlertSeverity | null> = {
|
||||
1: "closure",
|
||||
2: "closure",
|
||||
3: "closure",
|
||||
4: "closure",
|
||||
5: "closure",
|
||||
6: "major-delays",
|
||||
7: "major-delays",
|
||||
8: "major-delays",
|
||||
9: "minor-delays",
|
||||
10: null,
|
||||
}
|
||||
return map[code] ?? null
|
||||
}
|
||||
}
|
||||
|
||||
function createContext(location?: Location): Context {
|
||||
const ctx: Context = { time: new Date("2026-01-15T12:00:00Z") }
|
||||
if (location) {
|
||||
ctx[LocationKey] = location
|
||||
}
|
||||
return ctx
|
||||
}
|
||||
|
||||
describe("TflSource", () => {
|
||||
const api = new FixtureTflApi()
|
||||
|
||||
describe("interface", () => {
|
||||
test("has correct id", () => {
|
||||
const source = new TflSource({ client: api })
|
||||
expect(source.id).toBe("tfl")
|
||||
})
|
||||
|
||||
test("depends on location", () => {
|
||||
const source = new TflSource({ client: api })
|
||||
expect(source.dependencies).toEqual(["location"])
|
||||
})
|
||||
|
||||
test("implements fetchItems", () => {
|
||||
const source = new TflSource({ client: api })
|
||||
expect(source.fetchItems).toBeDefined()
|
||||
})
|
||||
|
||||
test("throws if neither client nor apiKey provided", () => {
|
||||
expect(() => new TflSource({})).toThrow("Either client or apiKey must be provided")
|
||||
})
|
||||
})
|
||||
|
||||
describe("fetchItems", () => {
|
||||
test("returns feed items array", async () => {
|
||||
const source = new TflSource({ client: api })
|
||||
const items = await source.fetchItems(createContext())
|
||||
expect(Array.isArray(items)).toBe(true)
|
||||
})
|
||||
|
||||
test("feed items have correct base structure", async () => {
|
||||
const source = new TflSource({ client: api })
|
||||
const location: Location = { lat: 51.5074, lng: -0.1278, accuracy: 10, timestamp: new Date() }
|
||||
const items = await source.fetchItems(createContext(location))
|
||||
|
||||
for (const item of items) {
|
||||
expect(typeof item.id).toBe("string")
|
||||
expect(item.id).toMatch(/^tfl-alert-/)
|
||||
expect(item.type).toBe("tfl-alert")
|
||||
expect(typeof item.priority).toBe("number")
|
||||
expect(item.timestamp).toBeInstanceOf(Date)
|
||||
}
|
||||
})
|
||||
|
||||
test("feed items have correct data structure", async () => {
|
||||
const source = new TflSource({ client: api })
|
||||
const location: Location = { lat: 51.5074, lng: -0.1278, accuracy: 10, timestamp: new Date() }
|
||||
const items = await source.fetchItems(createContext(location))
|
||||
|
||||
for (const item of items) {
|
||||
expect(typeof item.data.line).toBe("string")
|
||||
expect(typeof item.data.lineName).toBe("string")
|
||||
expect(["minor-delays", "major-delays", "closure"]).toContain(item.data.severity)
|
||||
expect(typeof item.data.description).toBe("string")
|
||||
expect(
|
||||
item.data.closestStationDistance === null ||
|
||||
typeof item.data.closestStationDistance === "number",
|
||||
).toBe(true)
|
||||
}
|
||||
})
|
||||
|
||||
test("feed item ids are unique", async () => {
|
||||
const source = new TflSource({ client: api })
|
||||
const items = await source.fetchItems(createContext())
|
||||
|
||||
const ids = items.map((item) => item.id)
|
||||
const uniqueIds = new Set(ids)
|
||||
expect(uniqueIds.size).toBe(ids.length)
|
||||
})
|
||||
|
||||
test("feed items are sorted by priority descending", async () => {
|
||||
const source = new TflSource({ client: api })
|
||||
const items = await source.fetchItems(createContext())
|
||||
|
||||
for (let i = 1; i < items.length; i++) {
|
||||
const prev = items[i - 1]!
|
||||
const curr = items[i]!
|
||||
expect(prev.priority).toBeGreaterThanOrEqual(curr.priority)
|
||||
}
|
||||
})
|
||||
|
||||
test("priority values match severity levels", async () => {
|
||||
const source = new TflSource({ client: api })
|
||||
const items = await source.fetchItems(createContext())
|
||||
|
||||
const severityPriority: Record<string, number> = {
|
||||
closure: 1.0,
|
||||
"major-delays": 0.8,
|
||||
"minor-delays": 0.6,
|
||||
}
|
||||
|
||||
for (const item of items) {
|
||||
expect(item.priority).toBe(severityPriority[item.data.severity]!)
|
||||
}
|
||||
})
|
||||
|
||||
test("closestStationDistance is number when location provided", async () => {
|
||||
const source = new TflSource({ client: api })
|
||||
const location: Location = { lat: 51.5074, lng: -0.1278, accuracy: 10, timestamp: new Date() }
|
||||
const items = await source.fetchItems(createContext(location))
|
||||
|
||||
for (const item of items) {
|
||||
expect(typeof item.data.closestStationDistance).toBe("number")
|
||||
expect(item.data.closestStationDistance!).toBeGreaterThan(0)
|
||||
}
|
||||
})
|
||||
|
||||
test("closestStationDistance is null when no location provided", async () => {
|
||||
const source = new TflSource({ client: api })
|
||||
const items = await source.fetchItems(createContext())
|
||||
|
||||
for (const item of items) {
|
||||
expect(item.data.closestStationDistance).toBeNull()
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("TfL Fixture Data Shape", () => {
|
||||
test("fixtures have expected structure", () => {
|
||||
expect(typeof fixtures.fetchedAt).toBe("string")
|
||||
expect(Array.isArray(fixtures.lineStatuses)).toBe(true)
|
||||
expect(typeof fixtures.stopPoints).toBe("object")
|
||||
})
|
||||
|
||||
test("line statuses have required fields", () => {
|
||||
for (const line of fixtures.lineStatuses as Record<string, unknown>[]) {
|
||||
expect(typeof line.id).toBe("string")
|
||||
expect(typeof line.name).toBe("string")
|
||||
expect(Array.isArray(line.lineStatuses)).toBe(true)
|
||||
|
||||
for (const status of line.lineStatuses as Record<string, unknown>[]) {
|
||||
expect(typeof status.statusSeverity).toBe("number")
|
||||
expect(typeof status.statusSeverityDescription).toBe("string")
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
test("stop points have required fields", () => {
|
||||
for (const [lineId, stops] of Object.entries(fixtures.stopPoints)) {
|
||||
expect(typeof lineId).toBe("string")
|
||||
expect(Array.isArray(stops)).toBe(true)
|
||||
|
||||
for (const stop of stops as Record<string, unknown>[]) {
|
||||
expect(typeof stop.naptanId).toBe("string")
|
||||
expect(typeof stop.commonName).toBe("string")
|
||||
expect(typeof stop.lat).toBe("number")
|
||||
expect(typeof stop.lon).toBe("number")
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
136
packages/aris-source-tfl/src/tfl-source.ts
Normal file
@@ -0,0 +1,136 @@
import type { Context, FeedSource } from "@aris/core"

import { contextValue } from "@aris/core"
import { LocationKey } from "@aris/source-location"

import type {
  ITflApi,
  StationLocation,
  TflAlertData,
  TflAlertFeedItem,
  TflAlertSeverity,
  TflLineId,
  TflSourceOptions,
} from "./types.ts"

import { TflApi } from "./tfl-api.ts"

const SEVERITY_PRIORITY: Record<TflAlertSeverity, number> = {
  closure: 1.0,
  "major-delays": 0.8,
  "minor-delays": 0.6,
}

/**
 * A FeedSource that provides TfL (Transport for London) service alerts.
 *
 * Depends on location source for proximity-based sorting. Produces feed items
 * for tube, overground, and Elizabeth line disruptions.
 *
 * @example
 * ```ts
 * const tflSource = new TflSource({
 *   apiKey: process.env.TFL_API_KEY!,
 *   lines: ["northern", "victoria", "jubilee"],
 * })
 *
 * const engine = new FeedEngine()
 *   .register(locationSource)
 *   .register(tflSource)
 *
 * const { items } = await engine.refresh()
 * ```
 */
export class TflSource implements FeedSource<TflAlertFeedItem> {
  readonly id = "tfl"
  readonly dependencies = ["location"]

  private readonly client: ITflApi
  private readonly lines?: TflLineId[]

  constructor(options: TflSourceOptions) {
    if (!options.client && !options.apiKey) {
      throw new Error("Either client or apiKey must be provided")
    }
    this.client = options.client ?? new TflApi(options.apiKey!)
    this.lines = options.lines
  }

  async fetchItems(context: Context): Promise<TflAlertFeedItem[]> {
    const [statuses, stations] = await Promise.all([
      this.client.fetchLineStatuses(this.lines),
      this.client.fetchStations(),
    ])

    const location = contextValue(context, LocationKey)

    const items: TflAlertFeedItem[] = statuses.map((status) => {
      const closestStationDistance = location
        ? findClosestStationDistance(status.lineId, stations, location.lat, location.lng)
        : null

      const data: TflAlertData = {
        line: status.lineId,
        lineName: status.lineName,
        severity: status.severity,
        description: status.description,
        closestStationDistance,
      }

      return {
        id: `tfl-alert-${status.lineId}-${status.severity}`,
        type: "tfl-alert",
        priority: SEVERITY_PRIORITY[status.severity],
        timestamp: context.time,
        data,
      }
    })

    // Sort by severity (desc), then by proximity (asc) if location available
    items.sort((a, b) => {
      if (b.priority !== a.priority) {
        return b.priority - a.priority
      }
      if (a.data.closestStationDistance !== null && b.data.closestStationDistance !== null) {
        return a.data.closestStationDistance - b.data.closestStationDistance
      }
      return 0
    })

    return items
  }
}

function haversineDistance(lat1: number, lng1: number, lat2: number, lng2: number): number {
  const R = 6371 // Earth's radius in km
  const dLat = ((lat2 - lat1) * Math.PI) / 180
  const dLng = ((lng2 - lng1) * Math.PI) / 180
  const a =
    Math.sin(dLat / 2) * Math.sin(dLat / 2) +
    Math.cos((lat1 * Math.PI) / 180) *
      Math.cos((lat2 * Math.PI) / 180) *
      Math.sin(dLng / 2) *
      Math.sin(dLng / 2)
  const c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a))
  return R * c
}

function findClosestStationDistance(
  lineId: TflLineId,
  stations: StationLocation[],
  userLat: number,
  userLng: number,
): number | null {
  const lineStations = stations.filter((s) => s.lines.includes(lineId))
  if (lineStations.length === 0) return null

  let minDistance = Infinity
  for (const station of lineStations) {
    const distance = haversineDistance(userLat, userLng, station.lat, station.lng)
    if (distance < minDistance) {
      minDistance = distance
    }
  }

  return minDistance
}
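For reference, `haversineDistance` is the standard haversine great-circle formula with Earth's radius R ≈ 6371 km: a = sin²(Δφ/2) + cos φ₁ · cos φ₂ · sin²(Δλ/2) and d = 2R · atan2(√a, √(1−a)), where φ is latitude and λ is longitude in radians. The resulting `closestStationDistance` is therefore expressed in kilometres.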
50
packages/aris-source-tfl/src/types.ts
Normal file
@@ -0,0 +1,50 @@
import type { FeedItem } from "@aris/core"

import type { TflLineId } from "./tfl-api.ts"

export type { TflLineId } from "./tfl-api.ts"

export const TflAlertSeverity = {
  MinorDelays: "minor-delays",
  MajorDelays: "major-delays",
  Closure: "closure",
} as const

export type TflAlertSeverity = (typeof TflAlertSeverity)[keyof typeof TflAlertSeverity]

export interface TflAlertData extends Record<string, unknown> {
  line: TflLineId
  lineName: string
  severity: TflAlertSeverity
  description: string
  closestStationDistance: number | null
}

export type TflAlertFeedItem = FeedItem<"tfl-alert", TflAlertData>

export interface TflSourceOptions {
  apiKey?: string
  client?: ITflApi
  /** Lines to monitor. Defaults to all lines. */
  lines?: TflLineId[]
}

export interface StationLocation {
  id: string
  name: string
  lat: number
  lng: number
  lines: TflLineId[]
}

export interface ITflApi {
  fetchLineStatuses(lines?: TflLineId[]): Promise<TflLineStatus[]>
  fetchStations(): Promise<StationLocation[]>
}

export interface TflLineStatus {
  lineId: TflLineId
  lineName: string
  severity: TflAlertSeverity
  description: string
}
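To make the shape concrete, a hand-written value satisfying these types might look like this (illustrative only; the description text is invented):

```ts
import type { TflAlertFeedItem } from "@aris/source-tfl"

const example: TflAlertFeedItem = {
  id: "tfl-alert-northern-minor-delays",
  type: "tfl-alert",
  priority: 0.6, // matches SEVERITY_PRIORITY for minor delays
  timestamp: new Date(),
  data: {
    line: "northern",
    lineName: "Northern",
    severity: "minor-delays",
    description: "Minor delays on parts of the line.",
    closestStationDistance: 1.2,
  },
}
```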
101
packages/aris-source-weatherkit/README.md
Normal file
@@ -0,0 +1,101 @@
# @aris/source-weatherkit

Weather feed source using Apple WeatherKit API.

## Usage

### Basic Setup

```ts
import { WeatherSource, Units } from "@aris/source-weatherkit"

const weatherSource = new WeatherSource({
  credentials: {
    privateKey: process.env.WEATHERKIT_PRIVATE_KEY!,
    keyId: process.env.WEATHERKIT_KEY_ID!,
    teamId: process.env.WEATHERKIT_TEAM_ID!,
    serviceId: process.env.WEATHERKIT_SERVICE_ID!,
  },
  units: Units.metric,
})
```

### With Feed Source Graph

```ts
import { LocationSource } from "@aris/source-location"
import { WeatherSource } from "@aris/source-weatherkit"

const locationSource = new LocationSource()
const weatherSource = new WeatherSource({ credentials })

// Weather depends on location - graph handles ordering
const sources = [locationSource, weatherSource]
```

### Reading Weather Context

Downstream sources can access weather data:

```ts
import { contextValue } from "@aris/core"
import { WeatherKey } from "@aris/source-weatherkit"

async function fetchContext(context: Context) {
  const weather = contextValue(context, WeatherKey)

  if (weather?.condition === "Rain") {
    // Suggest umbrella, indoor activities, etc.
  }

  if (weather && weather.uvIndex > 7) {
    // Suggest sunscreen
  }
}
```

## Exports

| Export          | Description                             |
| --------------- | --------------------------------------- |
| `WeatherSource` | FeedSource implementation               |
| `WeatherKey`    | Context key for simplified weather data |
| `Weather`       | Type for weather context                |
| `Units`         | `metric` or `imperial`                  |

## Options

| Option        | Default  | Description                |
| ------------- | -------- | -------------------------- |
| `credentials` | -        | WeatherKit API credentials |
| `client`      | -        | Custom WeatherKit client   |
| `hourlyLimit` | `12`     | Max hourly forecasts       |
| `dailyLimit`  | `7`      | Max daily forecasts        |
| `units`       | `metric` | Temperature/speed units    |

## Context

Provides simplified weather context for downstream sources:

```ts
interface Weather {
  temperature: number
  temperatureApparent: number
  condition: ConditionCode
  humidity: number
  uvIndex: number
  windSpeed: number
  daylight: boolean
}
```

## Feed Items

Produces feed items:

- `weather-current` - Current conditions
- `weather-hourly` - Hourly forecasts (up to `hourlyLimit`)
- `weather-daily` - Daily forecasts (up to `dailyLimit`)
- `weather-alert` - Weather alerts when present

Priority is adjusted based on weather severity (storms, extreme temperatures).
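A rough sketch (not part of this package) of consuming these items by their `type` discriminant:

```ts
import type { WeatherFeedItem } from "@aris/source-weatherkit"

function summarize(item: WeatherFeedItem): string {
  switch (item.type) {
    case "weather-current":
      return `Now: ${item.data.temperature}°, ${item.data.conditionCode}`
    case "weather-hourly":
      return `At ${item.data.forecastTime.toISOString()}: ${item.data.temperature}°`
    case "weather-daily":
      return `High ${item.data.temperatureMax}° / low ${item.data.temperatureMin}°`
    case "weather-alert":
      return `${item.data.severity}: ${item.data.description}`
    default:
      // Unreachable if the union stays exhaustive.
      return "Unknown weather item"
  }
}
```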
File diff suppressed because one or more lines are too long
15
packages/aris-source-weatherkit/package.json
Normal file
@@ -0,0 +1,15 @@
{
  "name": "@aris/source-weatherkit",
  "version": "0.0.0",
  "type": "module",
  "main": "src/index.ts",
  "types": "src/index.ts",
  "scripts": {
    "test": "bun test ."
  },
  "dependencies": {
    "@aris/core": "workspace:*",
    "@aris/source-location": "workspace:*",
    "arktype": "^2.1.0"
  }
}
97
packages/aris-source-weatherkit/src/feed-items.ts
Normal file
@@ -0,0 +1,97 @@
import type { FeedItem } from "@aris/core"

import type { Certainty, ConditionCode, PrecipitationType, Severity, Urgency } from "./weatherkit"

export const WeatherFeedItemType = {
	current: "weather-current",
	hourly: "weather-hourly",
	daily: "weather-daily",
	alert: "weather-alert",
} as const

export type WeatherFeedItemType = (typeof WeatherFeedItemType)[keyof typeof WeatherFeedItemType]

export type CurrentWeatherData = {
	conditionCode: ConditionCode
	daylight: boolean
	humidity: number
	precipitationIntensity: number
	pressure: number
	pressureTrend: "rising" | "falling" | "steady"
	temperature: number
	temperatureApparent: number
	uvIndex: number
	visibility: number
	windDirection: number
	windGust: number
	windSpeed: number
}

export interface CurrentWeatherFeedItem extends FeedItem<
	typeof WeatherFeedItemType.current,
	CurrentWeatherData
> {}

export type HourlyWeatherData = {
	forecastTime: Date
	conditionCode: ConditionCode
	daylight: boolean
	humidity: number
	precipitationAmount: number
	precipitationChance: number
	precipitationType: PrecipitationType
	temperature: number
	temperatureApparent: number
	uvIndex: number
	windDirection: number
	windGust: number
	windSpeed: number
}

export interface HourlyWeatherFeedItem extends FeedItem<
	typeof WeatherFeedItemType.hourly,
	HourlyWeatherData
> {}

export type DailyWeatherData = {
	forecastDate: Date
	conditionCode: ConditionCode
	maxUvIndex: number
	precipitationAmount: number
	precipitationChance: number
	precipitationType: PrecipitationType
	snowfallAmount: number
	sunrise: Date
	sunset: Date
	temperatureMax: number
	temperatureMin: number
}

export interface DailyWeatherFeedItem extends FeedItem<
	typeof WeatherFeedItemType.daily,
	DailyWeatherData
> {}

export type WeatherAlertData = {
	alertId: string
	areaName: string
	certainty: Certainty
	description: string
	detailsUrl: string
	effectiveTime: Date
	expireTime: Date
	severity: Severity
	source: string
	urgency: Urgency
}

export interface WeatherAlertFeedItem extends FeedItem<
	typeof WeatherFeedItemType.alert,
	WeatherAlertData
> {}

export type WeatherFeedItem =
	| CurrentWeatherFeedItem
	| HourlyWeatherFeedItem
	| DailyWeatherFeedItem
	| WeatherAlertFeedItem
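Because the union above is discriminated by `type` (each member fixes `FeedItem`'s first parameter to a literal, and `weather-source.ts` constructs items with matching `type`/`data` fields), consumers can narrow with an ordinary switch. A sketch with an illustrative `summarize` helper:

```ts
import { WeatherFeedItemType, type WeatherFeedItem } from "@aris/source-weatherkit"

function summarize(item: WeatherFeedItem): string {
	switch (item.type) {
		case WeatherFeedItemType.current:
			// item.data narrows to CurrentWeatherData here
			return `Now: ${item.data.temperature}° (${item.data.conditionCode})`
		case WeatherFeedItemType.alert:
			// item.data narrows to WeatherAlertData here
			return `${item.data.severity} alert for ${item.data.areaName}`
		default:
			return item.type
	}
}
```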
packages/aris-source-weatherkit/src/index.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
export { WeatherKey, type Weather } from "./weather-context"
export {
	WeatherSource,
	Units,
	type Units as UnitsType,
	type WeatherSourceOptions,
} from "./weather-source"

export {
	WeatherFeedItemType,
	type WeatherFeedItemType as WeatherFeedItemTypeType,
	type WeatherFeedItem,
	type CurrentWeatherFeedItem,
	type CurrentWeatherData,
	type HourlyWeatherFeedItem,
	type HourlyWeatherData,
	type DailyWeatherFeedItem,
	type DailyWeatherData,
	type WeatherAlertFeedItem,
	type WeatherAlertData,
} from "./feed-items"

export {
	ConditionCode,
	Severity,
	Urgency,
	Certainty,
	PrecipitationType,
	DefaultWeatherKitClient,
	type ConditionCode as ConditionCodeType,
	type Severity as SeverityType,
	type Urgency as UrgencyType,
	type Certainty as CertaintyType,
	type PrecipitationType as PrecipitationTypeType,
	type WeatherKitClient,
	type WeatherKitCredentials,
	type WeatherKitQueryOptions,
	type WeatherKitResponse,
} from "./weatherkit"
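Note the `...Type` aliases: each runtime constant is paired with a type-only re-export so both can be imported without a name clash, e.g.:

```ts
import { Units, type UnitsType } from "@aris/source-weatherkit"

const units: UnitsType = Units.metric
```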
packages/aris-source-weatherkit/src/weather-context.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
import type { ContextKey } from "@aris/core"

import { contextKey } from "@aris/core"

import type { ConditionCode } from "./weatherkit"

/**
 * Simplified weather context for downstream sources.
 */
export interface Weather {
	/** Current temperature */
	temperature: number
	/** Feels-like temperature */
	temperatureApparent: number
	/** Weather condition */
	condition: ConditionCode
	/** Relative humidity (0-1) */
	humidity: number
	/** UV index */
	uvIndex: number
	/** Wind speed */
	windSpeed: number
	/** Is it currently daytime */
	daylight: boolean
}

export const WeatherKey: ContextKey<Weather> = contextKey("weather")
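The key is used symmetrically: `WeatherSource.fetchContext` stores the value under `WeatherKey`, and consumers read it back with `contextValue`. A producer-side sketch (the `toContextPatch` helper is illustrative):

```ts
import type { Context } from "@aris/core"
import { WeatherKey, type Weather } from "@aris/source-weatherkit"

// What fetchContext returns when a location is available.
function toContextPatch(weather: Weather): Partial<Context> {
	return { [WeatherKey]: weather }
}
```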
packages/aris-source-weatherkit/src/weather-source.test.ts (new file, 182 lines)
@@ -0,0 +1,182 @@
|
||||
import { contextValue, type Context } from "@aris/core"
|
||||
import { LocationKey } from "@aris/source-location"
|
||||
import { describe, expect, test } from "bun:test"
|
||||
|
||||
import type { WeatherKitClient, WeatherKitResponse } from "./weatherkit"
|
||||
|
||||
import fixture from "../fixtures/san-francisco.json"
|
||||
import { WeatherFeedItemType } from "./feed-items"
|
||||
import { WeatherKey } from "./weather-context"
|
||||
import { WeatherSource, Units } from "./weather-source"
|
||||
|
||||
const mockCredentials = {
|
||||
privateKey: "mock",
|
||||
keyId: "mock",
|
||||
teamId: "mock",
|
||||
serviceId: "mock",
|
||||
}
|
||||
|
||||
function createMockClient(response: WeatherKitResponse): WeatherKitClient {
|
||||
return {
|
||||
fetch: async () => response,
|
||||
}
|
||||
}
|
||||
|
||||
function createMockContext(location?: { lat: number; lng: number }): Context {
|
||||
const ctx: Context = { time: new Date("2026-01-17T00:00:00Z") }
|
||||
if (location) {
|
||||
ctx[LocationKey] = { ...location, accuracy: 10, timestamp: new Date() }
|
||||
}
|
||||
return ctx
|
||||
}
|
||||
|
||||
describe("WeatherSource", () => {
|
||||
describe("properties", () => {
|
||||
test("has correct id", () => {
|
||||
const source = new WeatherSource({ credentials: mockCredentials })
|
||||
expect(source.id).toBe("weather")
|
||||
})
|
||||
|
||||
test("depends on location", () => {
|
||||
const source = new WeatherSource({ credentials: mockCredentials })
|
||||
expect(source.dependencies).toEqual(["location"])
|
||||
})
|
||||
|
||||
test("throws error if neither client nor credentials provided", () => {
|
||||
expect(() => new WeatherSource({} as never)).toThrow(
|
||||
"Either client or credentials must be provided",
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("fetchContext", () => {
|
||||
const mockClient = createMockClient(fixture.response as WeatherKitResponse)
|
||||
|
||||
test("returns empty when no location", async () => {
|
||||
const source = new WeatherSource({ client: mockClient })
|
||||
const result = await source.fetchContext(createMockContext())
|
||||
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
test("returns simplified weather context", async () => {
|
||||
const source = new WeatherSource({ client: mockClient })
|
||||
const context = createMockContext({ lat: 37.7749, lng: -122.4194 })
|
||||
|
||||
const result = await source.fetchContext(context)
|
||||
const weather = contextValue(result, WeatherKey)
|
||||
|
||||
expect(weather).toBeDefined()
|
||||
expect(typeof weather!.temperature).toBe("number")
|
||||
expect(typeof weather!.temperatureApparent).toBe("number")
|
||||
expect(typeof weather!.condition).toBe("string")
|
||||
expect(typeof weather!.humidity).toBe("number")
|
||||
expect(typeof weather!.uvIndex).toBe("number")
|
||||
expect(typeof weather!.windSpeed).toBe("number")
|
||||
expect(typeof weather!.daylight).toBe("boolean")
|
||||
})
|
||||
|
||||
test("converts temperature to imperial", async () => {
|
||||
const source = new WeatherSource({ client: mockClient, units: Units.imperial })
|
||||
const context = createMockContext({ lat: 37.7749, lng: -122.4194 })
|
||||
|
||||
const result = await source.fetchContext(context)
|
||||
const weather = contextValue(result, WeatherKey)
|
||||
|
||||
// Fixture has temperature around 10°C, imperial should be around 50°F
|
||||
expect(weather!.temperature).toBeGreaterThan(40)
|
||||
})
|
||||
})
|
||||
|
||||
describe("fetchItems", () => {
|
||||
const mockClient = createMockClient(fixture.response as WeatherKitResponse)
|
||||
|
||||
test("returns empty array when no location", async () => {
|
||||
const source = new WeatherSource({ client: mockClient })
|
||||
const items = await source.fetchItems(createMockContext())
|
||||
|
||||
expect(items).toEqual([])
|
||||
})
|
||||
|
||||
test("returns feed items with all types", async () => {
|
||||
const source = new WeatherSource({ client: mockClient })
|
||||
const context = createMockContext({ lat: 37.7749, lng: -122.4194 })
|
||||
|
||||
const items = await source.fetchItems(context)
|
||||
|
||||
expect(items.length).toBeGreaterThan(0)
|
||||
expect(items.some((i) => i.type === WeatherFeedItemType.current)).toBe(true)
|
||||
expect(items.some((i) => i.type === WeatherFeedItemType.hourly)).toBe(true)
|
||||
expect(items.some((i) => i.type === WeatherFeedItemType.daily)).toBe(true)
|
||||
})
|
||||
|
||||
test("applies hourly and daily limits", async () => {
|
||||
const source = new WeatherSource({
|
||||
client: mockClient,
|
||||
hourlyLimit: 3,
|
||||
dailyLimit: 2,
|
||||
})
|
||||
const context = createMockContext({ lat: 37.7749, lng: -122.4194 })
|
||||
|
||||
const items = await source.fetchItems(context)
|
||||
|
||||
const hourlyItems = items.filter((i) => i.type === WeatherFeedItemType.hourly)
|
||||
const dailyItems = items.filter((i) => i.type === WeatherFeedItemType.daily)
|
||||
|
||||
expect(hourlyItems.length).toBe(3)
|
||||
expect(dailyItems.length).toBe(2)
|
||||
})
|
||||
|
||||
test("sets timestamp from context.time", async () => {
|
||||
const source = new WeatherSource({ client: mockClient })
|
||||
const queryTime = new Date("2026-01-17T12:00:00Z")
|
||||
const context = createMockContext({ lat: 37.7749, lng: -122.4194 })
|
||||
context.time = queryTime
|
||||
|
||||
const items = await source.fetchItems(context)
|
||||
|
||||
for (const item of items) {
|
||||
expect(item.timestamp).toEqual(queryTime)
|
||||
}
|
||||
})
|
||||
|
||||
test("assigns priority based on weather conditions", async () => {
|
||||
const source = new WeatherSource({ client: mockClient })
|
||||
const context = createMockContext({ lat: 37.7749, lng: -122.4194 })
|
||||
|
||||
const items = await source.fetchItems(context)
|
||||
|
||||
for (const item of items) {
|
||||
expect(item.priority).toBeGreaterThanOrEqual(0)
|
||||
expect(item.priority).toBeLessThanOrEqual(1)
|
||||
}
|
||||
|
||||
const currentItem = items.find((i) => i.type === WeatherFeedItemType.current)
|
||||
expect(currentItem).toBeDefined()
|
||||
expect(currentItem!.priority).toBeGreaterThanOrEqual(0.5)
|
||||
})
|
||||
|
||||
test("generates unique IDs for each item", async () => {
|
||||
const source = new WeatherSource({ client: mockClient })
|
||||
const context = createMockContext({ lat: 37.7749, lng: -122.4194 })
|
||||
|
||||
const items = await source.fetchItems(context)
|
||||
const ids = items.map((i) => i.id)
|
||||
const uniqueIds = new Set(ids)
|
||||
|
||||
expect(uniqueIds.size).toBe(ids.length)
|
||||
})
|
||||
})
|
||||
|
||||
describe("no reactive methods", () => {
|
||||
test("does not implement onContextUpdate", () => {
|
||||
const source = new WeatherSource({ credentials: mockCredentials })
|
||||
expect(source.onContextUpdate).toBeUndefined()
|
||||
})
|
||||
|
||||
test("does not implement onItemsUpdate", () => {
|
||||
const source = new WeatherSource({ credentials: mockCredentials })
|
||||
expect(source.onItemsUpdate).toBeUndefined()
|
||||
})
|
||||
})
|
||||
})
|
||||
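The tests above inject a canned response through the `client` option; the same pattern works for any stub, since every data set in `WeatherKitResponse` is optional (a sketch; `stubClient` is illustrative):

```ts
import { WeatherSource, type WeatherKitClient, type WeatherKitResponse } from "@aris/source-weatherkit"

// Any object with a matching async `fetch` method satisfies WeatherKitClient.
function stubClient(response: WeatherKitResponse): WeatherKitClient {
	return { fetch: async () => response }
}

// An empty response is valid: all data sets are optional in the schema.
const source = new WeatherSource({ client: stubClient({}) })
```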
packages/aris-source-weatherkit/src/weather-source.ts (new file, 363 lines)
@@ -0,0 +1,363 @@
|
||||
import type { Context, FeedSource } from "@aris/core"
|
||||
|
||||
import { contextValue } from "@aris/core"
|
||||
import { LocationKey } from "@aris/source-location"
|
||||
|
||||
import { WeatherFeedItemType, type WeatherFeedItem } from "./feed-items"
|
||||
import { WeatherKey, type Weather } from "./weather-context"
|
||||
import {
|
||||
DefaultWeatherKitClient,
|
||||
type ConditionCode,
|
||||
type CurrentWeather,
|
||||
type DailyForecast,
|
||||
type HourlyForecast,
|
||||
type Severity,
|
||||
type WeatherAlert,
|
||||
type WeatherKitClient,
|
||||
type WeatherKitCredentials,
|
||||
} from "./weatherkit"
|
||||
|
||||
export const Units = {
|
||||
metric: "metric",
|
||||
imperial: "imperial",
|
||||
} as const
|
||||
|
||||
export type Units = (typeof Units)[keyof typeof Units]
|
||||
|
||||
export interface WeatherSourceOptions {
|
||||
credentials?: WeatherKitCredentials
|
||||
client?: WeatherKitClient
|
||||
/** Number of hourly forecasts to include (default: 12) */
|
||||
hourlyLimit?: number
|
||||
/** Number of daily forecasts to include (default: 7) */
|
||||
dailyLimit?: number
|
||||
/** Units for temperature and measurements (default: metric) */
|
||||
units?: Units
|
||||
}
|
||||
|
||||
const DEFAULT_HOURLY_LIMIT = 12
|
||||
const DEFAULT_DAILY_LIMIT = 7
|
||||
|
||||
const BASE_PRIORITY = {
|
||||
current: 0.5,
|
||||
hourly: 0.3,
|
||||
daily: 0.2,
|
||||
alert: 0.7,
|
||||
} as const
|
||||
|
||||
const SEVERE_CONDITIONS = new Set<ConditionCode>([
|
||||
"SevereThunderstorm",
|
||||
"Hurricane",
|
||||
"Tornado",
|
||||
"TropicalStorm",
|
||||
"Blizzard",
|
||||
"FreezingRain",
|
||||
"Hail",
|
||||
"Frigid",
|
||||
"Hot",
|
||||
])
|
||||
|
||||
const MODERATE_CONDITIONS = new Set<ConditionCode>([
|
||||
"Thunderstorm",
|
||||
"IsolatedThunderstorms",
|
||||
"ScatteredThunderstorms",
|
||||
"HeavyRain",
|
||||
"HeavySnow",
|
||||
"FreezingDrizzle",
|
||||
"BlowingSnow",
|
||||
])
|
||||
|
||||
/**
|
||||
* A FeedSource that provides weather context and feed items using Apple WeatherKit.
|
||||
*
|
||||
* Depends on location source for coordinates. Provides simplified weather context
|
||||
* for downstream sources and produces weather feed items (current, hourly, daily, alerts).
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const weatherSource = new WeatherSource({
|
||||
* credentials: {
|
||||
* privateKey: process.env.WEATHERKIT_PRIVATE_KEY!,
|
||||
* keyId: process.env.WEATHERKIT_KEY_ID!,
|
||||
* teamId: process.env.WEATHERKIT_TEAM_ID!,
|
||||
* serviceId: process.env.WEATHERKIT_SERVICE_ID!,
|
||||
* },
|
||||
* units: Units.metric,
|
||||
* })
|
||||
*
|
||||
* // Access weather context in downstream sources
|
||||
* const weather = contextValue(context, WeatherKey)
|
||||
* if (weather?.condition === "Rain") {
|
||||
* // suggest umbrella
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export class WeatherSource implements FeedSource<WeatherFeedItem> {
|
||||
readonly id = "weather"
|
||||
readonly dependencies = ["location"]
|
||||
|
||||
private readonly client: WeatherKitClient
|
||||
private readonly hourlyLimit: number
|
||||
private readonly dailyLimit: number
|
||||
private readonly units: Units
|
||||
|
||||
constructor(options: WeatherSourceOptions) {
|
||||
if (!options.client && !options.credentials) {
|
||||
throw new Error("Either client or credentials must be provided")
|
||||
}
|
||||
this.client = options.client ?? new DefaultWeatherKitClient(options.credentials!)
|
||||
this.hourlyLimit = options.hourlyLimit ?? DEFAULT_HOURLY_LIMIT
|
||||
this.dailyLimit = options.dailyLimit ?? DEFAULT_DAILY_LIMIT
|
||||
this.units = options.units ?? Units.metric
|
||||
}
|
||||
|
||||
async fetchContext(context: Context): Promise<Partial<Context>> {
|
||||
const location = contextValue(context, LocationKey)
|
||||
if (!location) {
|
||||
return {}
|
||||
}
|
||||
|
||||
const response = await this.client.fetch({
|
||||
lat: location.lat,
|
||||
lng: location.lng,
|
||||
})
|
||||
|
||||
if (!response.currentWeather) {
|
||||
return {}
|
||||
}
|
||||
|
||||
const weather: Weather = {
|
||||
temperature: convertTemperature(response.currentWeather.temperature, this.units),
|
||||
temperatureApparent: convertTemperature(
|
||||
response.currentWeather.temperatureApparent,
|
||||
this.units,
|
||||
),
|
||||
condition: response.currentWeather.conditionCode,
|
||||
humidity: response.currentWeather.humidity,
|
||||
uvIndex: response.currentWeather.uvIndex,
|
||||
windSpeed: convertSpeed(response.currentWeather.windSpeed, this.units),
|
||||
daylight: response.currentWeather.daylight,
|
||||
}
|
||||
|
||||
return { [WeatherKey]: weather }
|
||||
}
|
||||
|
||||
async fetchItems(context: Context): Promise<WeatherFeedItem[]> {
|
||||
const location = contextValue(context, LocationKey)
|
||||
if (!location) {
|
||||
return []
|
||||
}
|
||||
|
||||
const timestamp = context.time
|
||||
|
||||
const response = await this.client.fetch({
|
||||
lat: location.lat,
|
||||
lng: location.lng,
|
||||
})
|
||||
|
||||
const items: WeatherFeedItem[] = []
|
||||
|
||||
if (response.currentWeather) {
|
||||
items.push(createCurrentWeatherFeedItem(response.currentWeather, timestamp, this.units))
|
||||
}
|
||||
|
||||
if (response.forecastHourly?.hours) {
|
||||
const hours = response.forecastHourly.hours.slice(0, this.hourlyLimit)
|
||||
for (let i = 0; i < hours.length; i++) {
|
||||
const hour = hours[i]
|
||||
if (hour) {
|
||||
items.push(createHourlyWeatherFeedItem(hour, i, timestamp, this.units))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (response.forecastDaily?.days) {
|
||||
const days = response.forecastDaily.days.slice(0, this.dailyLimit)
|
||||
for (let i = 0; i < days.length; i++) {
|
||||
const day = days[i]
|
||||
if (day) {
|
||||
items.push(createDailyWeatherFeedItem(day, i, timestamp, this.units))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (response.weatherAlerts?.alerts) {
|
||||
for (const alert of response.weatherAlerts.alerts) {
|
||||
items.push(createWeatherAlertFeedItem(alert, timestamp))
|
||||
}
|
||||
}
|
||||
|
||||
return items
|
||||
}
|
||||
}
|
||||
|
||||
function adjustPriorityForCondition(basePriority: number, conditionCode: ConditionCode): number {
|
||||
if (SEVERE_CONDITIONS.has(conditionCode)) {
|
||||
return Math.min(1, basePriority + 0.3)
|
||||
}
|
||||
if (MODERATE_CONDITIONS.has(conditionCode)) {
|
||||
return Math.min(1, basePriority + 0.15)
|
||||
}
|
||||
return basePriority
|
||||
}
|
||||
|
||||
function adjustPriorityForAlertSeverity(severity: Severity): number {
|
||||
switch (severity) {
|
||||
case "extreme":
|
||||
return 1
|
||||
case "severe":
|
||||
return 0.9
|
||||
case "moderate":
|
||||
return 0.75
|
||||
case "minor":
|
||||
return BASE_PRIORITY.alert
|
||||
}
|
||||
}
|
||||
|
||||
function convertTemperature(celsius: number, units: Units): number {
|
||||
if (units === Units.imperial) {
|
||||
return (celsius * 9) / 5 + 32
|
||||
}
|
||||
return celsius
|
||||
}
|
||||
|
||||
function convertSpeed(kmh: number, units: Units): number {
|
||||
if (units === Units.imperial) {
|
||||
return kmh * 0.621371
|
||||
}
|
||||
return kmh
|
||||
}
|
||||
|
||||
function convertDistance(km: number, units: Units): number {
|
||||
if (units === Units.imperial) {
|
||||
return km * 0.621371
|
||||
}
|
||||
return km
|
||||
}
|
||||
|
||||
function convertPrecipitation(mm: number, units: Units): number {
|
||||
if (units === Units.imperial) {
|
||||
return mm * 0.0393701
|
||||
}
|
||||
return mm
|
||||
}
|
||||
|
||||
function convertPressure(mb: number, units: Units): number {
|
||||
if (units === Units.imperial) {
|
||||
return mb * 0.02953
|
||||
}
|
||||
return mb
|
||||
}
|
||||
|
||||
function createCurrentWeatherFeedItem(
|
||||
current: CurrentWeather,
|
||||
timestamp: Date,
|
||||
units: Units,
|
||||
): WeatherFeedItem {
|
||||
const priority = adjustPriorityForCondition(BASE_PRIORITY.current, current.conditionCode)
|
||||
|
||||
return {
|
||||
id: `weather-current-${timestamp.getTime()}`,
|
||||
type: WeatherFeedItemType.current,
|
||||
priority,
|
||||
timestamp,
|
||||
data: {
|
||||
conditionCode: current.conditionCode,
|
||||
daylight: current.daylight,
|
||||
humidity: current.humidity,
|
||||
precipitationIntensity: convertPrecipitation(current.precipitationIntensity, units),
|
||||
pressure: convertPressure(current.pressure, units),
|
||||
pressureTrend: current.pressureTrend,
|
||||
temperature: convertTemperature(current.temperature, units),
|
||||
temperatureApparent: convertTemperature(current.temperatureApparent, units),
|
||||
uvIndex: current.uvIndex,
|
||||
visibility: convertDistance(current.visibility, units),
|
||||
windDirection: current.windDirection,
|
||||
windGust: convertSpeed(current.windGust, units),
|
||||
windSpeed: convertSpeed(current.windSpeed, units),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
function createHourlyWeatherFeedItem(
|
||||
hourly: HourlyForecast,
|
||||
index: number,
|
||||
timestamp: Date,
|
||||
units: Units,
|
||||
): WeatherFeedItem {
|
||||
const priority = adjustPriorityForCondition(BASE_PRIORITY.hourly, hourly.conditionCode)
|
||||
|
||||
return {
|
||||
id: `weather-hourly-${timestamp.getTime()}-${index}`,
|
||||
type: WeatherFeedItemType.hourly,
|
||||
priority,
|
||||
timestamp,
|
||||
data: {
|
||||
forecastTime: new Date(hourly.forecastStart),
|
||||
conditionCode: hourly.conditionCode,
|
||||
daylight: hourly.daylight,
|
||||
humidity: hourly.humidity,
|
||||
precipitationAmount: convertPrecipitation(hourly.precipitationAmount, units),
|
||||
precipitationChance: hourly.precipitationChance,
|
||||
precipitationType: hourly.precipitationType,
|
||||
temperature: convertTemperature(hourly.temperature, units),
|
||||
temperatureApparent: convertTemperature(hourly.temperatureApparent, units),
|
||||
uvIndex: hourly.uvIndex,
|
||||
windDirection: hourly.windDirection,
|
||||
windGust: convertSpeed(hourly.windGust, units),
|
||||
windSpeed: convertSpeed(hourly.windSpeed, units),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
function createDailyWeatherFeedItem(
|
||||
daily: DailyForecast,
|
||||
index: number,
|
||||
timestamp: Date,
|
||||
units: Units,
|
||||
): WeatherFeedItem {
|
||||
const priority = adjustPriorityForCondition(BASE_PRIORITY.daily, daily.conditionCode)
|
||||
|
||||
return {
|
||||
id: `weather-daily-${timestamp.getTime()}-${index}`,
|
||||
type: WeatherFeedItemType.daily,
|
||||
priority,
|
||||
timestamp,
|
||||
data: {
|
||||
forecastDate: new Date(daily.forecastStart),
|
||||
conditionCode: daily.conditionCode,
|
||||
maxUvIndex: daily.maxUvIndex,
|
||||
precipitationAmount: convertPrecipitation(daily.precipitationAmount, units),
|
||||
precipitationChance: daily.precipitationChance,
|
||||
precipitationType: daily.precipitationType,
|
||||
snowfallAmount: convertPrecipitation(daily.snowfallAmount, units),
|
||||
sunrise: new Date(daily.sunrise),
|
||||
sunset: new Date(daily.sunset),
|
||||
temperatureMax: convertTemperature(daily.temperatureMax, units),
|
||||
temperatureMin: convertTemperature(daily.temperatureMin, units),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
function createWeatherAlertFeedItem(alert: WeatherAlert, timestamp: Date): WeatherFeedItem {
|
||||
const priority = adjustPriorityForAlertSeverity(alert.severity)
|
||||
|
||||
return {
|
||||
id: `weather-alert-${alert.id}`,
|
||||
type: WeatherFeedItemType.alert,
|
||||
priority,
|
||||
timestamp,
|
||||
data: {
|
||||
alertId: alert.id,
|
||||
areaName: alert.areaName,
|
||||
certainty: alert.certainty,
|
||||
description: alert.description,
|
||||
detailsUrl: alert.detailsUrl,
|
||||
effectiveTime: new Date(alert.effectiveTime),
|
||||
expireTime: new Date(alert.expireTime),
|
||||
severity: alert.severity,
|
||||
source: alert.source,
|
||||
urgency: alert.urgency,
|
||||
},
|
||||
}
|
||||
}
|
||||
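End to end, the source is driven with a `Context` that already carries a location, exactly as the tests do (the coordinates and credential placeholders are illustrative):

```ts
import type { Context } from "@aris/core"
import { LocationKey } from "@aris/source-location"
import { WeatherSource } from "@aris/source-weatherkit"

const source = new WeatherSource({
	credentials: { privateKey: "...", keyId: "...", teamId: "...", serviceId: "..." },
})

const context: Context = { time: new Date() }
context[LocationKey] = { lat: 37.7749, lng: -122.4194, accuracy: 10, timestamp: new Date() }

const patch = await source.fetchContext(context) // { [WeatherKey]: Weather } or {}
const items = await source.fetchItems(context) // current + hourly + daily (+ alerts)
```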
packages/aris-source-weatherkit/src/weatherkit.ts (new file, 367 lines)
@@ -0,0 +1,367 @@
|
||||
// WeatherKit REST API client and response types
|
||||
// https://developer.apple.com/documentation/weatherkitrestapi
|
||||
|
||||
import { type } from "arktype"
|
||||
|
||||
export interface WeatherKitCredentials {
|
||||
privateKey: string
|
||||
keyId: string
|
||||
teamId: string
|
||||
serviceId: string
|
||||
}
|
||||
|
||||
export interface WeatherKitQueryOptions {
|
||||
lat: number
|
||||
lng: number
|
||||
language?: string
|
||||
timezone?: string
|
||||
}
|
||||
|
||||
export interface WeatherKitClient {
|
||||
fetch(query: WeatherKitQueryOptions): Promise<WeatherKitResponse>
|
||||
}
|
||||
|
||||
export class DefaultWeatherKitClient implements WeatherKitClient {
|
||||
private readonly credentials: WeatherKitCredentials
|
||||
|
||||
constructor(credentials: WeatherKitCredentials) {
|
||||
this.credentials = credentials
|
||||
}
|
||||
|
||||
async fetch(query: WeatherKitQueryOptions): Promise<WeatherKitResponse> {
|
||||
const token = await generateJwt(this.credentials)
|
||||
|
||||
const dataSets = ["currentWeather", "forecastHourly", "forecastDaily", "weatherAlerts"].join(
|
||||
",",
|
||||
)
|
||||
|
||||
const url = new URL(
|
||||
`${WEATHERKIT_API_BASE}/weather/${query.language ?? "en"}/${query.lat}/${query.lng}`,
|
||||
)
|
||||
url.searchParams.set("dataSets", dataSets)
|
||||
if (query.timezone) {
|
||||
url.searchParams.set("timezone", query.timezone)
|
||||
}
|
||||
|
||||
const response = await fetch(url.toString(), {
|
||||
headers: {
|
||||
Authorization: `Bearer ${token}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const body = await response.text()
|
||||
throw new Error(`WeatherKit API error: ${response.status} ${response.statusText}: ${body}`)
|
||||
}
|
||||
|
||||
const json = await response.json()
|
||||
const result = weatherKitResponseSchema(json)
|
||||
|
||||
if (result instanceof type.errors) {
|
||||
throw new Error(`WeatherKit API response validation failed: ${result.summary}`)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
export const Severity = {
|
||||
Minor: "minor",
|
||||
Moderate: "moderate",
|
||||
Severe: "severe",
|
||||
Extreme: "extreme",
|
||||
} as const
|
||||
|
||||
export type Severity = (typeof Severity)[keyof typeof Severity]
|
||||
|
||||
export const Urgency = {
|
||||
Immediate: "immediate",
|
||||
Expected: "expected",
|
||||
Future: "future",
|
||||
Past: "past",
|
||||
Unknown: "unknown",
|
||||
} as const
|
||||
|
||||
export type Urgency = (typeof Urgency)[keyof typeof Urgency]
|
||||
|
||||
export const Certainty = {
|
||||
Observed: "observed",
|
||||
Likely: "likely",
|
||||
Possible: "possible",
|
||||
Unlikely: "unlikely",
|
||||
Unknown: "unknown",
|
||||
} as const
|
||||
|
||||
export type Certainty = (typeof Certainty)[keyof typeof Certainty]
|
||||
|
||||
export const PrecipitationType = {
|
||||
Clear: "clear",
|
||||
Precipitation: "precipitation",
|
||||
Rain: "rain",
|
||||
Snow: "snow",
|
||||
Sleet: "sleet",
|
||||
Hail: "hail",
|
||||
Mixed: "mixed",
|
||||
} as const
|
||||
|
||||
export type PrecipitationType = (typeof PrecipitationType)[keyof typeof PrecipitationType]
|
||||
|
||||
export const ConditionCode = {
|
||||
Clear: "Clear",
|
||||
Cloudy: "Cloudy",
|
||||
Dust: "Dust",
|
||||
Fog: "Fog",
|
||||
Haze: "Haze",
|
||||
MostlyClear: "MostlyClear",
|
||||
MostlyCloudy: "MostlyCloudy",
|
||||
PartlyCloudy: "PartlyCloudy",
|
||||
ScatteredThunderstorms: "ScatteredThunderstorms",
|
||||
Smoke: "Smoke",
|
||||
Breezy: "Breezy",
|
||||
Windy: "Windy",
|
||||
Drizzle: "Drizzle",
|
||||
HeavyRain: "HeavyRain",
|
||||
Rain: "Rain",
|
||||
Showers: "Showers",
|
||||
Flurries: "Flurries",
|
||||
HeavySnow: "HeavySnow",
|
||||
MixedRainAndSleet: "MixedRainAndSleet",
|
||||
MixedRainAndSnow: "MixedRainAndSnow",
|
||||
MixedRainfall: "MixedRainfall",
|
||||
MixedSnowAndSleet: "MixedSnowAndSleet",
|
||||
ScatteredShowers: "ScatteredShowers",
|
||||
ScatteredSnowShowers: "ScatteredSnowShowers",
|
||||
Sleet: "Sleet",
|
||||
Snow: "Snow",
|
||||
SnowShowers: "SnowShowers",
|
||||
Blizzard: "Blizzard",
|
||||
BlowingSnow: "BlowingSnow",
|
||||
FreezingDrizzle: "FreezingDrizzle",
|
||||
FreezingRain: "FreezingRain",
|
||||
Frigid: "Frigid",
|
||||
Hail: "Hail",
|
||||
Hot: "Hot",
|
||||
Hurricane: "Hurricane",
|
||||
IsolatedThunderstorms: "IsolatedThunderstorms",
|
||||
SevereThunderstorm: "SevereThunderstorm",
|
||||
Thunderstorm: "Thunderstorm",
|
||||
Tornado: "Tornado",
|
||||
TropicalStorm: "TropicalStorm",
|
||||
} as const
|
||||
|
||||
export type ConditionCode = (typeof ConditionCode)[keyof typeof ConditionCode]
|
||||
|
||||
const WEATHERKIT_API_BASE = "https://weatherkit.apple.com/api/v1"
|
||||
|
||||
const severitySchema = type.enumerated(
|
||||
Severity.Minor,
|
||||
Severity.Moderate,
|
||||
Severity.Severe,
|
||||
Severity.Extreme,
|
||||
)
|
||||
|
||||
const urgencySchema = type.enumerated(
|
||||
Urgency.Immediate,
|
||||
Urgency.Expected,
|
||||
Urgency.Future,
|
||||
Urgency.Past,
|
||||
Urgency.Unknown,
|
||||
)
|
||||
|
||||
const certaintySchema = type.enumerated(
|
||||
Certainty.Observed,
|
||||
Certainty.Likely,
|
||||
Certainty.Possible,
|
||||
Certainty.Unlikely,
|
||||
Certainty.Unknown,
|
||||
)
|
||||
|
||||
const precipitationTypeSchema = type.enumerated(
|
||||
PrecipitationType.Clear,
|
||||
PrecipitationType.Precipitation,
|
||||
PrecipitationType.Rain,
|
||||
PrecipitationType.Snow,
|
||||
PrecipitationType.Sleet,
|
||||
PrecipitationType.Hail,
|
||||
PrecipitationType.Mixed,
|
||||
)
|
||||
|
||||
const conditionCodeSchema = type.enumerated(...Object.values(ConditionCode))
|
||||
|
||||
const pressureTrendSchema = type.enumerated("rising", "falling", "steady")
|
||||
|
||||
const currentWeatherSchema = type({
|
||||
asOf: "string",
|
||||
conditionCode: conditionCodeSchema,
|
||||
daylight: "boolean",
|
||||
humidity: "number",
|
||||
precipitationIntensity: "number",
|
||||
pressure: "number",
|
||||
pressureTrend: pressureTrendSchema,
|
||||
temperature: "number",
|
||||
temperatureApparent: "number",
|
||||
temperatureDewPoint: "number",
|
||||
uvIndex: "number",
|
||||
visibility: "number",
|
||||
windDirection: "number",
|
||||
windGust: "number",
|
||||
windSpeed: "number",
|
||||
})
|
||||
|
||||
export type CurrentWeather = typeof currentWeatherSchema.infer
|
||||
|
||||
const hourlyForecastSchema = type({
|
||||
forecastStart: "string",
|
||||
conditionCode: conditionCodeSchema,
|
||||
daylight: "boolean",
|
||||
humidity: "number",
|
||||
precipitationAmount: "number",
|
||||
precipitationChance: "number",
|
||||
precipitationType: precipitationTypeSchema,
|
||||
pressure: "number",
|
||||
snowfallIntensity: "number",
|
||||
temperature: "number",
|
||||
temperatureApparent: "number",
|
||||
temperatureDewPoint: "number",
|
||||
uvIndex: "number",
|
||||
visibility: "number",
|
||||
windDirection: "number",
|
||||
windGust: "number",
|
||||
windSpeed: "number",
|
||||
})
|
||||
|
||||
export type HourlyForecast = typeof hourlyForecastSchema.infer
|
||||
|
||||
const dayWeatherConditionsSchema = type({
|
||||
conditionCode: conditionCodeSchema,
|
||||
humidity: "number",
|
||||
precipitationAmount: "number",
|
||||
precipitationChance: "number",
|
||||
precipitationType: precipitationTypeSchema,
|
||||
snowfallAmount: "number",
|
||||
temperatureMax: "number",
|
||||
temperatureMin: "number",
|
||||
windDirection: "number",
|
||||
"windGust?": "number",
|
||||
windSpeed: "number",
|
||||
})
|
||||
|
||||
export type DayWeatherConditions = typeof dayWeatherConditionsSchema.infer
|
||||
|
||||
const dailyForecastSchema = type({
|
||||
forecastStart: "string",
|
||||
forecastEnd: "string",
|
||||
conditionCode: conditionCodeSchema,
|
||||
maxUvIndex: "number",
|
||||
moonPhase: "string",
|
||||
"moonrise?": "string",
|
||||
"moonset?": "string",
|
||||
precipitationAmount: "number",
|
||||
precipitationChance: "number",
|
||||
precipitationType: precipitationTypeSchema,
|
||||
snowfallAmount: "number",
|
||||
sunrise: "string",
|
||||
sunriseCivil: "string",
|
||||
sunriseNautical: "string",
|
||||
sunriseAstronomical: "string",
|
||||
sunset: "string",
|
||||
sunsetCivil: "string",
|
||||
sunsetNautical: "string",
|
||||
sunsetAstronomical: "string",
|
||||
temperatureMax: "number",
|
||||
temperatureMin: "number",
|
||||
"daytimeForecast?": dayWeatherConditionsSchema,
|
||||
"overnightForecast?": dayWeatherConditionsSchema,
|
||||
})
|
||||
|
||||
export type DailyForecast = typeof dailyForecastSchema.infer
|
||||
|
||||
const weatherAlertSchema = type({
|
||||
id: "string",
|
||||
areaId: "string",
|
||||
areaName: "string",
|
||||
certainty: certaintySchema,
|
||||
countryCode: "string",
|
||||
description: "string",
|
||||
detailsUrl: "string",
|
||||
effectiveTime: "string",
|
||||
expireTime: "string",
|
||||
issuedTime: "string",
|
||||
responses: "string[]",
|
||||
severity: severitySchema,
|
||||
source: "string",
|
||||
urgency: urgencySchema,
|
||||
})
|
||||
|
||||
export type WeatherAlert = typeof weatherAlertSchema.infer
|
||||
|
||||
const weatherKitResponseSchema = type({
|
||||
"currentWeather?": currentWeatherSchema,
|
||||
"forecastHourly?": type({
|
||||
hours: hourlyForecastSchema.array(),
|
||||
}),
|
||||
"forecastDaily?": type({
|
||||
days: dailyForecastSchema.array(),
|
||||
}),
|
||||
"weatherAlerts?": type({
|
||||
alerts: weatherAlertSchema.array(),
|
||||
}),
|
||||
})
|
||||
|
||||
export type WeatherKitResponse = typeof weatherKitResponseSchema.infer
|
||||
|
||||
async function generateJwt(credentials: WeatherKitCredentials): Promise<string> {
|
||||
const header = {
|
||||
alg: "ES256",
|
||||
kid: credentials.keyId,
|
||||
id: `${credentials.teamId}.${credentials.serviceId}`,
|
||||
}
|
||||
|
||||
const now = Math.floor(Date.now() / 1000)
|
||||
const payload = {
|
||||
iss: credentials.teamId,
|
||||
iat: now,
|
||||
exp: now + 3600,
|
||||
sub: credentials.serviceId,
|
||||
}
|
||||
|
||||
const encoder = new TextEncoder()
|
||||
const headerB64 = btoa(JSON.stringify(header))
|
||||
.replace(/\+/g, "-")
|
||||
.replace(/\//g, "_")
|
||||
.replace(/=+$/, "")
|
||||
const payloadB64 = btoa(JSON.stringify(payload))
|
||||
.replace(/\+/g, "-")
|
||||
.replace(/\//g, "_")
|
||||
.replace(/=+$/, "")
|
||||
|
||||
const signingInput = `${headerB64}.${payloadB64}`
|
||||
|
||||
const pemContents = credentials.privateKey
|
||||
.replace(/-----BEGIN PRIVATE KEY-----/, "")
|
||||
.replace(/-----END PRIVATE KEY-----/, "")
|
||||
.replace(/\s/g, "")
|
||||
|
||||
const binaryKey = Uint8Array.from(atob(pemContents), (c) => c.charCodeAt(0))
|
||||
|
||||
const cryptoKey = await crypto.subtle.importKey(
|
||||
"pkcs8",
|
||||
binaryKey,
|
||||
{ name: "ECDSA", namedCurve: "P-256" },
|
||||
false,
|
||||
["sign"],
|
||||
)
|
||||
|
||||
const signature = await crypto.subtle.sign(
|
||||
{ name: "ECDSA", hash: "SHA-256" },
|
||||
cryptoKey,
|
||||
encoder.encode(signingInput),
|
||||
)
|
||||
|
||||
const signatureB64 = btoa(String.fromCharCode(...new Uint8Array(signature)))
|
||||
.replace(/\+/g, "-")
|
||||
.replace(/\//g, "_")
|
||||
.replace(/=+$/, "")
|
||||
|
||||
return `${signingInput}.${signatureB64}`
|
||||
}
|
||||
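The client can also be used on its own, outside the feed source (a sketch with placeholder credentials; the query fields come from `WeatherKitQueryOptions`):

```ts
import { DefaultWeatherKitClient, type WeatherKitCredentials } from "@aris/source-weatherkit"

const credentials: WeatherKitCredentials = {
	privateKey: "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----",
	keyId: "ABC123XYZ",
	teamId: "TEAMID1234",
	serviceId: "com.example.weatherkit-client",
}

const client = new DefaultWeatherKitClient(credentials)

// Requests currentWeather, forecastHourly, forecastDaily and weatherAlerts,
// and validates the JSON against the arktype schema before returning it.
const response = await client.fetch({ lat: 37.7749, lng: -122.4194, timezone: "America/Los_Angeles" })
console.log(response.currentWeather?.conditionCode)
```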