2026-02-15 12:26:23 +00:00
|
|
|
import type { ActionDefinition } from "./action"
|
2026-03-01 22:52:41 +00:00
|
|
|
import type { ContextEntry } from "./context"
|
2026-01-24 22:42:00 +00:00
|
|
|
import type { FeedItem } from "./feed"
|
2026-02-28 15:57:01 +00:00
|
|
|
import type { FeedPostProcessor, ItemGroup } from "./feed-post-processor"
|
2026-01-24 22:42:00 +00:00
|
|
|
import type { FeedSource } from "./feed-source"
|
|
|
|
|
|
2026-03-01 22:52:41 +00:00
|
|
|
import { Context } from "./context"
|
|
|
|
|
|
2026-01-24 22:42:00 +00:00
|
|
|
/** A failure raised by a single source during fetchContext()/fetchItems(). */
export interface SourceError {

    /** ID of the FeedSource that threw. */
    sourceId: string

    /** The thrown value; non-Error throws are normalized to Error by the engine. */
    error: Error

}
|
|
|
|
|
|
|
|
|
|
/** Outcome of a feed refresh: accumulated context, collected items, and per-source errors. */
export interface FeedResult<TItem extends FeedItem = FeedItem> {

    /** Context accumulated across all sources during this refresh. */
    context: Context

    /** Items collected from all sources (after post-processing). */
    items: TItem[]

    /** Non-fatal errors from individual sources or post-processors. */
    errors: SourceError[]

    /** Item groups produced by post-processors */
    groupedItems?: ItemGroup[]

}
|
|
|
|
|
|
|
|
|
|
export type FeedSubscriber<TItem extends FeedItem = FeedItem> = (result: FeedResult<TItem>) => void
|
|
|
|
|
|
2026-02-24 01:13:41 +00:00
|
|
|
/** Cache lifetime used when FeedEngineConfig.cacheTtlMs is not provided. */
const DEFAULT_CACHE_TTL_MS = 300_000 // 5 minutes

/** Floor applied to any configured TTL. */
const MIN_CACHE_TTL_MS = 10 // prevent spin from zero/negative values
|
|
|
|
|
|
|
|
|
|
/** Construction options for FeedEngine. */
export interface FeedEngineConfig {

    /** Cache TTL in milliseconds. Default: 300_000 (5 minutes). Minimum: 10. */
    cacheTtlMs?: number

}
|
|
|
|
|
|
2026-01-24 22:42:00 +00:00
|
|
|
/** Validated dependency graph over the registered sources. */
interface SourceGraph {

    /** Sources keyed by ID. */
    sources: Map<string, FeedSource>

    /** Sources in topological (dependencies-first) order. */
    sorted: FeedSource[]

    /** Reverse edges: source ID -> IDs of sources that depend on it. */
    dependents: Map<string, string[]>

}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Orchestrates FeedSources, managing the dependency graph and context flow.
|
|
|
|
|
*
|
|
|
|
|
* Sources declare dependencies on other sources. The engine:
|
|
|
|
|
* - Validates the dependency graph (no missing deps, no cycles)
|
|
|
|
|
* - Runs fetchContext() in topological order during refresh
|
|
|
|
|
* - Runs fetchItems() on all sources with accumulated context
|
|
|
|
|
* - Subscribes to reactive updates via onContextUpdate/onItemsUpdate
|
|
|
|
|
*
|
|
|
|
|
* @example
|
|
|
|
|
* ```ts
|
|
|
|
|
* const engine = new FeedEngine()
|
|
|
|
|
* .register(locationSource)
|
|
|
|
|
* .register(weatherSource)
|
|
|
|
|
* .register(alertSource)
|
|
|
|
|
*
|
|
|
|
|
* // Pull-based refresh
|
|
|
|
|
* const { context, items, errors } = await engine.refresh()
|
|
|
|
|
*
|
|
|
|
|
* // Reactive updates
|
|
|
|
|
* engine.subscribe((result) => {
|
|
|
|
|
* console.log(result.items)
|
|
|
|
|
* })
|
|
|
|
|
* engine.start()
|
|
|
|
|
*
|
|
|
|
|
* // Cleanup
|
|
|
|
|
* engine.stop()
|
|
|
|
|
* ```
|
|
|
|
|
*/
|
|
|
|
|
export class FeedEngine<TItems extends FeedItem = FeedItem> {
|
|
|
|
|
private sources = new Map<string, FeedSource>()
|
|
|
|
|
private graph: SourceGraph | null = null
|
2026-03-01 22:52:41 +00:00
|
|
|
private context: Context = new Context()
|
2026-01-24 22:42:00 +00:00
|
|
|
private subscribers = new Set<FeedSubscriber<TItems>>()
|
|
|
|
|
private cleanups: Array<() => void> = []
|
|
|
|
|
private started = false
|
2026-02-28 15:57:01 +00:00
|
|
|
private postProcessors: FeedPostProcessor[] = []
|
2026-01-24 22:42:00 +00:00
|
|
|
|
2026-02-24 01:13:41 +00:00
|
|
|
private readonly cacheTtlMs: number
|
|
|
|
|
private cachedResult: FeedResult<TItems> | null = null
|
|
|
|
|
private cachedAt: number | null = null
|
|
|
|
|
private refreshTimer: ReturnType<typeof setTimeout> | null = null
|
|
|
|
|
|
|
|
|
|
constructor(config?: FeedEngineConfig) {
|
|
|
|
|
this.cacheTtlMs = Math.max(config?.cacheTtlMs ?? DEFAULT_CACHE_TTL_MS, MIN_CACHE_TTL_MS)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Returns the cached FeedResult if available and not expired.
|
|
|
|
|
* Returns null if no refresh has completed or the cache TTL has elapsed.
|
|
|
|
|
*/
|
|
|
|
|
lastFeed(): FeedResult<TItems> | null {
|
|
|
|
|
if (this.cachedResult === null || this.cachedAt === null) {
|
|
|
|
|
return null
|
|
|
|
|
}
|
|
|
|
|
if (Date.now() - this.cachedAt > this.cacheTtlMs) {
|
|
|
|
|
return null
|
|
|
|
|
}
|
|
|
|
|
return this.cachedResult
|
|
|
|
|
}
|
|
|
|
|
|
2026-01-24 22:42:00 +00:00
|
|
|
/**
|
|
|
|
|
* Registers a FeedSource. Invalidates the cached graph.
|
|
|
|
|
*/
|
|
|
|
|
register<TItem extends FeedItem>(source: FeedSource<TItem>): FeedEngine<TItems | TItem> {
|
|
|
|
|
this.sources.set(source.id, source)
|
|
|
|
|
this.graph = null
|
|
|
|
|
return this as FeedEngine<TItems | TItem>
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Unregisters a FeedSource by ID. Invalidates the cached graph.
|
|
|
|
|
*/
|
|
|
|
|
unregister(sourceId: string): this {
|
|
|
|
|
this.sources.delete(sourceId)
|
|
|
|
|
this.graph = null
|
|
|
|
|
return this
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-28 15:57:01 +00:00
|
|
|
/**
|
|
|
|
|
* Registers a post-processor. Processors run in registration order
|
|
|
|
|
* after items are collected, on every update path.
|
|
|
|
|
*/
|
|
|
|
|
registerPostProcessor(processor: FeedPostProcessor): this {
|
|
|
|
|
this.postProcessors.push(processor)
|
|
|
|
|
return this
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Unregisters a post-processor by reference.
|
|
|
|
|
*/
|
|
|
|
|
unregisterPostProcessor(processor: FeedPostProcessor): this {
|
|
|
|
|
this.postProcessors = this.postProcessors.filter((p) => p !== processor)
|
|
|
|
|
return this
|
|
|
|
|
}
|
|
|
|
|
|
2026-01-24 22:42:00 +00:00
|
|
|
/**
|
|
|
|
|
* Refreshes the feed by running all sources in dependency order.
|
|
|
|
|
* Calls fetchContext() then fetchItems() on each source.
|
|
|
|
|
*/
|
|
|
|
|
async refresh(): Promise<FeedResult<TItems>> {
|
|
|
|
|
const graph = this.ensureGraph()
|
|
|
|
|
const errors: SourceError[] = []
|
|
|
|
|
|
|
|
|
|
// Reset context with fresh time
|
2026-03-01 22:52:41 +00:00
|
|
|
const context = new Context()
|
2026-01-24 22:42:00 +00:00
|
|
|
|
|
|
|
|
// Run fetchContext in topological order
|
|
|
|
|
for (const source of graph.sorted) {
|
2026-02-14 16:20:24 +00:00
|
|
|
try {
|
2026-03-01 22:52:41 +00:00
|
|
|
const entries = await source.fetchContext(context)
|
|
|
|
|
if (entries) {
|
|
|
|
|
context.set(entries)
|
2026-01-24 22:42:00 +00:00
|
|
|
}
|
2026-02-14 16:20:24 +00:00
|
|
|
} catch (err) {
|
|
|
|
|
errors.push({
|
|
|
|
|
sourceId: source.id,
|
|
|
|
|
error: err instanceof Error ? err : new Error(String(err)),
|
|
|
|
|
})
|
2026-01-24 22:42:00 +00:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Run fetchItems on all sources
|
|
|
|
|
const items: FeedItem[] = []
|
|
|
|
|
for (const source of graph.sorted) {
|
|
|
|
|
if (source.fetchItems) {
|
|
|
|
|
try {
|
|
|
|
|
const sourceItems = await source.fetchItems(context)
|
|
|
|
|
items.push(...sourceItems)
|
|
|
|
|
} catch (err) {
|
|
|
|
|
errors.push({
|
|
|
|
|
sourceId: source.id,
|
|
|
|
|
error: err instanceof Error ? err : new Error(String(err)),
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
this.context = context
|
|
|
|
|
|
2026-02-28 15:57:01 +00:00
|
|
|
const {
|
|
|
|
|
items: processedItems,
|
|
|
|
|
groupedItems,
|
|
|
|
|
errors: postProcessorErrors,
|
2026-03-01 17:10:55 +00:00
|
|
|
} = await this.applyPostProcessors(items as TItems[], context, errors)
|
2026-02-28 15:57:01 +00:00
|
|
|
|
|
|
|
|
const result: FeedResult<TItems> = {
|
|
|
|
|
context,
|
|
|
|
|
items: processedItems,
|
|
|
|
|
errors: postProcessorErrors,
|
|
|
|
|
...(groupedItems.length > 0 ? { groupedItems } : {}),
|
|
|
|
|
}
|
2026-02-24 01:13:41 +00:00
|
|
|
this.updateCache(result)
|
|
|
|
|
|
|
|
|
|
return result
|
2026-01-24 22:42:00 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Subscribes to feed updates. Returns unsubscribe function.
|
|
|
|
|
*/
|
|
|
|
|
subscribe(callback: FeedSubscriber<TItems>): () => void {
|
|
|
|
|
this.subscribers.add(callback)
|
|
|
|
|
return () => {
|
|
|
|
|
this.subscribers.delete(callback)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
2026-02-24 01:13:41 +00:00
|
|
|
* Starts reactive subscriptions on all sources and begins periodic refresh.
|
2026-01-24 22:42:00 +00:00
|
|
|
* Sources with onContextUpdate will trigger re-computation of dependents.
|
|
|
|
|
*/
|
|
|
|
|
start(): void {
|
|
|
|
|
if (this.started) return
|
|
|
|
|
|
|
|
|
|
this.started = true
|
|
|
|
|
const graph = this.ensureGraph()
|
|
|
|
|
|
|
|
|
|
for (const source of graph.sorted) {
|
|
|
|
|
if (source.onContextUpdate) {
|
|
|
|
|
const cleanup = source.onContextUpdate(
|
2026-03-01 22:52:41 +00:00
|
|
|
(entries) => {
|
|
|
|
|
this.handleContextUpdate(source.id, entries)
|
2026-01-24 22:42:00 +00:00
|
|
|
},
|
|
|
|
|
() => this.context,
|
|
|
|
|
)
|
|
|
|
|
this.cleanups.push(cleanup)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (source.onItemsUpdate) {
|
|
|
|
|
const cleanup = source.onItemsUpdate(
|
|
|
|
|
() => {
|
|
|
|
|
this.scheduleRefresh()
|
|
|
|
|
},
|
|
|
|
|
() => this.context,
|
|
|
|
|
)
|
|
|
|
|
this.cleanups.push(cleanup)
|
|
|
|
|
}
|
|
|
|
|
}
|
2026-02-24 01:13:41 +00:00
|
|
|
|
|
|
|
|
this.scheduleNextRefresh()
|
2026-01-24 22:42:00 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
2026-02-24 01:13:41 +00:00
|
|
|
* Stops all reactive subscriptions and the periodic refresh timer.
|
2026-01-24 22:42:00 +00:00
|
|
|
*/
|
|
|
|
|
stop(): void {
|
|
|
|
|
this.started = false
|
2026-02-24 01:13:41 +00:00
|
|
|
this.cancelScheduledRefresh()
|
2026-01-24 22:42:00 +00:00
|
|
|
for (const cleanup of this.cleanups) {
|
|
|
|
|
cleanup()
|
|
|
|
|
}
|
|
|
|
|
this.cleanups = []
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Returns the current accumulated context.
|
|
|
|
|
*/
|
|
|
|
|
currentContext(): Context {
|
|
|
|
|
return this.context
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-15 12:26:23 +00:00
|
|
|
/**
|
|
|
|
|
* Execute an action on a registered source.
|
|
|
|
|
* Validates the action exists before dispatching.
|
|
|
|
|
*
|
|
|
|
|
* In pull-only mode (before `start()` is called), the action mutates source
|
|
|
|
|
* state but does not automatically refresh dependents. Call `refresh()`
|
|
|
|
|
* after to propagate changes. In reactive mode (`start()` called), sources
|
|
|
|
|
* that push context updates (e.g., LocationSource) will trigger dependent
|
|
|
|
|
* refresh automatically.
|
|
|
|
|
*/
|
|
|
|
|
async executeAction(sourceId: string, actionId: string, params: unknown): Promise<unknown> {
|
|
|
|
|
const actions = await this.listActions(sourceId)
|
|
|
|
|
if (!(actionId in actions)) {
|
|
|
|
|
throw new Error(`Action "${actionId}" not found on source "${sourceId}"`)
|
|
|
|
|
}
|
|
|
|
|
return this.sources.get(sourceId)!.executeAction(actionId, params)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* List actions available on a specific source.
|
|
|
|
|
* Validates that action definition IDs match their record keys.
|
|
|
|
|
*/
|
|
|
|
|
async listActions(sourceId: string): Promise<Record<string, ActionDefinition>> {
|
|
|
|
|
const source = this.sources.get(sourceId)
|
|
|
|
|
if (!source) {
|
|
|
|
|
throw new Error(`Source not found: ${sourceId}`)
|
|
|
|
|
}
|
|
|
|
|
const actions = await source.listActions()
|
|
|
|
|
for (const [key, definition] of Object.entries(actions)) {
|
|
|
|
|
if (key !== definition.id) {
|
|
|
|
|
throw new Error(
|
|
|
|
|
`Action ID mismatch on source "${sourceId}": key "${key}" !== definition.id "${definition.id}"`,
|
|
|
|
|
)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return actions
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-28 15:57:01 +00:00
|
|
|
private async applyPostProcessors(
|
|
|
|
|
items: TItems[],
|
2026-03-01 17:10:55 +00:00
|
|
|
context: Context,
|
2026-02-28 15:57:01 +00:00
|
|
|
errors: SourceError[],
|
|
|
|
|
): Promise<{ items: TItems[]; groupedItems: ItemGroup[]; errors: SourceError[] }> {
|
|
|
|
|
let currentItems = items
|
|
|
|
|
const allGroupedItems: ItemGroup[] = []
|
|
|
|
|
const allErrors = [...errors]
|
2026-03-01 17:26:25 +00:00
|
|
|
const boostScores = new Map<string, number>()
|
2026-02-28 15:57:01 +00:00
|
|
|
|
|
|
|
|
for (const processor of this.postProcessors) {
|
|
|
|
|
const snapshot = currentItems
|
|
|
|
|
try {
|
2026-03-01 17:10:55 +00:00
|
|
|
const enhancement = await processor(currentItems, context)
|
2026-02-28 15:57:01 +00:00
|
|
|
|
|
|
|
|
if (enhancement.additionalItems?.length) {
|
|
|
|
|
// Post-processors operate on FeedItem[] without knowledge of TItems.
|
|
|
|
|
// Additional items are merged untyped — this is intentional. The
|
|
|
|
|
// processor contract is "FeedItem in, FeedItem out"; type narrowing
|
|
|
|
|
// is the caller's responsibility when consuming FeedResult.
|
|
|
|
|
currentItems = [...currentItems, ...(enhancement.additionalItems as TItems[])]
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (enhancement.suppress?.length) {
|
|
|
|
|
const suppressSet = new Set(enhancement.suppress)
|
|
|
|
|
currentItems = currentItems.filter((item) => !suppressSet.has(item.id))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (enhancement.groupedItems?.length) {
|
|
|
|
|
allGroupedItems.push(...enhancement.groupedItems)
|
|
|
|
|
}
|
2026-03-01 17:26:25 +00:00
|
|
|
|
|
|
|
|
if (enhancement.boost) {
|
|
|
|
|
for (const [id, score] of Object.entries(enhancement.boost)) {
|
|
|
|
|
boostScores.set(id, (boostScores.get(id) ?? 0) + score)
|
|
|
|
|
}
|
|
|
|
|
}
|
2026-02-28 15:57:01 +00:00
|
|
|
} catch (err) {
|
|
|
|
|
const sourceId = processor.name || "anonymous"
|
|
|
|
|
allErrors.push({
|
|
|
|
|
sourceId,
|
|
|
|
|
error: err instanceof Error ? err : new Error(String(err)),
|
|
|
|
|
})
|
|
|
|
|
currentItems = snapshot
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2026-03-01 17:26:25 +00:00
|
|
|
// Apply boost reordering: positive-boost first (desc), then zero, then negative (desc).
|
|
|
|
|
// Stable sort within each tier preserves original relative order.
|
|
|
|
|
if (boostScores.size > 0) {
|
|
|
|
|
currentItems = applyBoostOrder(currentItems, boostScores)
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-28 15:57:01 +00:00
|
|
|
// Remove stale item IDs from groups and drop empty groups
|
|
|
|
|
const itemIds = new Set(currentItems.map((item) => item.id))
|
|
|
|
|
const validGroups = allGroupedItems.reduce<ItemGroup[]>((acc, group) => {
|
|
|
|
|
const ids = group.itemIds.filter((id) => itemIds.has(id))
|
|
|
|
|
if (ids.length > 0) {
|
|
|
|
|
acc.push({ ...group, itemIds: ids })
|
|
|
|
|
}
|
|
|
|
|
return acc
|
|
|
|
|
}, [])
|
|
|
|
|
|
|
|
|
|
return { items: currentItems, groupedItems: validGroups, errors: allErrors }
|
|
|
|
|
}
|
|
|
|
|
|
2026-01-24 22:42:00 +00:00
|
|
|
private ensureGraph(): SourceGraph {
|
|
|
|
|
if (!this.graph) {
|
|
|
|
|
this.graph = buildGraph(Array.from(this.sources.values()))
|
|
|
|
|
}
|
|
|
|
|
return this.graph
|
|
|
|
|
}
|
|
|
|
|
|
2026-03-01 22:52:41 +00:00
|
|
|
private handleContextUpdate(sourceId: string, entries: readonly ContextEntry[]): void {
|
|
|
|
|
this.context.time = new Date()
|
|
|
|
|
this.context.set(entries)
|
2026-01-24 22:42:00 +00:00
|
|
|
|
|
|
|
|
// Re-run dependents and notify
|
|
|
|
|
this.refreshDependents(sourceId)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private async refreshDependents(sourceId: string): Promise<void> {
|
|
|
|
|
const graph = this.ensureGraph()
|
|
|
|
|
const toRefresh = this.collectDependents(sourceId, graph)
|
|
|
|
|
|
|
|
|
|
// Re-run fetchContext for dependents in order
|
|
|
|
|
for (const id of toRefresh) {
|
|
|
|
|
const source = graph.sources.get(id)
|
2026-02-14 16:20:24 +00:00
|
|
|
if (source) {
|
2026-01-24 22:42:00 +00:00
|
|
|
try {
|
2026-03-01 22:52:41 +00:00
|
|
|
const entries = await source.fetchContext(this.context)
|
|
|
|
|
if (entries) {
|
|
|
|
|
this.context.set(entries)
|
2026-02-14 16:20:24 +00:00
|
|
|
}
|
2026-01-24 22:42:00 +00:00
|
|
|
} catch {
|
|
|
|
|
// Errors during reactive updates are logged but don't stop propagation
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Collect items from all sources
|
|
|
|
|
const items: FeedItem[] = []
|
|
|
|
|
const errors: SourceError[] = []
|
|
|
|
|
|
|
|
|
|
for (const source of graph.sorted) {
|
|
|
|
|
if (source.fetchItems) {
|
|
|
|
|
try {
|
|
|
|
|
const sourceItems = await source.fetchItems(this.context)
|
|
|
|
|
items.push(...sourceItems)
|
|
|
|
|
} catch (err) {
|
|
|
|
|
errors.push({
|
|
|
|
|
sourceId: source.id,
|
|
|
|
|
error: err instanceof Error ? err : new Error(String(err)),
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-28 15:57:01 +00:00
|
|
|
const {
|
|
|
|
|
items: processedItems,
|
|
|
|
|
groupedItems,
|
|
|
|
|
errors: postProcessorErrors,
|
2026-03-01 17:10:55 +00:00
|
|
|
} = await this.applyPostProcessors(items as TItems[], this.context, errors)
|
2026-02-28 15:57:01 +00:00
|
|
|
|
2026-02-24 01:13:41 +00:00
|
|
|
const result: FeedResult<TItems> = {
|
2026-02-15 12:26:23 +00:00
|
|
|
context: this.context,
|
2026-02-28 15:57:01 +00:00
|
|
|
items: processedItems,
|
|
|
|
|
errors: postProcessorErrors,
|
|
|
|
|
...(groupedItems.length > 0 ? { groupedItems } : {}),
|
2026-02-24 01:13:41 +00:00
|
|
|
}
|
|
|
|
|
this.updateCache(result)
|
|
|
|
|
|
|
|
|
|
this.notifySubscribers(result)
|
2026-01-24 22:42:00 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private collectDependents(sourceId: string, graph: SourceGraph): string[] {
|
|
|
|
|
const result: string[] = []
|
|
|
|
|
const visited = new Set<string>()
|
|
|
|
|
|
|
|
|
|
const collect = (id: string): void => {
|
|
|
|
|
const deps = graph.dependents.get(id) ?? []
|
|
|
|
|
for (const dep of deps) {
|
|
|
|
|
if (!visited.has(dep)) {
|
|
|
|
|
visited.add(dep)
|
|
|
|
|
result.push(dep)
|
|
|
|
|
collect(dep)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
collect(sourceId)
|
|
|
|
|
|
|
|
|
|
// Return in topological order
|
|
|
|
|
return graph.sorted.filter((s) => result.includes(s.id)).map((s) => s.id)
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-24 01:13:41 +00:00
|
|
|
private updateCache(result: FeedResult<TItems>): void {
|
|
|
|
|
this.cachedResult = result
|
|
|
|
|
this.cachedAt = Date.now()
|
|
|
|
|
if (this.started) {
|
|
|
|
|
this.scheduleNextRefresh()
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private scheduleNextRefresh(): void {
|
|
|
|
|
this.cancelScheduledRefresh()
|
|
|
|
|
this.refreshTimer = setTimeout(() => {
|
|
|
|
|
this.refresh()
|
|
|
|
|
.then((result) => {
|
|
|
|
|
this.notifySubscribers(result)
|
|
|
|
|
})
|
|
|
|
|
.catch(() => {
|
|
|
|
|
// Periodic refresh errors are non-fatal; schedule next attempt
|
|
|
|
|
if (this.started) {
|
|
|
|
|
this.scheduleNextRefresh()
|
|
|
|
|
}
|
|
|
|
|
})
|
|
|
|
|
}, this.cacheTtlMs)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private cancelScheduledRefresh(): void {
|
|
|
|
|
if (this.refreshTimer !== null) {
|
|
|
|
|
clearTimeout(this.refreshTimer)
|
|
|
|
|
this.refreshTimer = null
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2026-01-24 22:42:00 +00:00
|
|
|
private scheduleRefresh(): void {
|
|
|
|
|
// Simple immediate refresh for now - could add debouncing later
|
2026-02-24 01:13:41 +00:00
|
|
|
this.refresh()
|
|
|
|
|
.then((result) => {
|
|
|
|
|
this.notifySubscribers(result)
|
|
|
|
|
})
|
|
|
|
|
.catch(() => {
|
|
|
|
|
// Reactive refresh errors are non-fatal
|
|
|
|
|
})
|
2026-01-24 22:42:00 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private notifySubscribers(result: FeedResult<TItems>): void {
|
|
|
|
|
this.subscribers.forEach((callback) => {
|
|
|
|
|
try {
|
|
|
|
|
callback(result)
|
|
|
|
|
} catch {
|
|
|
|
|
// Subscriber errors shouldn't break other subscribers
|
|
|
|
|
}
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2026-03-01 17:26:25 +00:00
|
|
|
function clamp(value: number, min: number, max: number): number {
|
|
|
|
|
return Math.min(max, Math.max(min, value))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
function applyBoostOrder<T extends FeedItem>(items: T[], boostScores: Map<string, number>): T[] {
|
|
|
|
|
const positive: T[] = []
|
|
|
|
|
const neutral: T[] = []
|
|
|
|
|
const negative: T[] = []
|
|
|
|
|
|
|
|
|
|
for (const item of items) {
|
|
|
|
|
const raw = boostScores.get(item.id)
|
|
|
|
|
if (raw === undefined || raw === 0) {
|
|
|
|
|
neutral.push(item)
|
|
|
|
|
} else {
|
|
|
|
|
const clamped = clamp(raw, -1, 1)
|
|
|
|
|
if (clamped > 0) {
|
|
|
|
|
positive.push(item)
|
|
|
|
|
} else if (clamped < 0) {
|
|
|
|
|
negative.push(item)
|
|
|
|
|
} else {
|
|
|
|
|
neutral.push(item)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Sort positive descending by boost, negative descending (least negative first, most negative last)
|
|
|
|
|
positive.sort((a, b) => {
|
|
|
|
|
const aScore = clamp(boostScores.get(a.id) ?? 0, -1, 1)
|
|
|
|
|
const bScore = clamp(boostScores.get(b.id) ?? 0, -1, 1)
|
|
|
|
|
return bScore - aScore
|
|
|
|
|
})
|
|
|
|
|
|
|
|
|
|
negative.sort((a, b) => {
|
|
|
|
|
const aScore = clamp(boostScores.get(a.id) ?? 0, -1, 1)
|
|
|
|
|
const bScore = clamp(boostScores.get(b.id) ?? 0, -1, 1)
|
|
|
|
|
return bScore - aScore
|
|
|
|
|
})
|
|
|
|
|
|
|
|
|
|
return [...positive, ...neutral, ...negative]
|
|
|
|
|
}
|
|
|
|
|
|
2026-01-24 22:42:00 +00:00
|
|
|
/**
 * Builds a validated SourceGraph from the given sources.
 *
 * Verifies every declared dependency is registered, rejects cycles (the
 * error message includes the offending path), topologically sorts the
 * sources (dependencies before dependents), and builds the reverse
 * dependency map used for reactive propagation.
 *
 * @throws Error if a dependency is missing or a circular dependency exists.
 */
function buildGraph(sources: FeedSource[]): SourceGraph {
  const byId = new Map<string, FeedSource>()
  for (const source of sources) {
    byId.set(source.id, source)
  }

  // Validate dependencies exist
  for (const source of sources) {
    for (const dep of source.dependencies ?? []) {
      if (!byId.has(dep)) {
        throw new Error(`Source "${source.id}" depends on "${dep}" which is not registered`)
      }
    }
  }

  // Check for cycles and topologically sort
  const visited = new Set<string>() // fully processed nodes
  const visiting = new Set<string>() // nodes on the current DFS path
  const sorted: FeedSource[] = []

  // Depth-first visit; `path` carries the chain of IDs for cycle reporting.
  function visit(id: string, path: string[]): void {
    if (visiting.has(id)) {
      // Hit a node already on the active path => cycle; report only the loop.
      const cycle = [...path.slice(path.indexOf(id)), id].join(" → ")
      throw new Error(`Circular dependency detected: ${cycle}`)
    }
    if (visited.has(id)) return

    visiting.add(id)
    const source = byId.get(id)! // safe: ids come from byId and deps were validated above
    for (const dep of source.dependencies ?? []) {
      visit(dep, [...path, id])
    }
    visiting.delete(id)
    visited.add(id)
    sorted.push(source) // post-order push => dependencies precede dependents
  }

  for (const source of sources) {
    visit(source.id, [])
  }

  // Build reverse dependency map
  const dependents = new Map<string, string[]>()
  for (const source of sources) {
    for (const dep of source.dependencies ?? []) {
      const list = dependents.get(dep) ?? []
      list.push(source.id)
      dependents.set(dep, list)
    }
  }

  return { sources: byId, sorted, dependents }
}
|