Compare commits

..

1 Commit

Author SHA1 Message Date
851e6af951 feat: add generic CalDAV calendar data source
Add @aris/source-caldav package that fetches calendar events from any
CalDAV server via tsdav + ical.js.

- Supports Basic auth and OAuth via explicit authMethod discriminant
- serverUrl provided at construction time, not hardcoded
- Optional timeZone for correct local day boundaries
- Credentials cleared from memory after client login
- Failed calendar fetches logged, not silently dropped
- Login promise cached with retry on failure

Co-authored-by: Ona <no-reply@ona.com>
2026-02-28 15:35:22 +00:00
7 changed files with 16 additions and 669 deletions

View File

@@ -1,43 +0,0 @@
---
name: gpg-commit-signing
description: Sign git commits with GPG in non-interactive environments. Use when committing code and the `GPG_PRIVATE_KEY_PASSPHRASE` environment variable is available. Triggers on "commit", "sign commit", "GPG", "git commit -S", or any git operation requiring signed commits.
---
# GPG Commit Signing
Sign commits in headless/non-interactive environments where `/dev/tty` is unavailable.
## Workflow
1. Check whether `GPG_PRIVATE_KEY_PASSPHRASE` is set:
```bash
test -n "$GPG_PRIVATE_KEY_PASSPHRASE" && echo "available" || echo "not set"
```
If not set, skip signing — commit without `-S`.
2. Try a direct signed commit first — the environment may already have loopback pinentry configured:
```bash
git commit -S -m "message"
```
If this succeeds, no further steps are needed.
3. If step 2 fails with a `/dev/tty` error, use `--pinentry-mode loopback` via a wrapper script:
```bash
printf '#!/bin/sh\ngpg --batch --pinentry-mode loopback --passphrase "$GPG_PRIVATE_KEY_PASSPHRASE" "$@"\n' > /tmp/gpg-sign.sh
chmod +x /tmp/gpg-sign.sh
git -c gpg.program=/tmp/gpg-sign.sh commit -S -m "message"
rm /tmp/gpg-sign.sh
```
This passes the passphrase directly to gpg on each signing invocation, bypassing the need for a configured gpg-agent.
## Anti-patterns
- Do not echo or log `GPG_PRIVATE_KEY_PASSPHRASE`.
- Do not commit without `-S` when the passphrase is available — the project expects signed commits.
- Do not leave wrapper scripts on disk after committing.

View File

@@ -1,7 +1,6 @@
import type { ActionDefinition } from "./action"
import type { Context } from "./context"
import type { FeedItem } from "./feed"
import type { FeedPostProcessor, ItemGroup } from "./feed-post-processor"
import type { FeedSource } from "./feed-source"
export interface SourceError {
@@ -13,8 +12,6 @@ export interface FeedResult<TItem extends FeedItem = FeedItem> {
context: Context
items: TItem[]
errors: SourceError[]
/** Item groups produced by post-processors */
groupedItems?: ItemGroup[]
}
export type FeedSubscriber<TItem extends FeedItem = FeedItem> = (result: FeedResult<TItem>) => void
@@ -69,7 +66,6 @@ export class FeedEngine<TItems extends FeedItem = FeedItem> {
private subscribers = new Set<FeedSubscriber<TItems>>()
private cleanups: Array<() => void> = []
private started = false
private postProcessors: FeedPostProcessor[] = []
private readonly cacheTtlMs: number
private cachedResult: FeedResult<TItems> | null = null
@@ -112,23 +108,6 @@ export class FeedEngine<TItems extends FeedItem = FeedItem> {
return this
}
/**
* Registers a post-processor. Processors run in registration order
* after items are collected, on every update path.
*/
registerPostProcessor(processor: FeedPostProcessor): this {
this.postProcessors.push(processor)
return this
}
/**
* Unregisters a post-processor by reference.
*/
unregisterPostProcessor(processor: FeedPostProcessor): this {
this.postProcessors = this.postProcessors.filter((p) => p !== processor)
return this
}
/**
* Refreshes the feed by running all sources in dependency order.
* Calls fetchContext() then fetchItems() on each source.
@@ -173,18 +152,7 @@ export class FeedEngine<TItems extends FeedItem = FeedItem> {
this.context = context
const {
items: processedItems,
groupedItems,
errors: postProcessorErrors,
} = await this.applyPostProcessors(items as TItems[], errors)
const result: FeedResult<TItems> = {
context,
items: processedItems,
errors: postProcessorErrors,
...(groupedItems.length > 0 ? { groupedItems } : {}),
}
const result: FeedResult<TItems> = { context, items: items as TItems[], errors }
this.updateCache(result)
return result
@@ -292,58 +260,6 @@ export class FeedEngine<TItems extends FeedItem = FeedItem> {
return actions
}
/**
 * Runs every registered post-processor over the collected items, in
 * registration order, and merges their enhancement directives.
 *
 * Per processor:
 * - `additionalItems` are appended to the working item list.
 * - `suppress` removes items (by ID) from the working list.
 * - `groupedItems` are accumulated across all processors.
 *
 * A throwing processor is recorded as a SourceError — attributed to the
 * function's `name`, or "anonymous" — and the item list is rolled back
 * to the snapshot taken before that processor ran, so a partial mutation
 * from a failed processor never leaks into the result. The pipeline then
 * continues with the remaining processors.
 *
 * @param items  Items collected from all sources (possibly already typed
 *               as TItems by the caller's cast).
 * @param errors Source errors gathered so far; copied, never mutated.
 * @returns The final item list, groups pruned of stale IDs (empty groups
 *          dropped), and the combined error list.
 */
private async applyPostProcessors(
  items: TItems[],
  errors: SourceError[],
): Promise<{ items: TItems[]; groupedItems: ItemGroup[]; errors: SourceError[] }> {
  let currentItems = items
  const allGroupedItems: ItemGroup[] = []
  const allErrors = [...errors]
  for (const processor of this.postProcessors) {
    // Snapshot so a throwing processor cannot leave the list half-mutated.
    const snapshot = currentItems
    try {
      const enhancement = await processor(currentItems)
      if (enhancement.additionalItems?.length) {
        // Post-processors operate on FeedItem[] without knowledge of TItems.
        // Additional items are merged untyped — this is intentional. The
        // processor contract is "FeedItem in, FeedItem out"; type narrowing
        // is the caller's responsibility when consuming FeedResult.
        currentItems = [...currentItems, ...(enhancement.additionalItems as TItems[])]
      }
      if (enhancement.suppress?.length) {
        const suppressSet = new Set(enhancement.suppress)
        currentItems = currentItems.filter((item) => !suppressSet.has(item.id))
      }
      if (enhancement.groupedItems?.length) {
        allGroupedItems.push(...enhancement.groupedItems)
      }
    } catch (err) {
      // Attribute the failure to the processor's function name when it has one.
      const sourceId = processor.name || "anonymous"
      allErrors.push({
        sourceId,
        error: err instanceof Error ? err : new Error(String(err)),
      })
      // Roll back any partial changes from the failed processor.
      currentItems = snapshot
    }
  }
  // Remove stale item IDs from groups and drop empty groups
  const itemIds = new Set(currentItems.map((item) => item.id))
  const validGroups = allGroupedItems.reduce<ItemGroup[]>((acc, group) => {
    const ids = group.itemIds.filter((id) => itemIds.has(id))
    if (ids.length > 0) {
      acc.push({ ...group, itemIds: ids })
    }
    return acc
  }, [])
  return { items: currentItems, groupedItems: validGroups, errors: allErrors }
}
private ensureGraph(): SourceGraph {
if (!this.graph) {
this.graph = buildGraph(Array.from(this.sources.values()))
@@ -395,17 +311,10 @@ export class FeedEngine<TItems extends FeedItem = FeedItem> {
}
}
const {
items: processedItems,
groupedItems,
errors: postProcessorErrors,
} = await this.applyPostProcessors(items as TItems[], errors)
const result: FeedResult<TItems> = {
context: this.context,
items: processedItems,
errors: postProcessorErrors,
...(groupedItems.length > 0 ? { groupedItems } : {}),
items: items as TItems[],
errors,
}
this.updateCache(result)

View File

@@ -1,443 +0,0 @@
import { describe, expect, mock, test } from "bun:test"
import type { ActionDefinition, FeedItem, FeedPostProcessor, FeedSource } from "./index"
import { FeedEngine } from "./feed-engine"
import { UnknownActionError } from "./index"
// Shared stub action handlers for test sources: nothing is listed, and
// executing any action raises UnknownActionError.
const noActions = {
  listActions: async (): Promise<Record<string, ActionDefinition>> => ({}),
  executeAction: async (id: string): Promise<void> => {
    throw new UnknownActionError(id)
  },
}
// =============================================================================
// FEED ITEMS
// =============================================================================
type WeatherItem = FeedItem<"weather", { temp: number }>
type CalendarItem = FeedItem<"calendar", { title: string }>
/** Builds a weather FeedItem carrying a temperature reading. */
function weatherItem(id: string, temp: number): WeatherItem {
  const data = { temp }
  return { id, type: "weather", timestamp: new Date(), data }
}
/** Builds a calendar FeedItem carrying an event title. */
function calendarItem(id: string, title: string): CalendarItem {
  const data = { title }
  return { id, type: "calendar", timestamp: new Date(), data }
}
// =============================================================================
// TEST SOURCES
// =============================================================================
/** Fake weather source that always resolves to the given fixed items. */
function createWeatherSource(items: WeatherItem[]) {
  return {
    ...noActions,
    id: "aris.weather",
    fetchContext: async () => null,
    fetchItems: async (): Promise<WeatherItem[]> => items,
  }
}
/** Fake calendar source that always resolves to the given fixed items. */
function createCalendarSource(items: CalendarItem[]) {
  return {
    ...noActions,
    id: "aris.calendar",
    fetchContext: async () => null,
    fetchItems: async (): Promise<CalendarItem[]> => items,
  }
}
// =============================================================================
// REGISTRATION
// =============================================================================
describe("FeedPostProcessor", () => {
describe("registration", () => {
test("registerPostProcessor is chainable", () => {
const engine = new FeedEngine()
const processor: FeedPostProcessor = async () => ({})
const result = engine.registerPostProcessor(processor)
expect(result).toBe(engine)
})
test("unregisterPostProcessor is chainable", () => {
const engine = new FeedEngine()
const processor: FeedPostProcessor = async () => ({})
const result = engine.unregisterPostProcessor(processor)
expect(result).toBe(engine)
})
test("unregistered processor does not run", async () => {
const processor = mock(async () => ({}))
const engine = new FeedEngine()
.register(createWeatherSource([weatherItem("w1", 20)]))
.registerPostProcessor(processor)
.unregisterPostProcessor(processor)
await engine.refresh()
expect(processor).not.toHaveBeenCalled()
})
})
// =============================================================================
// ADDITIONAL ITEMS
// =============================================================================
describe("additionalItems", () => {
test("injects additional items into the feed", async () => {
const extra = calendarItem("c1", "Meeting")
const engine = new FeedEngine()
.register(createWeatherSource([weatherItem("w1", 20)]))
.registerPostProcessor(async () => ({ additionalItems: [extra] }))
const result = await engine.refresh()
expect(result.items).toHaveLength(2)
expect(result.items.find((i) => i.id === "c1")).toBeDefined()
})
})
// =============================================================================
// SUPPRESS
// =============================================================================
describe("suppress", () => {
test("removes suppressed items from the feed", async () => {
const engine = new FeedEngine()
.register(createWeatherSource([weatherItem("w1", 20), weatherItem("w2", 25)]))
.registerPostProcessor(async () => ({ suppress: ["w1"] }))
const result = await engine.refresh()
expect(result.items).toHaveLength(1)
expect(result.items[0].id).toBe("w2")
})
test("suppressing nonexistent ID is a no-op", async () => {
const engine = new FeedEngine()
.register(createWeatherSource([weatherItem("w1", 20)]))
.registerPostProcessor(async () => ({ suppress: ["nonexistent"] }))
const result = await engine.refresh()
expect(result.items).toHaveLength(1)
})
})
// =============================================================================
// GROUPED ITEMS
// =============================================================================
describe("groupedItems", () => {
test("accumulates grouped items on FeedResult", async () => {
const engine = new FeedEngine()
.register(
createCalendarSource([calendarItem("c1", "Meeting A"), calendarItem("c2", "Meeting B")]),
)
.registerPostProcessor(async () => ({
groupedItems: [{ itemIds: ["c1", "c2"], summary: "Busy afternoon" }],
}))
const result = await engine.refresh()
expect(result.groupedItems).toEqual([{ itemIds: ["c1", "c2"], summary: "Busy afternoon" }])
})
test("multiple processors accumulate groups", async () => {
const engine = new FeedEngine()
.register(
createCalendarSource([calendarItem("c1", "Meeting A"), calendarItem("c2", "Meeting B")]),
)
.registerPostProcessor(async () => ({
groupedItems: [{ itemIds: ["c1"], summary: "Group A" }],
}))
.registerPostProcessor(async () => ({
groupedItems: [{ itemIds: ["c2"], summary: "Group B" }],
}))
const result = await engine.refresh()
expect(result.groupedItems).toEqual([
{ itemIds: ["c1"], summary: "Group A" },
{ itemIds: ["c2"], summary: "Group B" },
])
})
test("stale item IDs are removed from groups after suppression", async () => {
const engine = new FeedEngine()
.register(
createCalendarSource([calendarItem("c1", "Meeting A"), calendarItem("c2", "Meeting B")]),
)
.registerPostProcessor(async () => ({
groupedItems: [{ itemIds: ["c1", "c2"], summary: "Afternoon" }],
}))
.registerPostProcessor(async () => ({ suppress: ["c1"] }))
const result = await engine.refresh()
expect(result.groupedItems).toEqual([{ itemIds: ["c2"], summary: "Afternoon" }])
})
test("groups with all items suppressed are dropped", async () => {
const engine = new FeedEngine()
.register(createCalendarSource([calendarItem("c1", "Meeting A")]))
.registerPostProcessor(async () => ({
groupedItems: [{ itemIds: ["c1"], summary: "Solo" }],
}))
.registerPostProcessor(async () => ({ suppress: ["c1"] }))
const result = await engine.refresh()
expect(result.groupedItems).toBeUndefined()
})
test("groupedItems is omitted when no processors produce groups", async () => {
const engine = new FeedEngine()
.register(createWeatherSource([weatherItem("w1", 20)]))
.registerPostProcessor(async () => ({}))
const result = await engine.refresh()
expect(result.groupedItems).toBeUndefined()
})
})
// =============================================================================
// PIPELINE ORDERING
// =============================================================================
describe("pipeline ordering", () => {
test("each processor sees items as modified by the previous processor", async () => {
const seen: string[] = []
const engine = new FeedEngine()
.register(createWeatherSource([weatherItem("w1", 20)]))
.registerPostProcessor(async () => ({
additionalItems: [calendarItem("c1", "Injected")],
}))
.registerPostProcessor(async (items) => {
seen.push(...items.map((i) => i.id))
return {}
})
await engine.refresh()
expect(seen).toEqual(["w1", "c1"])
})
test("suppression in first processor affects second processor", async () => {
const seen: string[] = []
const engine = new FeedEngine()
.register(createWeatherSource([weatherItem("w1", 20), weatherItem("w2", 25)]))
.registerPostProcessor(async () => ({ suppress: ["w1"] }))
.registerPostProcessor(async (items) => {
seen.push(...items.map((i) => i.id))
return {}
})
await engine.refresh()
expect(seen).toEqual(["w2"])
})
})
// =============================================================================
// ERROR HANDLING
// =============================================================================
describe("error handling", () => {
test("throwing processor is recorded in errors and pipeline continues", async () => {
const seen: string[] = []
async function failingProcessor(): Promise<never> {
throw new Error("processor failed")
}
const engine = new FeedEngine()
.register(createWeatherSource([weatherItem("w1", 20)]))
.registerPostProcessor(failingProcessor)
.registerPostProcessor(async (items) => {
seen.push(...items.map((i) => i.id))
return {}
})
const result = await engine.refresh()
const ppError = result.errors.find((e) => e.sourceId === "failingProcessor")
expect(ppError).toBeDefined()
expect(ppError!.error.message).toBe("processor failed")
// Pipeline continued — observer still saw the original item
expect(seen).toEqual(["w1"])
expect(result.items).toHaveLength(1)
})
test("anonymous throwing processor uses 'anonymous' as sourceId", async () => {
const engine = new FeedEngine()
.register(createWeatherSource([weatherItem("w1", 20)]))
.registerPostProcessor(async () => {
throw new Error("anon failed")
})
const result = await engine.refresh()
const ppError = result.errors.find((e) => e.sourceId === "anonymous")
expect(ppError).toBeDefined()
})
test("non-Error throw is wrapped", async () => {
async function failingProcessor(): Promise<never> {
throw "string error"
}
const engine = new FeedEngine()
.register(createWeatherSource([weatherItem("w1", 20)]))
.registerPostProcessor(failingProcessor)
const result = await engine.refresh()
const ppError = result.errors.find((e) => e.sourceId === "failingProcessor")
expect(ppError).toBeDefined()
expect(ppError!.error).toBeInstanceOf(Error)
})
})
// =============================================================================
// REACTIVE PATHS
// =============================================================================
describe("reactive updates", () => {
test("post-processors run during reactive context updates", async () => {
let callCount = 0
let triggerUpdate: ((update: Record<string, unknown>) => void) | null = null
const source: FeedSource = {
id: "aris.reactive",
...noActions,
async fetchContext() {
return null
},
async fetchItems() {
return [weatherItem("w1", 20)]
},
onContextUpdate(callback, _getContext) {
triggerUpdate = callback
return () => {
triggerUpdate = null
}
},
}
const engine = new FeedEngine()
.register(source)
.registerPostProcessor(async () => {
callCount++
return {}
})
engine.start()
// Wait for initial periodic refresh
await new Promise((resolve) => setTimeout(resolve, 50))
const countAfterStart = callCount
// Trigger a reactive context update
triggerUpdate!({ foo: "bar" })
await new Promise((resolve) => setTimeout(resolve, 50))
expect(callCount).toBeGreaterThan(countAfterStart)
engine.stop()
})
test("post-processors run during reactive item updates", async () => {
let callCount = 0
let triggerItemsUpdate: (() => void) | null = null
const source: FeedSource = {
id: "aris.reactive",
...noActions,
async fetchContext() {
return null
},
async fetchItems() {
return [weatherItem("w1", 20)]
},
onItemsUpdate(callback, _getContext) {
triggerItemsUpdate = callback
return () => {
triggerItemsUpdate = null
}
},
}
const engine = new FeedEngine()
.register(source)
.registerPostProcessor(async () => {
callCount++
return {}
})
engine.start()
await new Promise((resolve) => setTimeout(resolve, 50))
const countAfterStart = callCount
// Trigger a reactive items update
triggerItemsUpdate!()
await new Promise((resolve) => setTimeout(resolve, 50))
expect(callCount).toBeGreaterThan(countAfterStart)
engine.stop()
})
})
// =============================================================================
// NO PROCESSORS = NO CHANGE
// =============================================================================
describe("no processors", () => {
test("engine without post-processors returns raw items unchanged", async () => {
const items = [weatherItem("w1", 20), weatherItem("w2", 25)]
const engine = new FeedEngine().register(createWeatherSource(items))
const result = await engine.refresh()
expect(result.items).toHaveLength(2)
expect(result.items[0].id).toBe("w1")
expect(result.items[1].id).toBe("w2")
expect(result.groupedItems).toBeUndefined()
})
})
// =============================================================================
// COMBINED ENHANCEMENT
// =============================================================================
describe("combined enhancement", () => {
test("single processor can use all enhancement fields at once", async () => {
const engine = new FeedEngine()
.register(createWeatherSource([weatherItem("w1", 20), weatherItem("w2", 25)]))
.registerPostProcessor(async () => ({
additionalItems: [calendarItem("c1", "Injected")],
suppress: ["w2"],
groupedItems: [{ itemIds: ["w1", "c1"], summary: "Related" }],
}))
const result = await engine.refresh()
// w2 suppressed, c1 injected → w1 + c1
expect(result.items).toHaveLength(2)
expect(result.items.map((i) => i.id)).toEqual(["w1", "c1"])
// Groups on result
expect(result.groupedItems).toEqual([{ itemIds: ["w1", "c1"], summary: "Related" }])
})
})
})

View File

@@ -1,23 +0,0 @@
import type { FeedItem } from "./feed"
/**
 * A set of feed items to present together under a single summary line.
 * The engine prunes IDs that no longer match a live item and drops
 * groups that end up empty.
 */
export interface ItemGroup {
  /** IDs of items to present together */
  itemIds: string[]
  /** Summary text for the group */
  summary: string
}
/**
 * Directives returned by a post-processor. All fields are optional;
 * an empty object means "no changes".
 */
export interface FeedEnhancement {
  /** New items to inject into the feed */
  additionalItems?: FeedItem[]
  /** Groups of items to present together with a summary */
  groupedItems?: ItemGroup[]
  /** Item IDs to remove from the feed */
  suppress?: string[]
}
/**
 * A function that transforms feed items and produces enhancement directives.
 *
 * Receives the current item list (as modified by any earlier processors)
 * and returns directives to inject, suppress, or group items.
 *
 * Use named functions for meaningful error attribution — a thrown error
 * is recorded under the function's `name`, falling back to "anonymous".
 */
export type FeedPostProcessor = (items: FeedItem[]) => Promise<FeedEnhancement>

View File

@@ -13,9 +13,6 @@ export { TimeRelevance } from "./feed"
// Feed Source
export type { FeedSource } from "./feed-source"
// Feed Post-Processor
export type { FeedEnhancement, FeedPostProcessor, ItemGroup } from "./feed-post-processor"
// Feed Engine
export type { FeedEngineConfig, FeedResult, FeedSubscriber, SourceError } from "./feed-engine"
export { FeedEngine } from "./feed-engine"

View File

@@ -479,29 +479,4 @@ describe("computeSignals", () => {
})
expect(computeSignals(event2h1m, now).urgency).toBe(0.5)
})
test("cancelled events get urgency 0.1 regardless of timing", () => {
const event = makeEvent({
status: "cancelled",
startDate: new Date("2026-01-15T12:20:00Z"), // would be 0.9 if not cancelled
})
const signals = computeSignals(event, now)
expect(signals.urgency).toBe(0.1)
expect(signals.timeRelevance).toBe(TimeRelevance.Ambient)
})
test("uses timezone for 'later today' boundary", () => {
// now = 2026-01-15T12:00:00Z = 2026-01-15T21:00:00 JST (UTC+9)
// event at 2026-01-15T15:30:00Z = 2026-01-16T00:30:00 JST — next day in JST
const event = makeEvent({
startDate: new Date("2026-01-15T15:30:00Z"),
})
// Without timezone: UTC day ends at 2026-01-16T00:00:00Z, event is before that → "later today"
expect(computeSignals(event, now).urgency).toBe(0.5)
// With Asia/Tokyo: local day ends at 2026-01-15T15:00:00Z (midnight Jan 16 JST),
// event is after that → "future days"
expect(computeSignals(event, now, "Asia/Tokyo").urgency).toBe(0.2)
})
})

View File

@@ -4,7 +4,6 @@ import { TimeRelevance, UnknownActionError } from "@aris/core"
import { DAVClient } from "tsdav"
import type { CalDavDAVClient, CalDavEventData, CalDavFeedItem } from "./types.ts"
import { CalDavEventStatus } from "./types.ts"
import { CalDavCalendarKey, type CalendarContext } from "./calendar-context.ts"
import { parseICalEvents } from "./ical-parser.ts"
@@ -76,7 +75,6 @@ export class CalDavSource implements FeedSource<CalDavFeedItem> {
private readonly injectedClient: CalDavDAVClient | null
private clientPromise: Promise<CalDavDAVClient> | null = null
private cachedEvents: { time: Date; events: CalDavEventData[] } | null = null
private pendingFetch: { time: Date; promise: Promise<CalDavEventData[]> } | null = null
constructor(options: CalDavSourceOptions) {
this.options = options
@@ -107,10 +105,9 @@ export class CalDavSource implements FeedSource<CalDavFeedItem> {
}
const now = context.time
const active = events.filter((e) => e.status !== CalDavEventStatus.Cancelled)
const inProgress = active.filter((e) => !e.isAllDay && e.startDate <= now && e.endDate > now)
const inProgress = events.filter((e) => !e.isAllDay && e.startDate <= now && e.endDate > now)
const upcoming = active
const upcoming = events
.filter((e) => !e.isAllDay && e.startDate > now)
.sort((a, b) => a.startDate.getTime() - b.startDate.getTime())
@@ -127,30 +124,14 @@ export class CalDavSource implements FeedSource<CalDavFeedItem> {
async fetchItems(context: Context): Promise<CalDavFeedItem[]> {
const now = context.time
const events = await this.fetchEvents(context)
return events.map((event) => createFeedItem(event, now, this.timeZone))
return events.map((event) => createFeedItem(event, now))
}
private fetchEvents(context: Context): Promise<CalDavEventData[]> {
private async fetchEvents(context: Context): Promise<CalDavEventData[]> {
if (this.cachedEvents && this.cachedEvents.time === context.time) {
return Promise.resolve(this.cachedEvents.events)
return this.cachedEvents.events
}
// Deduplicate concurrent fetches for the same context.time reference
if (this.pendingFetch && this.pendingFetch.time === context.time) {
return this.pendingFetch.promise
}
const promise = this.doFetchEvents(context).finally(() => {
if (this.pendingFetch?.promise === promise) {
this.pendingFetch = null
}
})
this.pendingFetch = { time: context.time, promise }
return promise
}
private async doFetchEvents(context: Context): Promise<CalDavEventData[]> {
const client = await this.connectClient()
const calendars = await client.fetchCalendars()
@@ -292,15 +273,7 @@ function startOfDay(date: Date, timeZone?: string): Date {
return new Date(Date.UTC(year, month - 1, day) - offsetMs)
}
export function computeSignals(
event: CalDavEventData,
now: Date,
timeZone?: string,
): FeedItemSignals {
if (event.status === CalDavEventStatus.Cancelled) {
return { urgency: 0.1, timeRelevance: TimeRelevance.Ambient }
}
export function computeSignals(event: CalDavEventData, now: Date): FeedItemSignals {
if (event.isAllDay) {
return { urgency: 0.3, timeRelevance: TimeRelevance.Ambient }
}
@@ -325,9 +298,11 @@ export function computeSignals(
return { urgency: 0.7, timeRelevance: TimeRelevance.Upcoming }
}
// Later today (using local day boundary when timeZone is set)
const todayStart = startOfDay(now, timeZone)
const endOfDay = new Date(todayStart.getTime() + 24 * 60 * 60 * 1000)
// Later today
const startOfDay = new Date(now)
startOfDay.setUTCHours(0, 0, 0, 0)
const endOfDay = new Date(startOfDay)
endOfDay.setUTCDate(endOfDay.getUTCDate() + 1)
if (event.startDate.getTime() < endOfDay.getTime()) {
return { urgency: 0.5, timeRelevance: TimeRelevance.Upcoming }
@@ -337,12 +312,12 @@ export function computeSignals(
return { urgency: 0.2, timeRelevance: TimeRelevance.Ambient }
}
function createFeedItem(event: CalDavEventData, now: Date, timeZone?: string): CalDavFeedItem {
function createFeedItem(event: CalDavEventData, now: Date): CalDavFeedItem {
return {
id: `caldav-event-${event.uid}${event.recurrenceId ? `-${event.recurrenceId}` : ""}`,
type: "caldav-event",
timestamp: now,
data: event,
signals: computeSignals(event, now, timeZone),
signals: computeSignals(event, now),
}
}