Add upstream feed watcher
All checks were successful
check / check (push) Successful in 35s

This commit is contained in:
matamune 2026-05-12 22:37:26 +00:00
parent b455647580
commit 816881c2cc
Signed by: matamune
GPG key ID: 3BB8E7D3B968A324
10 changed files with 654 additions and 6 deletions

View file

@ -7,6 +7,7 @@ RUN bun install --frozen-lockfile
COPY tsconfig.json ./
COPY src ./src
COPY feed-sources.json ./
ENV HOST=0.0.0.0
ENV PORT=3000

View file

@ -20,6 +20,7 @@ JOJO_WEBHOOK_SECRET=...
GITHUB_WEBHOOK_SECRET=...
DISCORD_WEBHOOK_URL=
DISCORD_NOTIFY_EVENTS=push,pull_request,release
FEED_SOURCES_PATH=./feed-sources.json
```
Discord notifications are optional. When `DISCORD_WEBHOOK_URL` is unset, the
@ -36,3 +37,8 @@ bun run dev
Accepted webhook events are appended to `DATA_DIR/events.jsonl`; queued work
items are appended to `DATA_DIR/jobs.jsonl`.
Feed watcher sources are configured in `feed-sources.json`. The first poll primes
`DATA_DIR/feed-state.json`; later polls append upstream activity to
`DATA_DIR/feed-events.jsonl` and release-triggered fork sync work to
`DATA_DIR/feed-jobs.jsonl`.

84
feed-sources.json Normal file
View file

@ -0,0 +1,84 @@
{
"sources": [
{
"id": "codeberg-forgejo-branch",
"provider": "codeberg",
"url": "https://codeberg.org/forgejo/forgejo/rss/branch/forgejo",
"event": "push",
"repo": {
"owner": "forgejo",
"name": "forgejo",
"fullName": "forgejo/forgejo",
"webUrl": "https://codeberg.org/forgejo/forgejo",
"defaultBranch": "forgejo"
},
"target": {
"provider": "jojo",
"repoFullName": "peezy-tech/jojo",
"branch": "forgejo",
"mode": "notify_only"
},
"pollIntervalSeconds": 300
},
{
"id": "codeberg-forgejo-releases",
"provider": "codeberg",
"url": "https://codeberg.org/forgejo/forgejo/releases.atom",
"event": "release",
"repo": {
"owner": "forgejo",
"name": "forgejo",
"fullName": "forgejo/forgejo",
"webUrl": "https://codeberg.org/forgejo/forgejo",
"defaultBranch": "forgejo"
},
"target": {
"provider": "jojo",
"repoFullName": "peezy-tech/jojo",
"branch": "forgejo",
"mode": "fork_sync"
},
"pollIntervalSeconds": 300
},
{
"id": "github-openai-codex-main",
"provider": "github",
"url": "https://github.com/openai/codex/commits/main.atom",
"event": "push",
"repo": {
"owner": "openai",
"name": "codex",
"fullName": "openai/codex",
"webUrl": "https://github.com/openai/codex",
"defaultBranch": "main"
},
"target": {
"provider": "github",
"repoFullName": "peezy-tech/codex",
"branch": "main",
"mode": "notify_only"
},
"pollIntervalSeconds": 300
},
{
"id": "github-openai-codex-releases",
"provider": "github",
"url": "https://github.com/openai/codex/releases.atom",
"event": "release",
"repo": {
"owner": "openai",
"name": "codex",
"fullName": "openai/codex",
"webUrl": "https://github.com/openai/codex",
"defaultBranch": "main"
},
"target": {
"provider": "github",
"repoFullName": "peezy-tech/codex",
"branch": "main",
"mode": "fork_sync"
},
"pollIntervalSeconds": 300
}
]
}

View file

@ -1,4 +1,4 @@
import type { GitWebhookEvent, QueuedJob } from "./types";
import type { FeedJob, FeedSignal, GitWebhookEvent, QueuedJob } from "./types";
type DiscordEmbedField = {
name: string;
@ -31,8 +31,9 @@ export type DiscordConfig = {
};
export type DiscordNotification = {
event: GitWebhookEvent;
job?: QueuedJob | null;
event?: GitWebhookEvent;
job?: QueuedJob | FeedJob | null;
signal?: FeedSignal;
};
const defaultNotifyEvents = ["push", "pull_request", "release"];
@ -76,6 +77,14 @@ function eventTitle(event: GitWebhookEvent): string {
return `[${event.provider}] ${repo} ${event.event}`;
}
/**
 * Build the embed title for a feed signal: push signals become
 * "upstream update [on <branch>]", anything else is rendered as a release.
 */
function feedTitle(signal: FeedSignal): string {
  const headsPrefix = "refs/heads/";
  const branch = signal.ref && signal.ref.startsWith(headsPrefix)
    ? signal.ref.slice(headsPrefix.length)
    : undefined;
  if (signal.event !== "push") {
    return `[${signal.provider}] ${signal.repo.fullName} release ${signal.title}`;
  }
  const suffix = branch ? ` on ${branch}` : "";
  return `[${signal.provider}] ${signal.repo.fullName} upstream update${suffix}`;
}
function rawRecord(event: GitWebhookEvent): Record<string, unknown> {
return typeof event.raw === "object" && event.raw !== null ? event.raw as Record<string, unknown> : {};
}
@ -109,6 +118,41 @@ function field(name: string, value?: string, inline = true): DiscordEmbedField |
}
export function buildDiscordPayload(input: DiscordNotification): DiscordPayload {
if (input.signal) {
const { signal, job } = input;
const fields = [
field("Provider", signal.provider),
field("Repo", signal.repo.fullName),
field("Event", signal.event),
field("Branch", branchName(signal.ref)),
field("Author", signal.author),
field("SHA", shortSha(signal.sha)),
field("Queued", job ? job.kind : undefined),
field("Source", signal.sourceId, false),
].filter((item): item is DiscordEmbedField => item !== null);
return {
username: "git-webhooks",
embeds: [
{
title: feedTitle(signal).slice(0, 256),
description: signal.title.slice(0, 2048),
url: signal.url,
color: signal.provider === "github" ? 0x24292f : 0x2185d0,
fields,
timestamp: signal.publishedAt,
footer: {
text: "feed watcher",
},
},
],
};
}
if (!input.event) {
throw new Error("Discord notification missing event or signal");
}
const { event, job } = input;
const fields = [
field("Provider", event.provider),
@ -143,7 +187,8 @@ export async function notifyDiscord(
notification: DiscordNotification,
fetchImpl: FetchLike = fetch,
): Promise<void> {
if (!config.webhookUrl || !config.notifyEvents.has(notification.event.event)) {
const eventName = notification.signal?.event ?? notification.event?.event;
if (!config.webhookUrl || !eventName || !config.notifyEvents.has(eventName)) {
return;
}

236
src/feed.ts Normal file
View file

@ -0,0 +1,236 @@
import { readFile, writeFile } from "node:fs/promises";
import { dirname, join } from "node:path";
import { mkdir } from "node:fs/promises";
import { notifyDiscord, type DiscordConfig } from "./discord";
import { EventStore, jobForFeedSignal } from "./queue";
import type { FeedEventName, FeedSignal, FeedSourceConfig } from "./types";
type FeedEntry = {
id: string;
title: string;
url?: string;
author?: string;
publishedAt: string;
raw: string;
};
type FeedState = Record<string, {
lastSeenId?: string;
lastCheckedAt?: string;
}>;
type FeedPollerConfig = {
dataDir: string;
sourcesPath: string;
discord?: DiscordConfig;
};
type FetchLike = (url: string, init?: RequestInit) => Promise<Response>;
const defaultIntervalSeconds = 300;
/**
 * Decode the XML/HTML entities that appear in feed payloads and trim the result.
 *
 * Named entities and numeric character references are resolved first and
 * `&amp;` last, so an escaped sequence such as `&amp;lt;` or `&amp;#34;`
 * decodes to the literal text `&lt;` / `&#34;` instead of being decoded twice.
 * (The original ran the numeric-reference replacements after `&amp;`, which
 * double-decoded inputs like `&amp;#34;`.)
 */
function decodeXml(value: string): string {
  return value
    .replaceAll("&lt;", "<")
    .replaceAll("&gt;", ">")
    .replaceAll("&quot;", "\"")
    .replaceAll("&#34;", "\"")
    .replaceAll("&#39;", "'")
    .replaceAll("&apos;", "'")
    // Numeric references must resolve before `&amp;` to avoid double-decoding.
    .replace(/&#x([0-9a-f]+);/gi, (_match, hex: string) => String.fromCodePoint(Number.parseInt(hex, 16)))
    .replace(/&#([0-9]+);/g, (_match, code: string) => String.fromCodePoint(Number.parseInt(code, 10)))
    .replaceAll("&amp;", "&")
    .trim();
}
/**
 * Extract and entity-decode the text of the first <tag>…</tag> occurrence in a
 * block. A single wrapping CDATA section is unwrapped before decoding.
 * Returns undefined when the tag is absent.
 */
function firstTag(block: string, tag: string): string | undefined {
  const pattern = new RegExp(`<${tag}(?:\\s[^>]*)?>([\\s\\S]*?)</${tag}>`, "i");
  const found = pattern.exec(block);
  if (!found) {
    return undefined;
  }
  const inner = found[1].replace(/^<!\[CDATA\[([\s\S]*)\]\]>$/, "$1");
  return decodeXml(inner);
}
/**
 * Read an attribute value from the first opening <tag …> in a block (e.g. the
 * href of an Atom <link>). Returns undefined when the tag or attribute is
 * missing; the value is entity-decoded before being returned.
 */
function firstAttr(block: string, tag: string, attr: string): string | undefined {
  const openTag = new RegExp(`<${tag}\\b([^>]*)>`, "i").exec(block);
  if (!openTag) {
    return undefined;
  }
  const valueMatch = new RegExp(`${attr}=["']([^"']+)["']`, "i").exec(openTag[1]);
  return valueMatch ? decodeXml(valueMatch[1]) : undefined;
}
/**
 * Collect every <tag>…</tag> span in the document, case-insensitively.
 * Non-greedy matching keeps sibling blocks separate.
 */
function blocks(xml: string, tag: string): string[] {
  const pattern = new RegExp(`<${tag}\\b[\\s\\S]*?</${tag}>`, "gi");
  const found: string[] = [];
  for (const match of xml.matchAll(pattern)) {
    found.push(match[0]);
  }
  return found;
}
/**
 * Parse an Atom or RSS document into normalized feed entries.
 *
 * Atom <entry> blocks take priority; only when none are present does the
 * parser fall back to RSS <item> blocks. Entries without a usable id are
 * dropped, and missing timestamps default to "now".
 */
export function parseFeedEntries(xml: string): FeedEntry[] {
  const entryBlocks = blocks(xml, "entry");
  if (entryBlocks.length > 0) {
    return entryBlocks
      .map((block) => {
        const timestamp = firstTag(block, "updated") ?? firstTag(block, "published");
        const link = firstAttr(block, "link", "href");
        return {
          // Prefer the Atom id; fall back through link href, then title.
          id: firstTag(block, "id") ?? link ?? firstTag(block, "title") ?? "",
          title: firstTag(block, "title") ?? "Untitled feed entry",
          url: link,
          // Atom nests <name> inside <author>.
          author: firstTag(firstTag(block, "author") ?? "", "name"),
          publishedAt: timestamp ?? new Date().toISOString(),
          raw: block,
        };
      })
      .filter((entry) => entry.id);
  }
  return blocks(xml, "item")
    .map((block) => {
      const link = firstTag(block, "link");
      return {
        id: firstTag(block, "guid") ?? link ?? firstTag(block, "title") ?? "",
        title: firstTag(block, "title") ?? "Untitled feed item",
        url: link,
        author: firstTag(block, "author"),
        publishedAt: firstTag(block, "pubDate") ?? new Date().toISOString(),
        raw: block,
      };
    })
    .filter((entry) => entry.id);
}
/**
 * Pull a 40-hex-character commit SHA out of the entry's URL (preferred) or id,
 * if one is embedded; otherwise undefined.
 */
function shaFromEntry(entry: FeedEntry): string | undefined {
  const haystack = entry.url ?? entry.id;
  const found = /[0-9a-f]{40}/i.exec(haystack);
  return found ? found[0] : undefined;
}
/**
 * Derive a "ref" for the signal: push sources map to their configured default
 * branch ref; release sources reuse the entry title (the tag name); anything
 * else yields undefined.
 */
function refFromEntry(source: FeedSourceConfig, entry: FeedEntry): string | undefined {
  switch (source.event) {
    case "push":
      return source.repo.defaultBranch ? `refs/heads/${source.repo.defaultBranch}` : undefined;
    case "release":
      return entry.title;
    default:
      return undefined;
  }
}
/**
 * Convert a parsed feed entry into the normalized FeedSignal shape consumed by
 * the store, the Discord notifier, and the job queue.
 */
export function signalFromEntry(source: FeedSourceConfig, entry: FeedEntry): FeedSignal {
  // Normalize whatever timestamp format the feed used to ISO 8601.
  const publishedAt = new Date(entry.publishedAt).toISOString();
  // Compact, serializable snapshot of the entry for the JSONL log.
  const raw = {
    id: entry.id,
    title: entry.title,
    url: entry.url,
    author: entry.author,
    publishedAt: entry.publishedAt,
  };
  return {
    sourceId: source.id,
    provider: source.provider,
    event: source.event,
    entryId: entry.id,
    title: entry.title,
    url: entry.url,
    author: entry.author,
    publishedAt,
    repo: source.repo,
    ref: refFromEntry(source, entry),
    sha: shaFromEntry(entry),
    target: source.target,
    raw,
  };
}
/**
 * Load the feed source definitions from a JSON manifest.
 * A missing "sources" key yields an empty list.
 * NOTE(review): the parsed shape is not validated beyond the cast — confirm
 * the manifest is trusted input.
 */
export async function loadSources(path: string): Promise<FeedSourceConfig[]> {
  const text = await readFile(path, "utf8");
  const manifest = JSON.parse(text) as { sources?: FeedSourceConfig[] };
  return manifest.sources ?? [];
}
/**
 * Read the persisted polling state. A missing file (first run) yields an empty
 * state; any other read or parse failure propagates.
 */
async function loadState(path: string): Promise<FeedState> {
  let text: string;
  try {
    text = await readFile(path, "utf8");
  } catch (error) {
    const code = (error as { code?: unknown } | null)?.code;
    if (code === "ENOENT") {
      return {};
    }
    throw error;
  }
  return JSON.parse(text) as FeedState;
}
/**
 * Persist the polling state as pretty-printed JSON, creating the parent
 * directory on demand. A trailing newline keeps the file diff-friendly.
 */
async function saveState(path: string, state: FeedState): Promise<void> {
  const directory = dirname(path);
  await mkdir(directory, { recursive: true });
  const body = JSON.stringify(state, null, 2);
  await writeFile(path, `${body}\n`, "utf8");
}
/**
 * Return the entries newer than `lastSeenId`, oldest first.
 *
 * Feeds list newest entries first, so everything before the last-seen entry is
 * new; the slice is reversed so callers process activity in chronological
 * order. When `lastSeenId` is absent (first poll) nothing is returned, and
 * when it has rotated out of the feed every entry is treated as new.
 */
function unseenEntries(entries: FeedEntry[], lastSeenId?: string): FeedEntry[] {
  if (!lastSeenId) return [];
  const index = entries.findIndex((entry) => entry.id === lastSeenId);
  // Copy before reversing: Array#reverse mutates in place, and the original
  // code reversed the caller's `entries` array itself when the last-seen id
  // was no longer present in the feed.
  const fresh = index === -1 ? entries.slice() : entries.slice(0, index);
  return fresh.reverse();
}
/**
 * Poll a single feed source once: fetch the feed, append any unseen entries as
 * signals (plus fork-sync jobs for qualifying releases), send Discord
 * notifications, and persist the updated cursor.
 *
 * Returns the emitted signals, the number of jobs queued, and whether this
 * poll merely primed the cursor (no prior lastSeenId).
 * Throws when the feed responds with a non-2xx status; callers isolate that.
 */
export async function pollFeedSource(input: {
  source: FeedSourceConfig;
  state: FeedState;            // shared mutable state map, keyed by source id
  statePath: string;           // where the state map is persisted
  store: EventStore;
  discord?: DiscordConfig;
  fetchImpl?: FetchLike;       // injectable for tests; defaults to global fetch
}): Promise<{ signals: FeedSignal[]; jobs: number; primed: boolean }> {
  const response = await (input.fetchImpl ?? fetch)(input.source.url, {
    headers: { accept: "application/atom+xml, application/rss+xml, application/xml, text/xml;q=0.9" },
  });
  if (!response.ok) {
    throw new Error(`Feed ${input.source.id} returned ${response.status}`);
  }
  const entries = parseFeedEntries(await response.text());
  // Feeds are newest-first, so entry 0 is the cursor for the next poll.
  const newestId = entries[0]?.id;
  const previous = input.state[input.source.id];
  const primed = !previous?.lastSeenId;
  // First-ever poll: record the cursor without emitting historical entries.
  // NOTE(review): when primed, unseenEntries(..., undefined) returns [] anyway,
  // so the `primeOnly !== false` escape hatch currently has no effect — confirm
  // intended semantics of primeOnly.
  const selectedEntries = primed && input.source.primeOnly !== false ? [] : unseenEntries(entries, previous?.lastSeenId);
  const signals: FeedSignal[] = [];
  let jobs = 0;
  for (const entry of selectedEntries) {
    const signal = signalFromEntry(input.source, entry);
    const job = jobForFeedSignal(signal);
    // Persist the signal before the (optional) job and notification.
    await input.store.appendFeedSignal(signal);
    if (job) {
      await input.store.appendFeedJob(job);
      jobs += 1;
    }
    // The empty-config fallback makes notifyDiscord a no-op (no webhookUrl).
    await notifyDiscord(input.discord ?? { notifyEvents: new Set() }, { signal, job });
    signals.push(signal);
    // Structured log line per accepted entry.
    console.log(JSON.stringify({ type: "feed.accepted", sourceId: signal.sourceId, provider: signal.provider, event: signal.event, entryId: signal.entryId, job: job?.id }));
  }
  // Advance and persist the cursor only when the feed had at least one entry.
  if (newestId) {
    input.state[input.source.id] = {
      lastSeenId: newestId,
      lastCheckedAt: new Date().toISOString(),
    };
    await saveState(input.statePath, input.state);
  }
  return { signals, jobs, primed };
}
/**
 * Poll every configured source once, sequentially. A failing source is logged
 * as a structured error and does not prevent the remaining sources from being
 * polled.
 */
export async function pollFeedsOnce(config: FeedPollerConfig, fetchImpl?: FetchLike): Promise<void> {
  const sources = await loadSources(config.sourcesPath);
  const statePath = join(config.dataDir, "feed-state.json");
  const state = await loadState(statePath);
  const store = new EventStore(config.dataDir);
  for (const source of sources) {
    try {
      await pollFeedSource({ source, state, statePath, store, discord: config.discord, fetchImpl });
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      console.error(JSON.stringify({ type: "feed.poll_failed", sourceId: source.id, error: message }));
    }
  }
}
/**
 * Start the background feed-polling loop: one immediate poll (which primes
 * cursors on first run), then a repeating timer.
 *
 * A single shared interval is used for all sources — the shortest configured
 * pollIntervalSeconds wins, floored at 30s.
 * NOTE(review): longer per-source intervals are effectively ignored (every
 * source is polled on the shared timer), the interval handle is never cleared,
 * and a poll that outlasts the interval can overlap the next tick — confirm
 * these are acceptable for this deployment.
 */
export async function startFeedPolling(config: FeedPollerConfig): Promise<void> {
  const sources = await loadSources(config.sourcesPath);
  // No sources configured: the watcher stays off entirely.
  if (sources.length === 0) return;
  await pollFeedsOnce(config);
  const intervalSeconds = Math.min(...sources.map((source) => Math.max(30, source.pollIntervalSeconds ?? defaultIntervalSeconds)));
  setInterval(() => {
    // Fire-and-forget; failures are logged, never thrown into the timer.
    pollFeedsOnce(config).catch((error) => {
      console.error(JSON.stringify({ type: "feed.poll_loop_failed", error: error instanceof Error ? error.message : String(error) }));
    });
  }, intervalSeconds * 1000);
}

View file

@ -1,6 +1,6 @@
import { appendFile, mkdir } from "node:fs/promises";
import { dirname, join } from "node:path";
import type { GitWebhookEvent, QueuedJob } from "./types";
import type { FeedJob, FeedSignal, GitWebhookEvent, QueuedJob } from "./types";
async function appendJsonLine(path: string, value: unknown): Promise<void> {
await mkdir(dirname(path), { recursive: true });
@ -10,10 +10,14 @@ async function appendJsonLine(path: string, value: unknown): Promise<void> {
export class EventStore {
readonly eventsPath: string;
readonly jobsPath: string;
readonly feedEventsPath: string;
readonly feedJobsPath: string;
constructor(dataDir: string) {
this.eventsPath = join(dataDir, "events.jsonl");
this.jobsPath = join(dataDir, "jobs.jsonl");
this.feedEventsPath = join(dataDir, "feed-events.jsonl");
this.feedJobsPath = join(dataDir, "feed-jobs.jsonl");
}
async appendEvent(event: GitWebhookEvent): Promise<void> {
@ -23,6 +27,14 @@ export class EventStore {
async appendJob(job: QueuedJob): Promise<void> {
await appendJsonLine(this.jobsPath, job);
}
async appendFeedSignal(signal: FeedSignal): Promise<void> {
await appendJsonLine(this.feedEventsPath, signal);
}
async appendFeedJob(job: FeedJob): Promise<void> {
await appendJsonLine(this.feedJobsPath, job);
}
}
export function jobForEvent(event: GitWebhookEvent): QueuedJob | null {
@ -41,3 +53,24 @@ export function jobForEvent(event: GitWebhookEvent): QueuedJob | null {
createdAt: event.receivedAt,
};
}
/**
 * Build a fork-sync job for a release signal whose target opted into syncing.
 * Push signals and notify_only targets yield null (no work to queue).
 */
export function jobForFeedSignal(signal: FeedSignal): FeedJob | null {
  const target = signal.target;
  if (signal.event !== "release" || !target || target.mode !== "fork_sync") {
    return null;
  }
  return {
    // Deterministic id so re-queued entries are recognizable downstream.
    id: `${signal.provider}:${signal.sourceId}:${signal.entryId}:fork_sync`,
    kind: "fork_sync",
    sourceId: signal.sourceId,
    provider: signal.provider,
    upstreamRepoFullName: signal.repo.fullName,
    targetRepoFullName: target.repoFullName,
    branch: target.branch,
    upstreamRef: signal.ref,
    upstreamSha: signal.sha,
    entryId: signal.entryId,
    url: signal.url,
    createdAt: new Date().toISOString(),
  };
}

View file

@ -1,5 +1,6 @@
import { randomUUID } from "node:crypto";
import { notifyDiscord, parseDiscordConfig, type DiscordConfig } from "./discord";
import { startFeedPolling } from "./feed";
import { jsonResponse, methodNotAllowed, textResponse } from "./http";
import { normalizeGithubEvent } from "./providers/github";
import { normalizeJojoEvent } from "./providers/jojo";
@ -123,6 +124,16 @@ if (import.meta.main) {
}),
};
if (process.env.FEED_SOURCES_PATH) {
startFeedPolling({
dataDir: config.dataDir,
sourcesPath: process.env.FEED_SOURCES_PATH,
discord: config.discord,
}).catch((error) => {
console.error(JSON.stringify({ type: "feed.start_failed", error: error instanceof Error ? error.message : String(error) }));
});
}
Bun.serve({
hostname,
port,

View file

@ -43,3 +43,60 @@ export type QueuedJob = {
deliveryId: string;
createdAt: string;
};
/** Forges the feed watcher can poll from or target. */
export type FeedProvider = "codeberg" | "github" | "jojo";

/** Kinds of upstream activity a feed source represents. */
export type FeedEventName = "push" | "release";

/** One entry in the feed-sources.json manifest. */
export type FeedSourceConfig = {
  /** Unique id; also the key in the persisted feed state file. */
  id: string;
  provider: FeedProvider;
  /** Atom/RSS feed URL to poll. */
  url: string;
  event: FeedEventName;
  /** The upstream repository the feed describes. */
  repo: {
    owner: string;
    name: string;
    fullName: string;
    webUrl: string;
    /** Used to derive refs/heads/<branch> for push signals. */
    defaultBranch?: string;
  };
  /** Optional downstream repo to notify or fork-sync. */
  target?: {
    provider: FeedProvider;
    repoFullName: string;
    branch: string;
    mode: "notify_only" | "fork_sync";
  };
  /** Poll cadence hint; defaults to 300s, floored at 30s by the poller. */
  pollIntervalSeconds?: number;
  /** When the state has no cursor yet, prime silently instead of emitting history. */
  primeOnly?: boolean;
};

/** Normalized record of one new upstream feed entry (appended to feed-events.jsonl). */
export type FeedSignal = {
  sourceId: string;
  provider: FeedProvider;
  event: FeedEventName;
  /** Stable entry id from the feed (Atom <id> / RSS <guid>). */
  entryId: string;
  title: string;
  url?: string;
  author?: string;
  /** ISO 8601 timestamp normalized from the feed. */
  publishedAt: string;
  repo: FeedSourceConfig["repo"];
  /** refs/heads/<branch> for pushes; the release title for releases. */
  ref?: string;
  /** 40-hex commit SHA when one could be extracted from the entry. */
  sha?: string;
  target?: FeedSourceConfig["target"];
  /** Compact snapshot of the original entry for the JSONL log. */
  raw: unknown;
};

/** Queued fork-sync work item (appended to feed-jobs.jsonl). */
export type FeedJob = {
  /** Deterministic: provider:sourceId:entryId:fork_sync. */
  id: string;
  kind: "fork_sync";
  sourceId: string;
  provider: FeedProvider;
  upstreamRepoFullName: string;
  targetRepoFullName: string;
  branch: string;
  upstreamRef?: string;
  upstreamSha?: string;
  entryId: string;
  url?: string;
  createdAt: string;
};

View file

@ -1,6 +1,6 @@
import { describe, expect, test } from "bun:test";
import { buildDiscordPayload, notifyDiscord, parseDiscordConfig } from "../src/discord";
import type { GitWebhookEvent } from "../src/types";
import type { FeedSignal, GitWebhookEvent } from "../src/types";
const pushEvent: GitWebhookEvent = {
provider: "jojo",
@ -25,6 +25,33 @@ const pushEvent: GitWebhookEvent = {
},
};
const feedSignal: FeedSignal = {
sourceId: "github-openai-codex-main",
provider: "github",
event: "push",
entryId: "tag:github.com,2008:Grit::Commit/0123456789abcdef0123456789abcdef01234567",
title: "Tighten sandbox setup",
url: "https://github.com/openai/codex/commit/0123456789abcdef0123456789abcdef01234567",
author: "bookholt-oai",
publishedAt: "2026-05-12T21:00:00.000Z",
repo: {
owner: "openai",
name: "codex",
fullName: "openai/codex",
webUrl: "https://github.com/openai/codex",
defaultBranch: "main",
},
ref: "refs/heads/main",
sha: "0123456789abcdef0123456789abcdef01234567",
target: {
provider: "github",
repoFullName: "peezy-tech/codex",
branch: "main",
mode: "notify_only",
},
raw: {},
};
describe("discord notifications", () => {
test("parses default notify events", () => {
const config = parseDiscordConfig({});
@ -55,6 +82,14 @@ describe("discord notifications", () => {
expect(payload.embeds[0].fields).toContainEqual({ name: "Queued", value: "main_push", inline: true });
});
test("builds readable feed embeds", () => {
const payload = buildDiscordPayload({ signal: feedSignal });
expect(payload.embeds[0].title).toBe("[github] openai/codex upstream update on main");
expect(payload.embeds[0].description).toBe("Tighten sandbox setup");
expect(payload.embeds[0].url).toBe("https://github.com/openai/codex/commit/0123456789abcdef0123456789abcdef01234567");
expect(payload.embeds[0].footer.text).toBe("feed watcher");
});
test("does nothing without a webhook URL", async () => {
let calls = 0;
await notifyDiscord(parseDiscordConfig({}), { event: pushEvent }, async () => {

140
test/feed.test.ts Normal file
View file

@ -0,0 +1,140 @@
import { mkdtemp, readFile, writeFile } from "node:fs/promises";
import { join } from "node:path";
import { tmpdir } from "node:os";
import { describe, expect, test } from "bun:test";
import { loadSources, parseFeedEntries, pollFeedsOnce, signalFromEntry } from "../src/feed";
import type { FeedSourceConfig } from "../src/types";
const atom = `<?xml version="1.0"?>
<feed>
<entry>
<id>tag:github.com,2008:Grit::Commit/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa</id>
<link type="text/html" rel="alternate" href="https://github.com/openai/codex/commit/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"/>
<title>Update main</title>
<updated>2026-05-12T10:00:00Z</updated>
<author><name>alice</name></author>
</entry>
<entry>
<id>tag:github.com,2008:Grit::Commit/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb</id>
<link type="text/html" rel="alternate" href="https://github.com/openai/codex/commit/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"/>
<title>Older update</title>
<updated>2026-05-12T09:00:00Z</updated>
<author><name>bob</name></author>
</entry>
</feed>`;
const rss = `<?xml version="1.0"?>
<rss><channel>
<item>
<title>v1.2.3</title>
<link>https://codeberg.org/forgejo/forgejo/releases/tag/v1.2.3</link>
<guid>release-123</guid>
<author>release-team</author>
<pubDate>Tue, 12 May 2026 10:00:00 +0000</pubDate>
</item>
</channel></rss>`;
const source: FeedSourceConfig = {
id: "github-openai-codex-main",
provider: "github",
url: "https://github.com/openai/codex/commits/main.atom",
event: "push",
repo: {
owner: "openai",
name: "codex",
fullName: "openai/codex",
webUrl: "https://github.com/openai/codex",
defaultBranch: "main",
},
target: {
provider: "github",
repoFullName: "peezy-tech/codex",
branch: "main",
mode: "notify_only",
},
};
// Coverage for src/feed.ts: feed parsing, signal normalization, manifest
// loading, and the prime-then-emit polling state machine.
describe("feed watcher", () => {
  // Parser must handle both Atom <entry> and RSS <item> documents.
  test("parses Atom and RSS feed entries", () => {
    expect(parseFeedEntries(atom)[0]).toMatchObject({
      title: "Update main",
      author: "alice",
      url: "https://github.com/openai/codex/commit/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
    });
    expect(parseFeedEntries(rss)[0]).toMatchObject({
      id: "release-123",
      title: "v1.2.3",
      author: "release-team",
    });
  });
  // A commit-feed entry yields a push signal with branch ref and extracted SHA.
  test("normalizes commit feed entries into push signals", () => {
    const signal = signalFromEntry(source, parseFeedEntries(atom)[0]);
    expect(signal).toMatchObject({
      sourceId: "github-openai-codex-main",
      provider: "github",
      event: "push",
      ref: "refs/heads/main",
      sha: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
      repo: { fullName: "openai/codex" },
    });
  });
  // Sanity-checks the repo's real feed-sources.json manifest.
  test("loads configured feed sources", async () => {
    const sources = await loadSources(join(import.meta.dir, "..", "feed-sources.json"));
    expect(sources.map((item) => item.id)).toEqual([
      "codeberg-forgejo-branch",
      "codeberg-forgejo-releases",
      "github-openai-codex-main",
      "github-openai-codex-releases",
    ]);
  });
  // First poll records the cursor; historical entries are not emitted, so the
  // feed-events log must not exist yet.
  test("first poll primes state without emitting old entries", async () => {
    const dataDir = await mkdtemp(join(tmpdir(), "git-webhooks-feed-"));
    const sourcesPath = join(dataDir, "sources.json");
    await writeFile(sourcesPath, JSON.stringify({ sources: [source] }), "utf8");
    await pollFeedsOnce({ dataDir, sourcesPath, discord: { webhookUrl: "https://discord.example/webhook", notifyEvents: new Set(["push"]) } }, async () => {
      return new Response(atom, { status: 200 });
    });
    const state = JSON.parse(await readFile(join(dataDir, "feed-state.json"), "utf8"));
    expect(state["github-openai-codex-main"].lastSeenId).toBe("tag:github.com,2008:Grit::Commit/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
    await expect(readFile(join(dataDir, "feed-events.jsonl"), "utf8")).rejects.toThrow();
  });
  // With a pre-seeded cursor, a release entry is emitted as a signal and, for a
  // fork_sync target, also queued as a fork-sync job — with exactly one fetch.
  test("later polls emit new entries and release fork sync jobs", async () => {
    const dataDir = await mkdtemp(join(tmpdir(), "git-webhooks-feed-"));
    const sourcesPath = join(dataDir, "sources.json");
    const releaseSource: FeedSourceConfig = {
      ...source,
      id: "github-openai-codex-releases",
      url: "https://github.com/openai/codex/releases.atom",
      event: "release",
      target: {
        provider: "github",
        repoFullName: "peezy-tech/codex",
        branch: "main",
        mode: "fork_sync",
      },
    };
    await writeFile(sourcesPath, JSON.stringify({ sources: [releaseSource] }), "utf8");
    // Seed a cursor so the poll is not treated as a priming run.
    await writeFile(join(dataDir, "feed-state.json"), JSON.stringify({
      "github-openai-codex-releases": {
        lastSeenId: "older-release",
        lastCheckedAt: "2026-05-12T09:00:00.000Z",
      },
    }), "utf8");
    let feedCalls = 0;
    await pollFeedsOnce({ dataDir, sourcesPath, discord: { notifyEvents: new Set(["release"]) } }, async () => {
      feedCalls += 1;
      return new Response(rss, { status: 200 });
    });
    expect(await readFile(join(dataDir, "feed-events.jsonl"), "utf8")).toContain("\"event\":\"release\"");
    expect(await readFile(join(dataDir, "feed-jobs.jsonl"), "utf8")).toContain("\"kind\":\"fork_sync\"");
    expect(feedCalls).toBe(1);
  });
});
});