Merge branch 'main' into claude/fix-activitypub-og-image-CrCGI
This commit is contained in:
61
scripts/delete-posts.mjs
Normal file
61
scripts/delete-posts.mjs
Normal file
@@ -0,0 +1,61 @@
|
||||
/**
 * Delete specific posts from MongoDB by URL.
 *
 * Usage:
 *   node scripts/delete-posts.mjs
 *
 * Add --dry-run to preview without deleting.
 */

import { MongoClient } from "mongodb";
import config from "../indiekit.config.mjs";

const DRY_RUN = process.argv.includes("--dry-run");

const URLS_TO_DELETE = [
  "https://blog.giersig.eu/notes/3f6c2/",
  "https://blog.giersig.eu/notes/c60c0/",
  "https://blog.giersig.eu/notes/221cc/",
  "https://blog.giersig.eu/notes/b7efe/",
  "https://blog.giersig.eu/photos/reallohn-produktivitaet-ein-strukturelles-raetsel/",
  "https://blog.giersig.eu/replies/22d5d/",
  "https://blog.giersig.eu/notes/dff1f/",
];

// Normalise: ensure trailing slash for all URLs
const targets = URLS_TO_DELETE.map((u) => u.replace(/\/?$/, "/"));

const mongodbUrl = config.application?.mongodbUrl;
if (!mongodbUrl) {
  console.error("[delete-posts] Could not resolve MongoDB URL from config");
  process.exit(1);
}

const client = new MongoClient(mongodbUrl);

try {
  await client.connect();
  const db = client.db();
  const posts = db.collection("posts");

  for (const url of targets) {
    // Stored properties.url may or may not carry the trailing slash —
    // match both forms so normalization can't cause a false NOT FOUND.
    const doc = await posts.findOne({
      "properties.url": { $in: [url, url.replace(/\/$/, "")] },
    });

    if (!doc) {
      console.log(`[delete-posts] NOT FOUND: ${url}`);
      continue;
    }

    // Tolerate documents without a properties object rather than throwing.
    const type = doc.properties?.["post-type"] ?? doc.type ?? "unknown";
    const published = doc.properties?.published ?? "(no date)";

    if (DRY_RUN) {
      console.log(`[delete-posts] DRY RUN — would delete: ${url} (${type}, ${published})`);
    } else {
      await posts.deleteOne({ _id: doc._id });
      console.log(`[delete-posts] Deleted: ${url} (${type}, ${published})`);
    }
  }
} finally {
  // Always release the connection, even if lookup/delete throws.
  await client.close();
}
|
||||
113
scripts/patch-ap-like-activity-dispatcher.mjs
Normal file
113
scripts/patch-ap-like-activity-dispatcher.mjs
Normal file
@@ -0,0 +1,113 @@
|
||||
/**
 * Patch: register a Fedify Like activity dispatcher in federation-setup.js.
 *
 * Per ActivityPub §3.1, objects with an `id` MUST be dereferenceable at that
 * URI. The Like activities produced by jf2ToAS2Activity (after patch-ap-like-
 * activity-id.mjs adds an id) need a corresponding Fedify object dispatcher so
 * that fetching /activitypub/activities/like/{id} returns the Like activity.
 *
 * Fix:
 * Add federation.setObjectDispatcher(Like, ...) after the Article dispatcher
 * in setupObjectDispatchers(). The handler looks up the post, calls
 * jf2ToAS2Activity, and returns the Like if that's what was produced.
 */

import { access, readFile, writeFile } from "node:fs/promises";

// federation-setup.js may be hoisted to either location.
const candidates = [
  "node_modules/@rmdes/indiekit-endpoint-activitypub/lib/federation-setup.js",
  "node_modules/@indiekit/indiekit/node_modules/@rmdes/indiekit-endpoint-activitypub/lib/federation-setup.js",
];

// Presence of this marker means the patch was already applied.
const MARKER = "// ap-like-activity-dispatcher patch";

const OLD_SNIPPET = `  // Article dispatcher
  federation.setObjectDispatcher(
    Article,
    \`\${mountPath}/objects/article/{+id}\`,
    async (ctx, { id }) => {
      const obj = await resolvePost(ctx, id);
      return obj instanceof Article ? obj : null;
    },
  );
}`;

const NEW_SNIPPET = `  // Article dispatcher
  federation.setObjectDispatcher(
    Article,
    \`\${mountPath}/objects/article/{+id}\`,
    async (ctx, { id }) => {
      const obj = await resolvePost(ctx, id);
      return obj instanceof Article ? obj : null;
    },
  );

  // Like activity dispatcher — makes AP-like activities dereferenceable (AP §3.1)
  // ap-like-activity-dispatcher patch
  federation.setObjectDispatcher(
    Like,
    \`\${mountPath}/activities/like/{+id}\`,
    async (ctx, { id }) => {
      if (!collections.posts || !publicationUrl) return null;
      const postUrl = \`\${publicationUrl.replace(/\\/$/, "")}/\${id}\`;
      const post = await collections.posts.findOne({
        "properties.url": { $in: [postUrl, postUrl + "/"] },
      });
      if (!post) return null;
      if (post?.properties?.["post-status"] === "draft") return null;
      if (post?.properties?.visibility === "unlisted") return null;
      if (post.properties?.deleted) return null;
      const actorUrl = ctx.getActorUri(handle).href;
      const activity = await jf2ToAS2Activity(post.properties, actorUrl, publicationUrl);
      return activity instanceof Like ? activity : null;
    },
  );
}`;

/** True when `filePath` is accessible on disk. */
const exists = (filePath) => access(filePath).then(() => true, () => false);

let checked = 0;
let patched = 0;

for (const filePath of candidates) {
  if (!(await exists(filePath))) continue;
  checked += 1;

  let source = await readFile(filePath, "utf8");
  if (source.includes(MARKER)) continue; // already patched

  if (!source.includes(OLD_SNIPPET)) {
    console.log(`[postinstall] patch-ap-like-activity-dispatcher: snippet not found in ${filePath}`);
    continue;
  }

  // Ensure Like is imported from @fedify/fedify/vocab (may be absent on fresh installs)
  if (!source.includes("  Like,")) {
    source = source.replace("  Note,", "  Like,\n  Note,");
  }

  source = source.replace(OLD_SNIPPET, NEW_SNIPPET);
  await writeFile(filePath, source, "utf8");
  patched += 1;
  console.log(`[postinstall] Applied patch-ap-like-activity-dispatcher to ${filePath}`);
}

if (checked === 0) {
  console.log("[postinstall] patch-ap-like-activity-dispatcher: no target files found");
} else if (patched === 0) {
  console.log("[postinstall] patch-ap-like-activity-dispatcher: already up to date");
} else {
  console.log(`[postinstall] patch-ap-like-activity-dispatcher: patched ${patched}/${checked} file(s)`);
}
|
||||
91
scripts/patch-ap-like-activity-id.mjs
Normal file
91
scripts/patch-ap-like-activity-id.mjs
Normal file
@@ -0,0 +1,91 @@
|
||||
/**
 * Patch: add a canonical `id` to the Like activity produced by jf2ToAS2Activity.
 *
 * Per ActivityPub §6.2.1, activities sent from a server SHOULD have an `id`
 * URI so that remote servers can dereference them. The current Like activity
 * has no `id`, which means it cannot be looked up by its URL.
 *
 * Fix:
 * In jf2-to-as2.js, derive the mount path from the actor URL and construct
 * a canonical id at /activitypub/activities/like/{post-path}.
 *
 * This enables:
 * - The Like activity dispatcher (patch-ap-like-activity-dispatcher.mjs) to
 *   serve the Like at its canonical URL.
 * - Remote servers to dereference the Like activity by its id.
 */

import { access, readFile, writeFile } from "node:fs/promises";

// jf2-to-as2.js may be hoisted to either location.
const candidates = [
  "node_modules/@rmdes/indiekit-endpoint-activitypub/lib/jf2-to-as2.js",
  "node_modules/@indiekit/indiekit/node_modules/@rmdes/indiekit-endpoint-activitypub/lib/jf2-to-as2.js",
];

// Presence of this marker means the patch was already applied.
const MARKER = "// ap-like-activity-id patch";

const OLD_SNIPPET = `  return new Like({
    actor: actorUri,
    object: new URL(likeOfUrl),
    to: new URL("https://www.w3.org/ns/activitystreams#Public"),
  });`;

const NEW_SNIPPET = `  // ap-like-activity-id patch
  // Derive mount path from actor URL (e.g. "/activitypub") so we can
  // construct the canonical id without needing mountPath in options.
  const actorPath = new URL(actorUrl).pathname; // e.g. "/activitypub/users/sven"
  const mp = actorPath.replace(/\\/users\\/[^/]+$/, ""); // → "/activitypub"
  const postRelPath = (properties.url || "")
    .replace(publicationUrl.replace(/\\/$/, ""), "")
    .replace(/^\\//, "")
    .replace(/\\/$/, ""); // e.g. "likes/9acc3"
  const likeActivityId = \`\${publicationUrl.replace(/\\/$/, "")}\${mp}/activities/like/\${postRelPath}\`;
  return new Like({
    id: new URL(likeActivityId),
    actor: actorUri,
    object: new URL(likeOfUrl),
    to: new URL("https://www.w3.org/ns/activitystreams#Public"),
  });`;

/** True when `filePath` is accessible on disk. */
const exists = (filePath) => access(filePath).then(() => true, () => false);

let checked = 0;
let patched = 0;

for (const filePath of candidates) {
  if (!(await exists(filePath))) continue;
  checked += 1;

  let source = await readFile(filePath, "utf8");
  if (source.includes(MARKER)) continue; // already patched

  if (!source.includes(OLD_SNIPPET)) {
    console.log(`[postinstall] patch-ap-like-activity-id: snippet not found in ${filePath}`);
    continue;
  }

  source = source.replace(OLD_SNIPPET, NEW_SNIPPET);
  await writeFile(filePath, source, "utf8");
  patched += 1;
  console.log(`[postinstall] Applied patch-ap-like-activity-id to ${filePath}`);
}

if (checked === 0) {
  console.log("[postinstall] patch-ap-like-activity-id: no target files found");
} else if (patched === 0) {
  console.log("[postinstall] patch-ap-like-activity-id: already up to date");
} else {
  console.log(`[postinstall] patch-ap-like-activity-id: patched ${patched}/${checked} file(s)`);
}
|
||||
87
scripts/patch-ap-like-note-dispatcher.mjs
Normal file
87
scripts/patch-ap-like-note-dispatcher.mjs
Normal file
@@ -0,0 +1,87 @@
|
||||
/**
 * Patch: REVERT the wrong ap-like-note-dispatcher change in federation-setup.js.
 *
 * The previous version of this script served AP-likes as fake Notes at the
 * Note dispatcher URL, which violated ActivityPub semantics (Like activities
 * should not be served as Notes).
 *
 * This rewritten version removes that fake-Note block and restores the original
 * resolvePost() logic. The correct AP-compliant fixes are handled by:
 * - patch-ap-like-activity-id.mjs (adds id to Like activity)
 * - patch-ap-like-activity-dispatcher.mjs (registers Like object dispatcher)
 * - patch-ap-url-lookup-api-like.mjs (returns likeOf URL for AP-likes in widget)
 */

import { access, readFile, writeFile } from "node:fs/promises";

// federation-setup.js may be hoisted to either location.
const candidates = [
  "node_modules/@rmdes/indiekit-endpoint-activitypub/lib/federation-setup.js",
  "node_modules/@indiekit/indiekit/node_modules/@rmdes/indiekit-endpoint-activitypub/lib/federation-setup.js",
];

// Marker from the old wrong patch — if this is present, we need to revert
const WRONG_PATCH_MARKER = "// ap-like-note-dispatcher patch";

// Clean up the Like import comment added by the old patch
const OLD_IMPORT = `  Like, // Like import for ap-like-note-dispatcher patch`;
const NEW_IMPORT = `  Like,`;

/** True when `filePath` is accessible on disk. */
const exists = (filePath) => access(filePath).then(() => true, () => false);

let checked = 0;
let patched = 0;

for (const filePath of candidates) {
  if (!(await exists(filePath))) continue;
  checked += 1;

  let source = await readFile(filePath, "utf8");

  // Nothing to do when the wrong patch was never applied (or already reverted).
  if (!source.includes(WRONG_PATCH_MARKER)) continue;

  let modified = false;

  // 1. Clean up Like import comment
  if (source.includes(OLD_IMPORT)) {
    source = source.replace(OLD_IMPORT, NEW_IMPORT);
    modified = true;
  }

  // 2. Remove fake Note block — use regex to avoid escaping issues with
  //    unicode escapes and template literals inside the block. Match from the
  //    opening comment through `return await activity.getObject();`.
  const fakeNoteBlock = /  \/\/ Only Create activities wrap Note\/Article objects\.\n[\s\S]*?  return await activity\.getObject\(\);/;
  if (fakeNoteBlock.test(source)) {
    source = source.replace(
      fakeNoteBlock,
      `  // Only Create activities wrap Note/Article objects\n  if (!(activity instanceof Create)) return null;\n  return await activity.getObject();`,
    );
    modified = true;
  }

  if (modified) {
    await writeFile(filePath, source, "utf8");
    patched += 1;
    console.log(`[postinstall] Reverted ap-like-note-dispatcher patch in ${filePath}`);
  }
}

if (checked === 0) {
  console.log("[postinstall] patch-ap-like-note-dispatcher: no target files found");
} else if (patched === 0) {
  console.log("[postinstall] patch-ap-like-note-dispatcher: already up to date");
} else {
  console.log(`[postinstall] patch-ap-like-note-dispatcher: reverted ${patched}/${checked} file(s)`);
}
|
||||
70
scripts/patch-ap-remove-federation-diag.mjs
Normal file
70
scripts/patch-ap-remove-federation-diag.mjs
Normal file
@@ -0,0 +1,70 @@
|
||||
/**
 * Patch: remove federation-diag inbox logging from the ActivityPub endpoint.
 *
 * The diagnostic block logs every inbox POST to detect federation stalls.
 * It is no longer needed and produces noise in indiekit.log.
 */

import { access, readFile, writeFile } from "node:fs/promises";

// index.js may be hoisted to either location.
const candidates = [
  "node_modules/@rmdes/indiekit-endpoint-activitypub/index.js",
  "node_modules/@indiekit/indiekit/node_modules/@rmdes/indiekit-endpoint-activitypub/index.js",
];

// Presence of this marker means the patch was already applied.
const MARKER = "// ap-remove-federation-diag patch";

const OLD_SNIPPET = `    // Diagnostic: log inbox POSTs to detect federation stalls
    if (req.method === "POST" && req.path.includes("inbox")) {
      const ua = req.get("user-agent") || "unknown";
      const bodyParsed = req.body !== undefined && Object.keys(req.body || {}).length > 0;
      console.info(\`[federation-diag] POST \${req.path} from=\${ua.slice(0, 60)} bodyParsed=\${bodyParsed} readable=\${req.readable}\`);
    }

    return self._fedifyMiddleware(req, res, next);`;

const NEW_SNIPPET = `    // ap-remove-federation-diag patch
    return self._fedifyMiddleware(req, res, next);`;

/** True when `filePath` is accessible on disk. */
const exists = (filePath) => access(filePath).then(() => true, () => false);

let checked = 0;
let patched = 0;

for (const filePath of candidates) {
  if (!(await exists(filePath))) continue;
  checked += 1;

  let source = await readFile(filePath, "utf8");
  if (source.includes(MARKER)) continue; // already patched

  if (!source.includes(OLD_SNIPPET)) {
    console.log(`[postinstall] patch-ap-remove-federation-diag: snippet not found in ${filePath}`);
    continue;
  }

  source = source.replace(OLD_SNIPPET, NEW_SNIPPET);
  await writeFile(filePath, source, "utf8");
  patched += 1;
  console.log(`[postinstall] Applied patch-ap-remove-federation-diag to ${filePath}`);
}

if (checked === 0) {
  console.log("[postinstall] patch-ap-remove-federation-diag: no target files found");
} else if (patched === 0) {
  console.log("[postinstall] patch-ap-remove-federation-diag: already up to date");
} else {
  console.log(`[postinstall] patch-ap-remove-federation-diag: patched ${patched}/${checked} file(s)`);
}
|
||||
110
scripts/patch-ap-url-lookup-api-like.mjs
Normal file
110
scripts/patch-ap-url-lookup-api-like.mjs
Normal file
@@ -0,0 +1,110 @@
|
||||
/**
 * Patch: make the /api/ap-url endpoint return the liked post URL for AP-likes.
 *
 * Root cause:
 * For like posts where like-of is an ActivityPub URL (e.g. a Mastodon status),
 * the "Also on: Fediverse" widget's authorize_interaction flow needs to send
 * the user to the original AP object, not to a blog-side Note URL.
 *
 * The current handler always returns a /activitypub/objects/note/{id} URL,
 * which 404s for AP-likes (because jf2ToAS2Activity returns a Like activity,
 * not a Create(Note), so the Note dispatcher returns null).
 *
 * Fix:
 * Before building the Note/Article URL, check whether the post is an AP-like
 * (like-of is a URL that responds with application/activity+json). If it is,
 * return { apUrl: likeOf } so that authorize_interaction opens the original
 * AP object on the remote instance, where the user can interact with it.
 *
 * Non-AP likes (like-of is a plain web URL) fall through to the existing
 * Note URL logic unchanged.
 */

import { access, readFile, writeFile } from "node:fs/promises";

// index.js may be hoisted to either location.
const candidates = [
  "node_modules/@rmdes/indiekit-endpoint-activitypub/index.js",
  "node_modules/@indiekit/indiekit/node_modules/@rmdes/indiekit-endpoint-activitypub/index.js",
];

// Presence of this marker means the patch was already applied.
const MARKER = "// ap-url-lookup-api-like patch";

const OLD_SNIPPET = `    // Determine the AP object type (mirrors jf2-to-as2.js logic)
    const postType = post.properties?.["post-type"];
    const isArticle = postType === "article" && !!post.properties?.name;
    const objectType = isArticle ? "article" : "note";`;

const NEW_SNIPPET = `    // Determine the AP object type (mirrors jf2-to-as2.js logic)
    const postType = post.properties?.["post-type"];

    // For AP-likes: the widget should open the liked post on the remote instance.
    // We detect AP URLs the same way as jf2-to-as2.js: HEAD with activity+json Accept.
    // ap-url-lookup-api-like patch
    if (postType === "like") {
      const likeOf = post.properties?.["like-of"] || "";
      if (likeOf) {
        let isAp = false;
        try {
          const ctrl = new AbortController();
          const tid = setTimeout(() => ctrl.abort(), 3000);
          const r = await fetch(likeOf, {
            method: "HEAD",
            headers: { Accept: "application/activity+json, application/ld+json" },
            signal: ctrl.signal,
          });
          clearTimeout(tid);
          const ct = r.headers.get("content-type") || "";
          isAp = ct.includes("activity+json") || ct.includes("ld+json");
        } catch { /* network error — treat as non-AP */ }
        if (isAp) {
          res.set("Cache-Control", "public, max-age=60");
          return res.json({ apUrl: likeOf });
        }
      }
    }

    const isArticle = postType === "article" && !!post.properties?.name;
    const objectType = isArticle ? "article" : "note";`;

/** True when `filePath` is accessible on disk. */
const exists = (filePath) => access(filePath).then(() => true, () => false);

let checked = 0;
let patched = 0;

for (const filePath of candidates) {
  if (!(await exists(filePath))) continue;
  checked += 1;

  let source = await readFile(filePath, "utf8");
  if (source.includes(MARKER)) continue; // already patched

  if (!source.includes(OLD_SNIPPET)) {
    console.log(`[postinstall] patch-ap-url-lookup-api-like: snippet not found in ${filePath}`);
    continue;
  }

  source = source.replace(OLD_SNIPPET, NEW_SNIPPET);
  await writeFile(filePath, source, "utf8");
  patched += 1;
  console.log(`[postinstall] Applied patch-ap-url-lookup-api-like to ${filePath}`);
}

if (checked === 0) {
  console.log("[postinstall] patch-ap-url-lookup-api-like: no target files found");
} else if (patched === 0) {
  console.log("[postinstall] patch-ap-url-lookup-api-like: already up to date");
} else {
  console.log(`[postinstall] patch-ap-url-lookup-api-like: patched ${patched}/${checked} file(s)`);
}
|
||||
61
scripts/patch-endpoint-github-contributions-log.mjs
Normal file
61
scripts/patch-endpoint-github-contributions-log.mjs
Normal file
@@ -0,0 +1,61 @@
|
||||
/**
 * Patch: suppress the "Events API returned no contributions, using Search API"
 * fallback logs emitted by the endpoint-github contributions controller.
 */
import { access, readFile, writeFile } from "node:fs/promises";

const candidates = [
  "node_modules/@rmdes/indiekit-endpoint-github/lib/controllers/contributions.js",
];

// Marker: present once the patch has already been applied
const marker = "// [patched] suppress contributions fallback log";

const oldLog1 = `    console.log("[contributions] Events API returned no contributions, using Search API");`;
const newLog1 = `    // [patched] suppress contributions fallback log`;

const oldLog2 = `    console.log("[contributions API] Events API returned no contributions, using Search API");`;
const newLog2 = `    // [patched] suppress contributions fallback log`;

/** True when `path` is accessible on disk. */
const exists = (path) => access(path).then(() => true, () => false);

let checked = 0;
let patched = 0;

for (const filePath of candidates) {
  if (!(await exists(filePath))) continue;
  checked += 1;

  const source = await readFile(filePath, "utf8");

  if (source.includes(marker)) {
    console.log("[postinstall] endpoint-github contributions log already suppressed");
    continue;
  }

  // Either of the two log statements is enough to proceed; if neither is
  // present the upstream file has changed shape and we leave it alone.
  if (!source.includes(oldLog1) && !source.includes(oldLog2)) {
    console.log("[postinstall] endpoint-github contributions: unexpected source layout, skipping");
    continue;
  }

  const updated = source.replace(oldLog1, newLog1).replace(oldLog2, newLog2);
  await writeFile(filePath, updated, "utf8");
  patched += 1;
}

if (checked === 0) {
  console.log("[postinstall] No endpoint-github contributions file found");
} else if (patched > 0) {
  console.log(
    `[postinstall] Suppressed contributions fallback log in ${patched} file(s)`,
  );
}
|
||||
89
scripts/patch-syndicate-normalize-syndication-array.mjs
Normal file
89
scripts/patch-syndicate-normalize-syndication-array.mjs
Normal file
@@ -0,0 +1,89 @@
|
||||
/**
 * Patch: normalize `properties.syndication` to always be an array before
 * using it in syndicateToTargets().
 *
 * Root cause: Micropub's replaceEntries() stores a single-value array as a
 * plain scalar (JF2 normalization). So after the first successful syndication,
 * `properties.syndication` in the DB is a string like "https://bsky.app/..."
 * rather than ["https://bsky.app/..."]. Spreading a string gives individual
 * characters, so hasSyndicationUrl() never matches and alreadySyndicated is
 * always false — causing posts to be re-syndicated on every webhook trigger.
 *
 * Fix: use [].concat() instead of [...spread] to safely handle both string
 * and array values.
 */
import { access, readFile, writeFile } from "node:fs/promises";

// utils.js may live under either package name and either hoisting level.
const candidates = [
  "node_modules/@indiekit/endpoint-syndicate/lib/utils.js",
  "node_modules/@rmdes/indiekit-endpoint-syndicate/lib/utils.js",
  "node_modules/@indiekit/indiekit/node_modules/@indiekit/endpoint-syndicate/lib/utils.js",
  "node_modules/@indiekit/indiekit/node_modules/@rmdes/indiekit-endpoint-syndicate/lib/utils.js",
];

// Presence of this marker means the patch was already applied.
const marker = "// syndicate-normalize-syndication-array patch";

// Two replacements needed in the same file.
const replacements = [
  {
    old: `  let syndicatedUrls = [...(properties.syndication || [])];`,
    new: `  let syndicatedUrls = [].concat(properties.syndication || []); // syndicate-normalize-syndication-array patch`,
  },
  {
    old: `  const existingSyndication = properties.syndication || [];`,
    new: `  const existingSyndication = [].concat(properties.syndication || []); // syndicate-normalize-syndication-array patch`,
  },
];

/** True when `filePath` is accessible on disk. */
const exists = (filePath) => access(filePath).then(() => true, () => false);

let checked = 0;
let patched = 0;

for (const filePath of candidates) {
  if (!(await exists(filePath))) continue;
  checked += 1;

  let source = await readFile(filePath, "utf8");
  if (source.includes(marker)) continue; // already applied

  let changed = false;
  for (const { old: oldSnippet, new: newSnippet } of replacements) {
    if (!source.includes(oldSnippet)) {
      console.warn(
        `[postinstall] Skipping syndicate-normalize-syndication-array patch for ${filePath}: snippet not found: ${oldSnippet.slice(0, 60)}`,
      );
      continue;
    }
    source = source.replace(oldSnippet, newSnippet);
    changed = true;
  }

  if (changed) {
    await writeFile(filePath, source, "utf8");
    patched += 1;
  }
}

if (checked === 0) {
  console.log("[postinstall] No endpoint-syndicate utils files found");
} else if (patched === 0) {
  console.log("[postinstall] syndicate-normalize-syndication-array patch already applied");
} else {
  console.log(
    `[postinstall] Patched syndicate-normalize-syndication-array in ${patched} file(s)`,
  );
}
|
||||
@@ -1,25 +1,20 @@
|
||||
/**
|
||||
* Patch @rmdes/indiekit-endpoint-webmention-sender controller to:
|
||||
*
|
||||
* 1. Always fetch the live page instead of using stored post content.
|
||||
* The stored content (post.properties.content.html) is just the post body —
|
||||
* it never contains template-rendered links like u-in-reply-to, u-like-of,
|
||||
* u-bookmark-of, u-repost-of. Only the live HTML has those.
|
||||
* 1. Build synthetic h-entry HTML from stored post properties instead of
|
||||
* fetching the live page. The stored properties already contain all
|
||||
* microformat target URLs (in-reply-to, like-of, bookmark-of, repost-of)
|
||||
* and content.html has inline links — no live page fetch needed.
|
||||
*
|
||||
* 2. Don't permanently mark a post as webmention-sent when the live page
|
||||
* is unreachable (e.g. deploy still in progress). Skip it silently so
|
||||
* the next poll retries it.
|
||||
* This fixes unreliable live fetches caused by internal DNS routing
|
||||
* blog.giersig.eu to the indiekit admin nginx (10.100.0.10) which
|
||||
* returns a login page for post URLs.
|
||||
*
|
||||
* 3. When fetching via an internal URL (nginx reverse proxy), send the public
|
||||
* Host header so nginx can route to the correct virtual host.
|
||||
* Without this, nginx sees the internal IP as Host and may serve the wrong
|
||||
* vhost, returning a page with no .h-entry.
|
||||
* 2. Don't permanently mark a post as webmention-sent when processing
|
||||
* fails. Skip it silently so the next poll retries.
|
||||
*
|
||||
* 4. Log the actual fetchUrl and response preview when h-entry check fails,
|
||||
* so the cause (wrong vhost, indiekit page, etc.) is visible in the logs.
|
||||
*
|
||||
* Handles the original upstream code, the older retry patch, the v1 livefetch
|
||||
* patch, and upgrades v2 → v3 (adds Host header + better diagnostics).
|
||||
* Handles the original upstream code, the older retry patch, and all
|
||||
* prior livefetch patch versions (v1–v4) via full block replacement.
|
||||
*/
|
||||
|
||||
import { access, readFile, writeFile } from "node:fs/promises";
|
||||
@@ -27,9 +22,7 @@ import { access, readFile, writeFile } from "node:fs/promises";
|
||||
const filePath =
|
||||
"node_modules/@rmdes/indiekit-endpoint-webmention-sender/lib/controllers/webmention-sender.js";
|
||||
|
||||
const patchMarker = "// [patched:livefetch:v3]";
|
||||
const v2PatchMarker = "// [patched:livefetch:v2]";
|
||||
const oldPatchMarker = "// [patched:livefetch]";
|
||||
const patchMarker = "// [patched:livefetch:v5]";
|
||||
|
||||
// Original upstream code
|
||||
const originalBlock = ` // If no content, try fetching the published page
|
||||
@@ -51,8 +44,7 @@ const originalBlock = ` // If no content, try fetching the published page
|
||||
continue;
|
||||
}`;
|
||||
|
||||
// State left by older patch-webmention-sender-retry.mjs (which only fixed the
|
||||
// fetch-failure path but not the live-fetch-always path)
|
||||
// State left by older patch-webmention-sender-retry.mjs
|
||||
const retryPatchedBlock = ` // If no content, try fetching the published page
|
||||
let contentToProcess = postContent;
|
||||
let fetchFailed = false;
|
||||
@@ -72,8 +64,6 @@ const retryPatchedBlock = ` // If no content, try fetching the published
|
||||
|
||||
if (!contentToProcess) {
|
||||
if (fetchFailed) {
|
||||
// Page not yet available — skip and retry on next poll rather than
|
||||
// permanently marking this post as sent with zero webmentions.
|
||||
console.log(\`[webmention] Page not yet available for \${postUrl}, will retry next poll\`);
|
||||
continue;
|
||||
}
|
||||
@@ -82,88 +72,30 @@ const retryPatchedBlock = ` // If no content, try fetching the published
|
||||
continue;
|
||||
}`;
|
||||
|
||||
const newBlock = ` // [patched:livefetch:v3] Always fetch the live page so template-rendered links
|
||||
// (u-in-reply-to, u-like-of, u-bookmark-of, u-repost-of, etc.) are included.
|
||||
// Stored content only has the post body, not these microformat links.
|
||||
// Rewrite public URL to internal URL for jailed setups where the server
|
||||
// can't reach its own public HTTPS URL.
|
||||
// Send public Host header on internal fetches so nginx routes to the right vhost.
|
||||
let contentToProcess = "";
|
||||
try {
|
||||
const _wmInternalBase = (() => {
|
||||
if (process.env.INTERNAL_FETCH_URL) return process.env.INTERNAL_FETCH_URL.replace(/\\/+$/, "");
|
||||
const port = process.env.PORT || "3000";
|
||||
return \`http://localhost:\${port}\`;
|
||||
})();
|
||||
const _wmPublicBase = (process.env.PUBLICATION_URL || process.env.SITE_URL || "").replace(/\\/+$/, "");
|
||||
const fetchUrl = (_wmPublicBase && postUrl.startsWith(_wmPublicBase))
|
||||
? _wmInternalBase + postUrl.slice(_wmPublicBase.length)
|
||||
: postUrl;
|
||||
if (fetchUrl !== postUrl) {
|
||||
console.log(\`[webmention] Fetching \${postUrl} via internal URL: \${fetchUrl}\`);
|
||||
}
|
||||
const _ac = new AbortController();
|
||||
const _timeout = setTimeout(() => _ac.abort(), 15000);
|
||||
// When fetching via internal URL (nginx), send the public Host header so
|
||||
// nginx can route to the correct virtual host.
|
||||
// Without this, nginx sees the internal IP as Host and serves the wrong vhost.
|
||||
const _fetchOpts = { signal: _ac.signal };
|
||||
if (fetchUrl !== postUrl && _wmPublicBase) {
|
||||
_fetchOpts.headers = { host: new URL(_wmPublicBase).hostname };
|
||||
}
|
||||
const pageResponse = await fetch(fetchUrl, _fetchOpts);
|
||||
clearTimeout(_timeout);
|
||||
if (pageResponse.ok) {
|
||||
const _html = await pageResponse.text();
|
||||
// Validate the response is a real post page, not an error/502 page.
|
||||
// extractLinks scopes to .h-entry, so if there's no .h-entry the page
|
||||
// is not a valid post (e.g. nginx 502, login redirect, error template).
|
||||
if (_html.includes("h-entry") /* [patched:hentry-syntax] */ || _html.includes("h-entry ")) {
|
||||
contentToProcess = _html;
|
||||
} else {
|
||||
console.log(\`[webmention] Live page for \${postUrl} has no .h-entry — skipping (fetched: \${fetchUrl}, host-sent: \${_fetchOpts.headers?.host ?? "(none)"}, preview: \${_html.slice(0, 200).replace(/[\\n\\r]+/g, " ")})\`);
|
||||
const newBlock = ` // [patched:livefetch:v5] Build synthetic h-entry HTML from stored post properties.
|
||||
// The stored properties already contain all microformat target URLs
|
||||
// (in-reply-to, like-of, bookmark-of, repost-of) and content.html has inline
|
||||
// links — no live page fetch needed, and no exposure to internal DNS issues.
|
||||
const _propLinks = {
|
||||
"in-reply-to": "u-in-reply-to",
|
||||
"like-of": "u-like-of",
|
||||
"bookmark-of": "u-bookmark-of",
|
||||
"repost-of": "u-repost-of",
|
||||
"syndication": "u-syndication",
|
||||
};
|
||||
const _anchors = [];
|
||||
for (const [_prop, _cls] of Object.entries(_propLinks)) {
|
||||
const _vals = post.properties[_prop];
|
||||
if (!_vals) continue;
|
||||
for (const _v of (Array.isArray(_vals) ? _vals : [_vals])) {
|
||||
const _href = (typeof _v === "string") ? _v : (_v?.properties?.url?.[0] ?? _v?.value ?? null);
|
||||
if (_href && /^https?:\\/\\//.test(_href)) {
|
||||
_anchors.push(\`<a class="\${_cls}" href="\${_href}"></a>\`);
|
||||
}
|
||||
} else {
|
||||
console.log(\`[webmention] Live page returned \${pageResponse.status} for \${fetchUrl}\`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(\`[webmention] Could not fetch live page for \${postUrl}: \${error.message}\`);
|
||||
}
|
||||
|
||||
if (!contentToProcess) {
|
||||
// Live page missing or invalid — skip without marking sent so the next
|
||||
// poll retries. Don't fall back to stored content because it lacks the
|
||||
// template-rendered microformat links we need.
|
||||
console.log(\`[webmention] No valid page for \${postUrl}, will retry next poll\`);
|
||||
continue;
|
||||
}`;
|
||||
|
||||
// Lines changed in v2 → v3: fetch call + log message.
|
||||
// Match just the fetch call so we can upgrade without re-matching the whole block.
|
||||
const v2FetchLine = ` const pageResponse = await fetch(fetchUrl, { signal: _ac.signal });`;
|
||||
const v3FetchLines = ` // When fetching via internal URL (nginx), send the public Host header so
|
||||
// nginx can route to the correct virtual host.
|
||||
// Without this, nginx sees the internal IP as Host and serves the wrong vhost.
|
||||
const _fetchOpts = { signal: _ac.signal };
|
||||
if (fetchUrl !== postUrl && _wmPublicBase) {
|
||||
_fetchOpts.headers = { host: new URL(_wmPublicBase).hostname };
|
||||
}
|
||||
const pageResponse = await fetch(fetchUrl, _fetchOpts);`;
|
||||
|
||||
const v2DiagLine = ` console.log(\`[webmention] Live page for \${postUrl} has no .h-entry — skipping (error page?)\`);`;
|
||||
const v3DiagLine = ` console.log(\`[webmention] Live page for \${postUrl} has no .h-entry — skipping (fetched: \${fetchUrl}, host-sent: \${_fetchOpts.headers?.host ?? "(none)"}, preview: \${_html.slice(0, 200).replace(/[\\n\\r]+/g, " ")})\`);`;
|
||||
|
||||
const v2FetchUrlLog = ` const fetchUrl = (_wmPublicBase && postUrl.startsWith(_wmPublicBase))
|
||||
? _wmInternalBase + postUrl.slice(_wmPublicBase.length)
|
||||
: postUrl;
|
||||
const _ac = new AbortController();`;
|
||||
const v3FetchUrlLog = ` const fetchUrl = (_wmPublicBase && postUrl.startsWith(_wmPublicBase))
|
||||
? _wmInternalBase + postUrl.slice(_wmPublicBase.length)
|
||||
: postUrl;
|
||||
if (fetchUrl !== postUrl) {
|
||||
console.log(\`[webmention] Fetching \${postUrl} via internal URL: \${fetchUrl}\`);
|
||||
}
|
||||
const _ac = new AbortController();`;
|
||||
const _bodyHtml = post.properties.content?.html || post.properties.content?.value || "";
|
||||
const contentToProcess = \`<div class="h-entry">\${_anchors.join("")}\${_bodyHtml ? \`<div class="e-content">\${_bodyHtml}</div>\` : ""}</div>\`;`;
|
||||
|
||||
async function exists(p) {
|
||||
try {
|
||||
@@ -182,37 +114,28 @@ if (!(await exists(filePath))) {
|
||||
const source = await readFile(filePath, "utf8");
|
||||
|
||||
if (source.includes(patchMarker)) {
|
||||
console.log("[patch-webmention-sender-livefetch] Already patched (v3)");
|
||||
console.log("[patch-webmention-sender-livefetch] Already patched (v5)");
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
// Upgrade v2 → v3: apply targeted line replacements within the existing v2 block.
|
||||
if (source.includes(v2PatchMarker)) {
|
||||
let upgraded = source
|
||||
.replace(v2PatchMarker, patchMarker)
|
||||
.replace(v2FetchUrlLog, v3FetchUrlLog)
|
||||
.replace(v2FetchLine, v3FetchLines)
|
||||
.replace(v2DiagLine, v3DiagLine);
|
||||
// For v1–v4: extract the old patched block by finding the marker and the
|
||||
// closing "continue;\n }" that ends the if (!contentToProcess) block.
|
||||
const priorMarkers = [
|
||||
"// [patched:livefetch:v4]",
|
||||
"// [patched:livefetch:v3]",
|
||||
"// [patched:livefetch:v2]",
|
||||
"// [patched:livefetch]",
|
||||
];
|
||||
|
||||
if (!upgraded.includes(patchMarker)) {
|
||||
console.warn("[patch-webmention-sender-livefetch] v2→v3 upgrade validation failed, skipping");
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
await writeFile(filePath, upgraded, "utf8");
|
||||
console.log("[patch-webmention-sender-livefetch] Upgraded v2 → v3 (Host header + diagnostics)");
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
// If old v1 patch is applied, we need to replace it with v3.
|
||||
// Extract the old patched block by matching from its marker to the "continue;" that ends it.
|
||||
let oldPatchBlock = null;
|
||||
if (source.includes(oldPatchMarker) && !source.includes(v2PatchMarker)) {
|
||||
const startIdx = source.lastIndexOf(" // [patched:livefetch]");
|
||||
for (const marker of priorMarkers) {
|
||||
if (!source.includes(marker)) continue;
|
||||
const startIdx = source.lastIndexOf(` ${marker}`);
|
||||
const endMarker = " continue;\n }";
|
||||
const endSearch = source.indexOf(endMarker, startIdx);
|
||||
if (startIdx !== -1 && endSearch !== -1) {
|
||||
oldPatchBlock = source.slice(startIdx, endSearch + endMarker.length);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -239,4 +162,4 @@ if (!patched.includes(patchMarker)) {
|
||||
}
|
||||
|
||||
await writeFile(filePath, patched, "utf8");
|
||||
console.log("[patch-webmention-sender-livefetch] Patched successfully (v3)");
|
||||
console.log("[patch-webmention-sender-livefetch] Patched successfully (v5)");
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
import { MongoClient } from "mongodb";
|
||||
import config from "../indiekit.config.mjs";
|
||||
|
||||
const MIGRATION_ID = "webmention-sender-reset-stale-v9";
|
||||
const MIGRATION_ID = "webmention-sender-reset-stale-v10";
|
||||
|
||||
const mongodbUrl = config.application?.mongodbUrl;
|
||||
if (!mongodbUrl) {
|
||||
|
||||
@@ -91,9 +91,9 @@ for (const filePath of candidates) {
|
||||
}
|
||||
|
||||
if (!source.includes(oldSnippet)) {
|
||||
// livefetch v2 replaces the same block — this patch is intentionally superseded.
|
||||
if (source.includes("[patched:livefetch:v2]")) {
|
||||
continue; // silently skip; livefetch v2 is a superset of this patch
|
||||
// Any livefetch version replaces the same block — this patch is superseded.
|
||||
if (/\[patched:livefetch(?::v\d+)?\]/.test(source)) {
|
||||
continue; // silently skip; livefetch is a superset of this patch
|
||||
}
|
||||
console.log(`[patch] webmention-sender-retry: target snippet not found in ${filePath} (package updated?)`);
|
||||
continue;
|
||||
|
||||
Reference in New Issue
Block a user