fix: Mastodon API status creation — links, CW, timeline timing

- Provide content as {text, html} with linkified URLs (Micropub's
  markdown-it doesn't have linkify enabled)
- Use content-warning field (not summary) to match native reader and
  AP syndicator expectations
- Remove premature addTimelineItem — post appears in timeline after
  syndication round-trip, not immediately
- Remove processStatusContent (unused after addTimelineItem removal)
- Remove addTimelineItem import
This commit is contained in:
Ricardo
2026-03-26 15:33:38 +01:00
parent 80ef9bca11
commit 1bfeabeaf3
2 changed files with 61 additions and 126 deletions

View File

@@ -21,7 +21,6 @@ import {
boostPost, unboostPost,
bookmarkPost, unbookmarkPost,
} from "../helpers/interactions.js";
import { addTimelineItem } from "../../storage/timeline.js";
import { tokenRequired } from "../middleware/token-required.js";
import { scopeRequired } from "../middleware/scope-required.js";
@@ -166,130 +165,105 @@ router.post("/api/v1/statuses", tokenRequired, scopeRequired("write", "write:sta
}
}
// Build JF2 properties for the Micropub pipeline
// Build JF2 properties for the Micropub pipeline.
// Provide both text and html — linkify URLs since Micropub's markdown-it
// doesn't have linkify enabled. Mentions are preserved as plain text;
// the AP syndicator resolves them via WebFinger for federation delivery.
// Escape HTML special characters, linkify bare URLs, and convert
// newlines to <br> so Mastodon clients receive renderable HTML.
// NOTE: '&' must be escaped to '&amp;' FIRST (before '<' and '>'),
// otherwise the entities produced by the later replacements would be
// double-escaped; the original `.replace(/&/g, "&")` was a no-op and
// left ampersands unescaped in the generated HTML.
const contentText = statusText || "";
const contentHtml = contentText
  .replace(/&/g, "&amp;")
  .replace(/</g, "&lt;")
  .replace(/>/g, "&gt;")
  .replace(/(https?:\/\/[^\s<>&"')\]]+)/g, '<a href="$1">$1</a>')
  .replace(/\n/g, "<br>");
const jf2 = {
type: "entry",
content: statusText || "",
content: { text: contentText, html: `<p>${contentHtml}</p>` },
};
if (inReplyTo) {
jf2["in-reply-to"] = inReplyTo;
}
if (spoilerText) {
jf2.summary = spoilerText;
}
if (sensitive === true || sensitive === "true") {
jf2.sensitive = "true";
}
if (visibility && visibility !== "public") {
jf2.visibility = visibility;
}
// Use content-warning (not summary) to match native reader behavior
if (spoilerText) {
jf2["content-warning"] = spoilerText;
jf2.sensitive = "true";
}
if (language) {
jf2["mp-language"] = language;
}
// Syndicate to AP only — posts from Mastodon clients belong to the fediverse.
// Never cross-post to Bluesky (conversations stay in their protocol).
// The publication URL is the AP syndicator's uid.
// Syndicate to AP — posts from Mastodon clients belong to the fediverse
const publicationUrl = pluginOptions.publicationUrl || baseUrl;
jf2["mp-syndicate-to"] = [publicationUrl.replace(/\/$/, "") + "/"];
// Create post via Micropub pipeline (same functions the Micropub endpoint uses)
// postData.create() handles: normalization, post type detection, path rendering,
// mp-syndicate-to validated against configured syndicators, MongoDB posts collection
// Create post via Micropub pipeline (same internal functions)
const { postData } = await import("@indiekit/endpoint-micropub/lib/post-data.js");
const { postContent } = await import("@indiekit/endpoint-micropub/lib/post-content.js");
const data = await postData.create(application, publication, jf2);
// postContent.create() handles: template rendering, file creation in store
await postContent.create(publication, data);
const postUrl = data.properties.url;
console.info(`[Mastodon API] Created post via Micropub: ${postUrl}`);
// Add to ap_timeline so the post is visible in the Mastodon Client API
// Return a minimal status to the Mastodon client.
// No timeline entry is created here — the post will appear in the timeline
// after the normal flow: Eleventy rebuild → syndication webhook → AP delivery.
const profile = await collections.ap_profile.findOne({});
const handle = pluginOptions.handle || "user";
const actorUrl = profile?.url || `${publicationUrl}/users/${handle}`;
// Extract hashtags from status text and merge with any Micropub categories
const categories = data.properties.category || [];
const inlineHashtags = (statusText || "").match(/(?:^|\s)#([a-zA-Z_]\w*)/g);
if (inlineHashtags) {
const existing = new Set(categories.map((c) => c.toLowerCase()));
for (const match of inlineHashtags) {
const tag = match.trim().slice(1).toLowerCase();
if (!existing.has(tag)) {
existing.add(tag);
categories.push(tag);
}
}
}
// Turn relative media URLs into absolute ones, using the publication
// base URL. String items and { url } objects are both supported; items
// that are already absolute (http/https) pass through untouched.
const resolveMedia = (items) => {
  if (!items?.length) return [];
  const base = publicationUrl.replace(/\/$/, "");
  const absolutize = (path) => `${base}/${path.replace(/^\//, "")}`;
  return items.map((entry) => {
    if (typeof entry === "string") {
      return entry.startsWith("http") ? entry : absolutize(entry);
    }
    if (entry?.url && !entry.url.startsWith("http")) {
      return { ...entry, url: absolutize(entry.url) };
    }
    return entry;
  });
};
// Process content: linkify URLs and extract @mentions
const rawContent = data.properties.content || { text: statusText || "", html: "" };
const processedContent = processStatusContent(rawContent, statusText || "");
const mentions = extractMentions(statusText || "");
const now = new Date().toISOString();
const timelineItem = await addTimelineItem(collections, {
uid: postUrl,
res.json({
id: String(Date.now()),
created_at: new Date().toISOString(),
content: `<p>${contentHtml}</p>`,
url: postUrl,
type: data.properties["post-type"] || "note",
content: processedContent,
summary: spoilerText || "",
sensitive: sensitive === true || sensitive === "true",
uri: postUrl,
visibility: visibility || "public",
sensitive: sensitive === true || sensitive === "true",
spoiler_text: spoilerText || "",
in_reply_to_id: inReplyToId || null,
in_reply_to_account_id: null,
language: language || null,
inReplyTo,
published: data.properties.published || now,
createdAt: now,
author: {
name: profile?.name || handle,
replies_count: 0,
reblogs_count: 0,
favourites_count: 0,
favourited: false,
reblogged: false,
bookmarked: false,
account: {
id: "owner",
username: handle,
acct: handle,
display_name: profile?.name || handle,
url: profile?.url || publicationUrl,
photo: profile?.icon || "",
handle: `@${handle}`,
avatar: profile?.icon || "",
avatar_static: profile?.icon || "",
header: "",
header_static: "",
followers_count: 0,
following_count: 0,
statuses_count: 0,
emojis: [],
bot: false,
fields: [],
},
photo: resolveMedia(data.properties.photo || []),
video: resolveMedia(data.properties.video || []),
audio: resolveMedia(data.properties.audio || []),
category: categories,
counts: { replies: 0, boosts: 0, likes: 0 },
linkPreviews: [],
mentions,
media_attachments: [],
mentions: extractMentions(contentText).map(m => ({
id: "0",
username: m.name.split("@")[1] || m.name,
acct: m.name.replace(/^@/, ""),
url: m.url,
})),
tags: [],
emojis: [],
});
// Serialize and return
const serialized = serializeStatus(timelineItem, {
baseUrl,
favouritedIds: new Set(),
rebloggedIds: new Set(),
bookmarkedIds: new Set(),
pinnedIds: new Set(),
});
res.json(serialized);
} catch (error) {
next(error);
}
@@ -604,45 +578,6 @@ async function loadItemInteractions(collections, item) {
return { favouritedIds, rebloggedIds, bookmarkedIds };
}
/**
 * Linkify bare URLs and turn @user@domain mentions into h-card profile
 * links inside status HTML.
 *
 * Mastodon clients submit plain text, so the server converts URLs and
 * mentions to anchors. HTML that already contains `<a ` tags (i.e. was
 * rendered by the Micropub pipeline) is returned untouched.
 *
 * @param {object} content - { text, html } from the Micropub pipeline
 * @param {string} rawText - Original status text from the client
 * @returns {object} { text, html } with linkified content
 */
function processStatusContent(content, rawText) {
  const text = content.text || rawText || "";
  let html = content.html || content.text || rawText || "";

  // Only process plain content — leave pre-rendered links alone.
  if (!html.includes("<a ")) {
    // Wrap bare http/https URLs in anchor tags.
    html = html.replace(
      /(https?:\/\/[^\s<>"')\]]+)/g,
      '<a href="$1" rel="nofollow noopener noreferrer" target="_blank">$1</a>',
    );

    // Rewrite @user@domain mentions as h-card profile links; the mention
    // must be preceded by whitespace or start-of-string to match.
    html = html.replace(
      /(?:^|\s)(@([a-zA-Z0-9_]+)@([a-zA-Z0-9.-]+\.[a-zA-Z]{2,}))/g,
      (whole, handle, user, host) =>
        whole.replace(
          handle,
          `<span class="h-card"><a href="https://${host}/@${user}" class="u-url mention" rel="nofollow noopener noreferrer" target="_blank">@${user}@${host}</a></span>`,
        ),
    );
  }

  return { text, html };
}
/**
* Extract @user@domain mentions from text into mention objects.
*