Gérer le statut draft pour Lemmy et la météo
This commit is contained in:
@@ -6,6 +6,7 @@ const { extractRawDate, readFrontmatter, writeFrontmatter } = require("./lib/wea
|
||||
const { resolveArticleDate } = require("./lib/weather/time");
|
||||
const { fetchWeather, hasConfiguredProvider, mergeWeather } = require("./lib/weather/providers");
|
||||
const { loadWeatherConfig } = require("./lib/weather/config");
|
||||
const { isEffectivelyPublishedDocument } = require("./lib/publication");
|
||||
|
||||
const CONTENT_ROOT = path.resolve("content");
|
||||
|
||||
@@ -16,6 +17,10 @@ async function processFile(filePath, config, { force = false } = {}) {
|
||||
return { status: "no-frontmatter" };
|
||||
}
|
||||
|
||||
if (isEffectivelyPublishedDocument(frontmatter.doc) === false) {
|
||||
return { status: "draft" };
|
||||
}
|
||||
|
||||
let existingWeather = null;
|
||||
if (frontmatter.doc.has("weather")) {
|
||||
existingWeather = frontmatter.doc.get("weather");
|
||||
@@ -124,6 +129,10 @@ async function main() {
|
||||
updated += 1;
|
||||
console.log(`• Added empty weather to ${relativePath}`);
|
||||
break;
|
||||
case "draft":
|
||||
skipped += 1;
|
||||
console.log(`• Skipped draft article ${relativePath}`);
|
||||
break;
|
||||
default:
|
||||
skipped += 1;
|
||||
}
|
||||
|
||||
71
tools/lib/publication.js
Normal file
71
tools/lib/publication.js
Normal file
@@ -0,0 +1,71 @@
|
||||
/**
 * Interprets a value that may be a serialized boolean.
 * @param {unknown} value Raw value coming from the frontmatter.
 * @returns {boolean|null} true/false when interpretable, otherwise null.
 */
function parseBoolean(value) {
  if (typeof value === "boolean") {
    return value;
  }

  if (typeof value !== "string") {
    return null;
  }

  const normalized = value.trim().toLowerCase();
  if (!normalized) {
    return null;
  }

  // Recognized textual spellings of each boolean value.
  const truthy = new Set(["true", "1", "yes", "on"]);
  const falsy = new Set(["false", "0", "no", "off"]);

  if (truthy.has(normalized)) {
    return true;
  }
  if (falsy.has(normalized)) {
    return false;
  }

  // Anything else is not interpretable as a boolean.
  return null;
}
|
||||
|
||||
/**
 * Tells whether a `draft` value marks the article as a draft.
 * @param {unknown} value Raw value of the `draft` attribute.
 * @returns {boolean} true when the article is a draft.
 */
function isDraftValue(value) {
  // Only an explicit truthy value counts; null/unknown means "not a draft".
  const parsed = parseBoolean(value);
  return parsed === true;
}
|
||||
|
||||
/**
 * Tells whether a plain-object frontmatter corresponds to a published article.
 * @param {Record<string, unknown>|null|undefined} frontmatterData Serialized frontmatter data.
 * @returns {boolean} true when the article is considered published.
 */
function isEffectivelyPublished(frontmatterData) {
  // Missing or non-object frontmatter is treated as published by default.
  const isObject = Boolean(frontmatterData) && typeof frontmatterData === "object";
  if (!isObject) {
    return true;
  }

  return !isDraftValue(frontmatterData.draft);
}
|
||||
|
||||
/**
 * Tells whether a YAML frontmatter document corresponds to a published article.
 * @param {{ get: (key: string) => unknown }|null|undefined} doc YAML document.
 * @returns {boolean} true when the article is considered published.
 */
function isEffectivelyPublishedDocument(doc) {
  // Without a usable document (or get() accessor), default to "published".
  const hasGetter = Boolean(doc) && typeof doc.get === "function";
  if (!hasGetter) {
    return true;
  }

  return !isDraftValue(doc.get("draft"));
}
|
||||
|
||||
// Public API of the publication helpers.
module.exports = {
  parseBoolean,
  isDraftValue,
  isEffectivelyPublished,
  isEffectivelyPublishedDocument,
};
|
||||
@@ -6,6 +6,7 @@ const { Pool } = require("pg");
|
||||
const { loadEnv } = require("./lib/env");
|
||||
const { loadToolsConfig } = require("./lib/config");
|
||||
const { readFrontmatterFile } = require("./lib/frontmatter");
|
||||
const { isEffectivelyPublished } = require("./lib/publication");
|
||||
const {
|
||||
resolveBundlePath,
|
||||
ensureBundleExists,
|
||||
@@ -93,6 +94,10 @@ async function main() {
|
||||
* @param {string} bundleDir Chemin du bundle après déplacement.
|
||||
*/
|
||||
async function updateLemmyIfNeeded(frontmatterData, bundleDir) {
|
||||
if (isEffectivelyPublished(frontmatterData) === false) {
|
||||
return;
|
||||
}
|
||||
|
||||
const commentsUrl = extractCommentsUrl(frontmatterData);
|
||||
if (!commentsUrl) {
|
||||
return;
|
||||
|
||||
@@ -5,14 +5,18 @@ const fs = require("node:fs");
|
||||
const path = require("node:path");
|
||||
const sharp = require("sharp");
|
||||
const { LemmyHttp } = require("lemmy-js-client");
|
||||
const { Pool } = require("pg");
|
||||
const { collectBundles } = require("./lib/content");
|
||||
const { loadToolsConfig } = require("./lib/config");
|
||||
const { parseFrontmatterDate } = require("./lib/datetime");
|
||||
const { loadEnv } = require("./lib/env");
|
||||
const { readFrontmatterFile, writeFrontmatterFile } = require("./lib/frontmatter");
|
||||
const { isEffectivelyPublished } = require("./lib/publication");
|
||||
|
||||
const CONTENT_ROOT = path.join(__dirname, "..", "content");
|
||||
const FRONTMATTER_COMMENT_FIELD = "comments_url";
|
||||
const FRONTMATTER_COVER_FIELD = "cover";
|
||||
const DEFAULT_DATABASE_URL = "postgres:///lemmy?host=/run/postgresql&user=richard";
|
||||
const MAX_COMMUNITY_NAME_LENGTH = 20;
|
||||
const MIN_COMMUNITY_NAME_LENGTH = 3;
|
||||
const MAX_THUMBNAIL_WIDTH = 320;
|
||||
@@ -35,10 +39,12 @@ main().then(
|
||||
* Point d'entrée principal : charge la configuration, collecte les articles et orchestre la synchronisation.
|
||||
*/
|
||||
async function main() {
|
||||
loadEnv();
|
||||
const toolsConfig = await loadToolsConfig(path.join(__dirname, "config", "config.json"));
|
||||
const lemmyConfig = normalizeLemmyConfig(toolsConfig.lemmy);
|
||||
const client = await createLemmyClient(lemmyConfig);
|
||||
const bundles = await collectBundles(CONTENT_ROOT);
|
||||
await purgeDraftPosts(bundles, lemmyConfig, client);
|
||||
console.log("Vérification des communautés Lemmy pour les fils existants…");
|
||||
await ensureRestrictedCommunitiesForExistingThreads(bundles, lemmyConfig, client);
|
||||
const articles = selectArticles(bundles);
|
||||
@@ -128,6 +134,274 @@ async function createLemmyClient(lemmyConfig) {
|
||||
return client;
|
||||
}
|
||||
|
||||
/**
 * Purges Lemmy posts linked to draft articles.
 * Rules applied:
 * - a draft is never synchronized;
 * - if a post already exists on Lemmy, it is physically deleted from the database;
 * - the comments_url field is removed from the draft's frontmatter.
 * @param {Array<object>} bundles Collected Hugo bundles.
 * @param {object} lemmyConfig Lemmy configuration.
 * @param {LemmyHttp} client Authenticated Lemmy client.
 * @returns {Promise<void>} Resolves once the purge is complete.
 */
async function purgeDraftPosts(bundles, lemmyConfig, client) {
  const draftArticles = collectDraftArticles(bundles, lemmyConfig);
  if (draftArticles.length === 0) {
    return;
  }

  // Deduplicated set of Lemmy post ids to delete.
  const postIds = new Set();
  let cleanedFrontmatters = 0;

  for (const article of draftArticles) {
    // A direct id from comments_url, when present.
    const commentsUrl = extractCommentsUrl(article.frontmatter.data);
    const postId = extractPostId(commentsUrl);
    if (postId !== null) {
      postIds.add(postId);
    }

    // Also hunt for stray posts matching the draft's title/URL.
    if (article.title) {
      const matchingIds = await searchDraftPostsByTitle(
        client,
        article.title,
        article.articleUrl,
        lemmyConfig.siteUrl
      );
      for (const matchingId of matchingIds) {
        postIds.add(matchingId);
      }
    } else {
      console.warn(`⚠️ ${article.bundle.relativePath} : titre manquant, recherche Lemmy par titre ignorée.`);
    }

    if (clearDraftCommentsUrl(article)) {
      cleanedFrontmatters += 1;
    }
  }

  const targetIds = Array.from(postIds);
  let deletedCount = 0;

  if (targetIds.length > 0) {
    const pool = new Pool({ connectionString: resolveDatabaseUrl() });
    try {
      deletedCount = await deletePostsPermanently(pool, targetIds);
    } finally {
      // Always close the pool, even when the delete query throws,
      // otherwise the process can hang on open connections.
      await pool.end();
    }
  }

  if (deletedCount > 0 || cleanedFrontmatters > 0) {
    console.log(
      `🧹 Brouillons Lemmy : ${deletedCount} post(s) supprimé(s), ${cleanedFrontmatters} frontmatter(s) nettoyé(s).`
    );
  }
}
|
||||
|
||||
/**
 * Builds the list of draft articles.
 * @param {Array<object>} bundles Collected Hugo bundles.
 * @param {object} lemmyConfig Lemmy configuration.
 * @returns {Array<object>} Drafts along with their Lemmy context.
 */
function collectDraftArticles(bundles, lemmyConfig) {
  const drafts = [];

  for (const bundle of bundles) {
    const frontmatter = readFrontmatterFile(bundle.indexPath);
    // Skip unreadable frontmatter and anything that is not a draft.
    if (!frontmatter || isEffectivelyPublished(frontmatter.data)) {
      continue;
    }

    const rawTitle = frontmatter.data?.title;
    const title = typeof rawTitle === "string" ? rawTitle.trim() : "";

    drafts.push({
      bundle,
      frontmatter,
      title,
      articleUrl: buildArticleUrl(lemmyConfig.siteUrl, bundle.parts),
    });
  }

  return drafts;
}
|
||||
|
||||
/**
 * Searches Lemmy for posts matching a draft's title and URL.
 * @param {LemmyHttp} client Lemmy client.
 * @param {string} title Hugo title of the draft.
 * @param {string} articleUrl Public URL of the Hugo article.
 * @param {string} siteUrl Root URL of the Hugo site.
 * @returns {Promise<number[]>} Matching post identifiers.
 */
async function searchDraftPostsByTitle(client, title, articleUrl, siteUrl) {
  const response = await client.search({
    q: title,
    type_: "Posts",
    limit: 50,
  });

  const posts = response.posts ?? [];
  if (posts.length === 0) {
    return [];
  }

  const wantedUrl = normalizeUrl(articleUrl);
  const sitePrefix = normalizeUrl(siteUrl);
  const matches = [];

  for (const postView of posts) {
    // Titles must match exactly (both sides are trimmed).
    if (readPostTitle(postView) !== title) {
      continue;
    }

    const rawUrl = readPostUrl(postView);
    const candidateUrl = rawUrl ? normalizeUrl(rawUrl) : "";
    if (!candidateUrl) {
      continue;
    }

    // Accept an exact article match, or any post pointing somewhere on our site.
    const sameArticle = candidateUrl === wantedUrl;
    const onOurSite = Boolean(sitePrefix) && candidateUrl.startsWith(`${sitePrefix}/`);
    if (sameArticle || onOurSite) {
      matches.push(postView.post.id);
    }
  }

  return matches;
}
|
||||
|
||||
/**
 * Permanently deletes the targeted Lemmy posts from the database.
 * @param {Pool} pool Postgres pool.
 * @param {number[]} postIds Post identifiers to delete.
 * @returns {Promise<number>} Number of affected rows.
 */
async function deletePostsPermanently(pool, postIds) {
  const sql = "delete from post where id = any($1::int[])";
  const { rowCount } = await pool.query(sql, [postIds]);
  return rowCount;
}
|
||||
|
||||
/**
 * Extracts a cleaned-up Lemmy post title.
 * @param {object} postView Lemmy search result entry.
 * @returns {string} Trimmed title, or an empty string when unavailable.
 */
function readPostTitle(postView) {
  const name = postView?.post?.name;
  return typeof name === "string" ? name.trim() : "";
}
|
||||
|
||||
/**
 * Extracts a cleaned-up Lemmy post URL.
 * @param {object} postView Lemmy search result entry.
 * @returns {string} Trimmed URL, or an empty string when unavailable.
 */
function readPostUrl(postView) {
  const url = postView?.post?.url;
  return typeof url === "string" ? url.trim() : "";
}
|
||||
|
||||
/**
 * Strips comments_url from a draft to prevent any future synchronization.
 * @param {object} article Collected draft entry.
 * @returns {boolean} true when the frontmatter was modified.
 */
function clearDraftCommentsUrl(article) {
  const { frontmatter, bundle } = article;

  // Nothing to clean when the draft never had a comments URL.
  if (!extractCommentsUrl(frontmatter.data)) {
    return false;
  }

  delete frontmatter.data[FRONTMATTER_COMMENT_FIELD];
  writeFrontmatterFile(bundle.indexPath, frontmatter.data, frontmatter.body);
  return true;
}
|
||||
|
||||
/**
 * Extracts the comments_url value from serialized frontmatter data.
 * @param {Record<string, unknown>|null|undefined} frontmatterData Frontmatter data.
 * @returns {string} Trimmed URL, or an empty string.
 */
function extractCommentsUrl(frontmatterData) {
  if (!frontmatterData || typeof frontmatterData !== "object") {
    return "";
  }

  const raw = frontmatterData[FRONTMATTER_COMMENT_FIELD];
  return typeof raw === "string" ? raw.trim() : "";
}
|
||||
|
||||
/**
 * Extracts the numeric identifier from a Lemmy comments_url.
 * @param {string} url URL coming from the frontmatter.
 * @returns {number|null} Identifier, or null when not recognized.
 */
function extractPostId(url) {
  if (typeof url !== "string") {
    return null;
  }

  const trimmed = url.trim();
  if (trimmed === "") {
    return null;
  }

  // Drop trailing slashes so "/post/123/" still matches.
  const withoutTrailingSlashes = trimmed.replace(/\/+$/, "");
  // Accept both "/post/<id>" and "/c/<community>/post/<id>" forms,
  // optionally followed by a query string.
  const match = withoutTrailingSlashes.match(/\/(?:post|c\/[^/]+\/post)\/(\d+)(?:$|\?)/i);
  return match ? Number.parseInt(match[1], 10) : null;
}
|
||||
|
||||
/**
 * Determines the Postgres connection URL.
 * Precedence: LEMMY_DATABASE_URL, then DATABASE_URL, then the built-in default.
 * @returns {string} Connection string.
 */
function resolveDatabaseUrl() {
  for (const name of ["LEMMY_DATABASE_URL", "DATABASE_URL"]) {
    const raw = process.env[name];
    if (typeof raw === "string" && raw.trim()) {
      return raw.trim();
    }
  }

  return DEFAULT_DATABASE_URL;
}
|
||||
|
||||
/**
|
||||
* Prépare la liste des articles à synchroniser : frontmatter présent, date valide, comments_url absent.
|
||||
* Le tri est effectué par date croissante, puis par chemin en cas d'égalité.
|
||||
@@ -144,10 +418,11 @@ function selectArticles(bundles) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const existingComments =
|
||||
typeof frontmatter.data?.[FRONTMATTER_COMMENT_FIELD] === "string"
|
||||
? frontmatter.data[FRONTMATTER_COMMENT_FIELD].trim()
|
||||
: "";
|
||||
if (isEffectivelyPublished(frontmatter.data) === false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const existingComments = extractCommentsUrl(frontmatter.data);
|
||||
if (existingComments) {
|
||||
continue;
|
||||
}
|
||||
@@ -205,10 +480,11 @@ async function ensureRestrictedCommunitiesForExistingThreads(bundles, lemmyConfi
|
||||
continue;
|
||||
}
|
||||
|
||||
const existingComments =
|
||||
typeof frontmatter.data?.[FRONTMATTER_COMMENT_FIELD] === "string"
|
||||
? frontmatter.data[FRONTMATTER_COMMENT_FIELD].trim()
|
||||
: "";
|
||||
if (isEffectivelyPublished(frontmatter.data) === false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const existingComments = extractCommentsUrl(frontmatter.data);
|
||||
if (!existingComments) {
|
||||
continue;
|
||||
}
|
||||
|
||||
63
tools/tests/publication.test.js
Normal file
63
tools/tests/publication.test.js
Normal file
@@ -0,0 +1,63 @@
|
||||
const test = require("node:test");
|
||||
const assert = require("node:assert/strict");
|
||||
const {
|
||||
parseBoolean,
|
||||
isDraftValue,
|
||||
isEffectivelyPublished,
|
||||
isEffectivelyPublishedDocument,
|
||||
} = require("../lib/publication");
|
||||
|
||||
// Verifies parseBoolean accepts the documented textual spellings and
// returns null for anything it cannot interpret (including non-strings).
test("parseBoolean converts common boolean representations", () => {
  assert.equal(parseBoolean(true), true);
  assert.equal(parseBoolean(false), false);
  assert.equal(parseBoolean("true"), true);
  assert.equal(parseBoolean("TRUE"), true);
  assert.equal(parseBoolean("1"), true);
  assert.equal(parseBoolean("on"), true);
  assert.equal(parseBoolean("false"), false);
  assert.equal(parseBoolean("0"), false);
  assert.equal(parseBoolean("off"), false);
  assert.equal(parseBoolean(""), null);
  assert.equal(parseBoolean("unknown"), null);
  // Numbers are deliberately not coerced.
  assert.equal(parseBoolean(1), null);
});

// A missing or falsy draft flag must never mark the article as a draft.
test("isDraftValue returns true only for explicit draft values", () => {
  assert.equal(isDraftValue(true), true);
  assert.equal(isDraftValue("true"), true);
  assert.equal(isDraftValue("yes"), true);
  assert.equal(isDraftValue(false), false);
  assert.equal(isDraftValue("false"), false);
  assert.equal(isDraftValue(undefined), false);
});

// Articles default to "published" when frontmatter is absent or has no draft key.
test("isEffectivelyPublished excludes draft frontmatter", () => {
  assert.equal(isEffectivelyPublished({ draft: true }), false);
  assert.equal(isEffectivelyPublished({ draft: "true" }), false);
  assert.equal(isEffectivelyPublished({ draft: false }), true);
  assert.equal(isEffectivelyPublished({ title: "Article" }), true);
  assert.equal(isEffectivelyPublished(null), true);
});

// The document variant reads the draft flag through a YAML-style get() accessor.
test("isEffectivelyPublishedDocument supports YAML-like get()", () => {
  const docDraft = {
    get(key) {
      if (key === "draft") {
        return true;
      }
      return null;
    },
  };
  const docPublished = {
    get(key) {
      if (key === "draft") {
        return false;
      }
      return null;
    },
  };

  assert.equal(isEffectivelyPublishedDocument(docDraft), false);
  assert.equal(isEffectivelyPublishedDocument(docPublished), true);
  // A missing document also defaults to "published".
  assert.equal(isEffectivelyPublishedDocument(null), true);
});
|
||||
@@ -24,6 +24,7 @@ const { parseFrontmatterDate } = require("./lib/datetime");
|
||||
const { readFrontmatterFile } = require("./lib/frontmatter");
|
||||
const { loadEnv } = require("./lib/env");
|
||||
const { loadToolsConfig } = require("./lib/config");
|
||||
const { isEffectivelyPublished } = require("./lib/publication");
|
||||
|
||||
const CONTENT_ROOT = path.join(__dirname, "..", "content");
|
||||
const DEFAULT_DATABASE_URL = "postgres:///lemmy?host=/run/postgresql&user=richard";
|
||||
@@ -207,6 +208,9 @@ function collectArticlesWithPostId(bundles) {
|
||||
if (!frontmatter) {
|
||||
continue;
|
||||
}
|
||||
if (isEffectivelyPublished(frontmatter.data) === false) {
|
||||
continue;
|
||||
}
|
||||
const publication = parseFrontmatterDate(frontmatter.data?.date);
|
||||
if (!publication) {
|
||||
continue;
|
||||
|
||||
Reference in New Issue
Block a user