Semi-automatic correction of dead links
@@ -285,7 +285,10 @@ async function checkLink(url) {
   let info = runResults.get(url);
   if (!info) {
     const cachedInfo = cache[url];
-    if (!isCacheValid(cachedInfo)) {
+    if (cachedInfo?.manually_killed === true) {
+      // Do not re-test manually killed links
+      info = cachedInfo;
+    } else if (!isCacheValid(cachedInfo)) {
       const host = extractHost(url);
       if (host) {
         await applyHostDelay(host);
@@ -304,12 +307,13 @@ async function checkLink(url) {
       }

       info = {
         ...(cachedInfo || {}),
         status: result.status ?? null,
         errorType: result.errorType || null,
         method: result.method,
         checked: new Date().toISOString(),
       };
-      cache[url] = info;
+      cache[url] = info; // preserves files, manual flags, etc.
+      cacheDirty = true;
       persistCache();
     } else if (cachedInfo) {
@@ -540,10 +544,43 @@ function writeReport(entries) {
   if (cachePruned) {
     cacheDirty = true;
   }
+  // Update file paths, line numbers and ensure manual flags exist
+  for (const entry of uniqueEntries) {
+    const files = Array.from(
+      new Set(entry.occurrences.map((o) => path.relative(SITE_ROOT, o.file)))
+    ).sort((a, b) => a.localeCompare(b));
+    const locations = Array.from(
+      new Set(
+        entry.occurrences.map(
+          (o) => `${path.relative(SITE_ROOT, o.file)}:${o.line}`
+        )
+      )
+    ).sort((a, b) => a.localeCompare(b));
+    const existing = cache[entry.url] || {};
+    cache[entry.url] = {
+      ...existing,
+      manually_validated: existing.manually_validated === true,
+      manually_killed: existing.manually_killed === true,
+      files,
+      locations,
+    };
+    cacheDirty = true;
+  }
+  if (cacheDirty) {
+    ensureDirectoryExists(CACHE_PATH);
+    fs.writeFileSync(CACHE_PATH, yaml.dump(cache));
+    cacheDirty = false;
+  }
+
+  // Exclude manually killed from re-checking and reporting
+  const entriesToCheck = uniqueEntries.filter(
+    (e) => !(cache[e.url] && cache[e.url].manually_killed === true)
+  );
+
   ensureDirectoryExists(PROGRESS_FILE);
   fs.writeFileSync(PROGRESS_FILE, `"url","locations","status"\n`, "utf8");

-  const total = uniqueEntries.length;
+  const total = entriesToCheck.length;
   if (total === 0) {
     process.stdout.write("No external links found.\n");
     ensureDirectoryExists(CACHE_PATH);
@@ -552,7 +589,7 @@ function writeReport(entries) {
     return;
   }

-  const hostGroups = groupEntriesByHost(uniqueEntries);
+  const hostGroups = groupEntriesByHost(entriesToCheck);
  const concurrency = Math.max(1, Math.min(MAX_CONCURRENT_HOSTS, hostGroups.length || 1));
  let processed = 0;
  await runWithConcurrency(
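For context, here is roughly what one entry of the YAML link cache written by fs.writeFileSync(CACHE_PATH, yaml.dump(cache)) could look like after this change. The field names come from the diff above; the URL, the status and method values, and the file paths are invented for illustration. Setting manually_killed: true by hand on such an entry is what now excludes that link from re-checking and from the report, while manually_validated entries keep being skipped as before.

  https://example.com/retired-page:          # hypothetical dead link
    status: 404                              # last HTTP status seen (invented value)
    errorType: null
    method: HEAD                             # request method recorded by the checker (invented value)
    checked: "2024-05-01T12:00:00.000Z"
    manually_validated: false
    manually_killed: true                    # set by hand; the checker will no longer re-test or report it
    files:                                   # pages that still reference the URL, relative to SITE_ROOT
      - content/blog/old-post.md
    locations:                               # same references with line numbers, for fixing them
      - content/blog/old-post.md:42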