Standardize link checking (Uniformisation de la vérification des liens)
tools/lib/http.js (new file, +133 lines)
@@ -0,0 +1,133 @@
const { fetch } = require("undici");
const UserAgent = require("user-agents");

const DEFAULT_ACCEPT =
  "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8";
const DEFAULT_ACCEPT_LANGUAGE = "fr-FR,fr;q=0.9,en;q=0.7";
const DEFAULT_TIMEOUT_MS = 5000;
const DEFAULT_MAX_REDIRECTS = 5;

// Return the caller-supplied user agent if usable, otherwise generate a realistic one.
function buildUserAgent(preferred) {
  if (typeof preferred === "string" && preferred.trim()) {
    return preferred.trim();
  }
  const ua = new UserAgent();
  return ua.toString();
}

// Follow up to maxRedirects 3xx responses manually and return the last response.
async function fetchWithRedirects(targetUrl, options, maxRedirects) {
  let currentUrl = targetUrl;
  let response = null;
  let redirects = 0;

  while (redirects <= maxRedirects) {
    response = await fetch(currentUrl, { ...options, redirect: "manual" });
    const location = response.headers.get("location");
    if (
      response.status >= 300 &&
      response.status < 400 &&
      location &&
      redirects < maxRedirects
    ) {
      if (response.body && typeof response.body.cancel === "function") {
        try {
          await response.body.cancel();
        } catch (_) {
          // Ignore cancellation errors; we're moving to the next hop.
        }
      }
      currentUrl = new URL(location, currentUrl).toString();
      redirects += 1;
      continue;
    }
    break;
  }

  return response;
}

// Issue a single request (HEAD by default) and report status, final URL, and error type.
async function probeUrl(url, options = {}) {
  const method = typeof options.method === "string" ? options.method.toUpperCase() : "HEAD";
  const timeoutMs = Number.isFinite(options.timeoutMs) ? options.timeoutMs : DEFAULT_TIMEOUT_MS;
  const maxRedirects = Number.isFinite(options.maxRedirects)
    ? options.maxRedirects
    : DEFAULT_MAX_REDIRECTS;
  const userAgent = buildUserAgent(options.userAgent);
  const headers = {
    "user-agent": userAgent,
    "accept-language": DEFAULT_ACCEPT_LANGUAGE,
    "accept": DEFAULT_ACCEPT,
    ...(options.headers || {}),
  };

  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);

  try {
    const response = await fetchWithRedirects(
      url,
      {
        method,
        headers,
        signal: controller.signal,
      },
      maxRedirects
    );
    const status = response ? response.status : null;
    const finalUrl = response?.url || url;
    if (response?.body && typeof response.body.cancel === "function") {
      try {
        await response.body.cancel();
      } catch (_) {
        // Ignore cancellation errors; the status is all we needed.
      }
    }
    return {
      status,
      finalUrl,
      method,
      errorType: null,
    };
  } catch (error) {
    if (error.name === "AbortError") {
      return {
        status: null,
        finalUrl: url,
        method,
        errorType: "timeout",
      };
    }
    return {
      status: null,
      finalUrl: url,
      method,
      errorType: "network",
      message: error.message,
    };
  } finally {
    clearTimeout(timer);
  }
}

// A probe is worth retrying when it errored, returned no status, or came back >= 400.
function shouldRetry(result) {
  if (!result) return true;
  if (result.errorType) return true;
  if (typeof result.status !== "number") return true;
  return result.status >= 400;
}

// Check a link: try HEAD first (cheap), then fall back to GET unless the caller opts out.
async function checkUrl(url, options = {}) {
  const firstMethod = options.firstMethod || "HEAD";
  let result = await probeUrl(url, { ...options, method: firstMethod });
  if (options.retryWithGet !== false && shouldRetry(result)) {
    result = await probeUrl(url, { ...options, method: "GET" });
  }
  return result;
}

module.exports = {
  buildUserAgent,
  checkUrl,
  probeUrl,
  shouldRetry,
};
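For reference, here is a minimal sketch of how a link-check script might consume this module; the reportLinks wrapper, the require path, the URL list, and the output format are illustrative, not part of the commit (Node 18+ assumed):

// Hypothetical consumer of tools/lib/http.js; names and paths below are illustrative.
const { checkUrl } = require("./tools/lib/http");

async function reportLinks(urls) {
  for (const url of urls) {
    // checkUrl probes with HEAD first, then retries with GET when the first probe fails.
    const result = await checkUrl(url, { timeoutMs: 5000 });
    const ok = typeof result.status === "number" && result.status < 400;
    console.log(`${ok ? "OK" : "KO"} ${url} -> ${result.status ?? result.errorType}`);
  }
}

reportLinks(["https://example.com/"]).catch(console.error);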
@@ -1,6 +1,6 @@
 const puppeteer = require("puppeteer-extra");
 const StealthPlugin = require("puppeteer-extra-plugin-stealth");
-const UserAgent = require("user-agents");
+const { buildUserAgent } = require("./http");

 puppeteer.use(StealthPlugin());

@@ -8,9 +8,11 @@ puppeteer.use(StealthPlugin());
  * Scrape a webpage to extract metadata and take a screenshot.
  * @param {string} url - The URL of the page to scrape.
  * @param {string} screenshotPath - Path where the screenshot should be saved.
+ * @param {object} options
+ * @param {string} [options.userAgent] - Optional user agent to use for the session.
  * @returns {Promise<object>} - Metadata including title, description, keywords, language, and HTTP status.
  */
-async function scrapePage(url, screenshotPath) {
+async function scrapePage(url, screenshotPath, options = {}) {
   console.log(`🔍 Scraping: ${url}`);

   const browser = await puppeteer.launch({

@@ -35,9 +37,8 @@ async function scrapePage(url, screenshotPath) {

   const page = await browser.newPage();

-  // Generate a fresh, realistic user-agent
-  const userAgent = new UserAgent();
-  await page.setUserAgent(userAgent.toString());
+  const userAgent = buildUserAgent(options.userAgent);
+  await page.setUserAgent(userAgent);

   // Add headers to simulate a real browser
   await page.setExtraHTTPHeaders({
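To show what the shared user agent enables, here is an illustrative sketch that probes a URL first and only scrapes it when the probe succeeds, presenting the same user agent in both steps; the ./scrape module name, its scrapePage export, and the auditPage wrapper are assumptions, not part of the commit:

// Illustrative glue code; module paths and exports are assumed, not taken from the commit.
const { buildUserAgent, probeUrl } = require("./http");
const { scrapePage } = require("./scrape"); // assumed name of the Puppeteer scraper module

async function auditPage(url, screenshotPath) {
  const userAgent = buildUserAgent();
  // Cheap HTTP probe first, reusing one user agent for both requests.
  const probe = await probeUrl(url, { userAgent });
  if (probe.errorType || typeof probe.status !== "number" || probe.status >= 400) {
    return { url, skipped: true, reason: probe.errorType || `HTTP ${probe.status}` };
  }
  // Scrape the resolved URL with the same identity the probe presented.
  return scrapePage(probe.finalUrl, screenshotPath, { userAgent });
}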