Statistics page
tools/lib/config.js (new file, 20 lines)
@@ -0,0 +1,20 @@
const fs = require("fs/promises");
const path = require("path");

let cached = null;

async function loadToolsConfig(configPath = "tools/config.json") {
  const resolved = path.resolve(configPath);
  if (cached && cached.path === resolved) {
    return cached.data;
  }

  const raw = await fs.readFile(resolved, "utf8");
  const data = JSON.parse(raw);
  cached = { path: resolved, data };
  return data;
}

module.exports = {
  loadToolsConfig,
};
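A minimal usage sketch for loadToolsConfig; the calling script location and the contents of tools/config.json are assumptions, not part of this commit. Repeated calls with the same path are served from the in-memory cache.

const { loadToolsConfig } = require("./tools/lib/config");

async function main() {
  const config = await loadToolsConfig(); // reads and parses tools/config.json
  const again = await loadToolsConfig(); // same resolved path: returned from the cache
  console.log(again === config); // true
}

main().catch((error) => {
  console.error(error.message);
  process.exitCode = 1;
});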
tools/lib/content.js (new file, 99 lines)
@@ -0,0 +1,99 @@
const fs = require("fs/promises");
const path = require("path");

async function collectMarkdownFiles(rootDir, { skipIndex = true } = {}) {
  const entries = await fs.readdir(rootDir, { withFileTypes: true });
  const files = [];

  for (const entry of entries) {
    const fullPath = path.join(rootDir, entry.name);

    if (entry.isDirectory()) {
      const nested = await collectMarkdownFiles(fullPath, { skipIndex });
      files.push(...nested);
      continue;
    }

    if (!entry.isFile()) continue;
    if (!entry.name.toLowerCase().endsWith(".md")) continue;
    if (skipIndex && entry.name === "_index.md") continue;

    files.push(fullPath);
  }

  return files;
}

async function collectSectionIndexDirs(rootDir) {
  const sections = new Set();

  async function walk(dir) {
    let entries;
    try {
      entries = await fs.readdir(dir, { withFileTypes: true });
    } catch (error) {
      console.error(`Skipping section scan for ${dir}: ${error.message}`);
      return;
    }

    let hasIndex = false;
    for (const entry of entries) {
      if (entry.isFile() && entry.name.toLowerCase() === "_index.md") {
        hasIndex = true;
        break;
      }
    }

    if (hasIndex) {
      sections.add(path.resolve(dir));
    }

    for (const entry of entries) {
      if (entry.isDirectory()) {
        await walk(path.join(dir, entry.name));
      }
    }
  }

  await walk(rootDir);
  return sections;
}

async function resolveMarkdownTargets(inputs, { rootDir = process.cwd(), skipIndex = true } = {}) {
  if (!inputs || inputs.length === 0) {
    return collectMarkdownFiles(rootDir, { skipIndex });
  }

  const targets = new Set();

  for (const input of inputs) {
    const resolved = path.resolve(input);

    try {
      const stat = await fs.stat(resolved);

      if (stat.isDirectory()) {
        const nested = await collectMarkdownFiles(resolved, { skipIndex });
        nested.forEach((file) => targets.add(file));
        continue;
      }

      if (stat.isFile()) {
        const lower = resolved.toLowerCase();
        if (!lower.endsWith(".md")) continue;
        if (skipIndex && path.basename(resolved) === "_index.md") continue;
        targets.add(resolved);
      }
    } catch (error) {
      console.error(`Skipping ${input}: ${error.message}`);
    }
  }

  return Array.from(targets);
}

module.exports = {
  collectMarkdownFiles,
  collectSectionIndexDirs,
  resolveMarkdownTargets,
};
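A short usage sketch for the helpers above; the content paths passed in are assumptions, not paths from this commit. With no inputs, resolveMarkdownTargets scans the whole root directory; with explicit inputs it accepts a mix of files and directories and skips anything that is not Markdown.

const { resolveMarkdownTargets, collectSectionIndexDirs } = require("./tools/lib/content");

async function main() {
  // Assumed layout: a Hugo-style "content" directory at the repository root.
  const everything = await resolveMarkdownTargets([], { rootDir: "content" });
  const subset = await resolveMarkdownTargets(["content/blog", "content/about.md"]);
  const sections = await collectSectionIndexDirs("content");
  console.log(everything.length, subset.length, sections.size);
}

main().catch(console.error);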
tools/lib/stats/articles.js (new file, 91 lines)
@@ -0,0 +1,91 @@
const path = require("path");
const { DateTime } = require("luxon");
const { collectMarkdownFiles, collectSectionIndexDirs } = require("../content");
const { readFrontmatter } = require("../weather/frontmatter");

function parseDate(value) {
  if (!value) return null;

  if (value instanceof Date) {
    return DateTime.fromJSDate(value);
  }

  if (typeof value === "string") {
    let parsed = DateTime.fromISO(value);

    if (!parsed.isValid) {
      parsed = DateTime.fromRFC2822(value);
    }

    return parsed.isValid ? parsed : null;
  }

  return null;
}

function countWords(body) {
  if (!body) return 0;

  const cleaned = body
    .replace(/```[\s\S]*?```/g, " ") // fenced code blocks
    .replace(/`[^`]*`/g, " ") // inline code
    .replace(/<[^>]+>/g, " "); // html tags

  const words = cleaned.match(/[\p{L}\p{N}'-]+/gu);
  return words ? words.length : 0;
}

async function loadArticles(contentDir) {
  const files = await collectMarkdownFiles(contentDir);
  const sectionDirs = await collectSectionIndexDirs(contentDir);
  const rootDir = path.resolve(contentDir);
  const articles = [];

  function resolveSection(filePath) {
    const absolute = path.resolve(filePath);
    let current = path.dirname(absolute);

    while (current.startsWith(rootDir)) {
      if (sectionDirs.has(current)) {
        return path.relative(rootDir, current).replace(/\\/g, "/") || ".";
      }
      const parent = path.dirname(current);
      if (parent === current) break;
      current = parent;
    }

    return null;
  }

  for (const file of files) {
    const frontmatter = await readFrontmatter(file);
    if (!frontmatter) continue;

    const date = parseDate(frontmatter.doc.get("date"));
    const title = frontmatter.doc.get("title") || path.basename(file, ".md");
    const body = frontmatter.body.trim();
    const wordCount = countWords(body);
    const relativePath = path.relative(contentDir, file);
    const section = resolveSection(file);

    articles.push({
      path: file,
      relativePath,
      title,
      date,
      body,
      wordCount,
      section,
      frontmatter: frontmatter.doc.toJS ? frontmatter.doc.toJS() : frontmatter.doc.toJSON(),
    });
  }

  return articles;
}

module.exports = {
  collectMarkdownFiles,
  countWords,
  loadArticles,
  parseDate,
};
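A sketch of how loadArticles might be consumed to compute simple totals; the "content" directory is an assumption. Each returned article carries a Luxon DateTime (or null) in date, plus wordCount and section.

const { loadArticles } = require("./tools/lib/stats/articles");

async function main() {
  const articles = await loadArticles("content"); // assumed content directory
  const totalWords = articles.reduce((sum, article) => sum + article.wordCount, 0);
  const bySection = new Map();
  for (const article of articles) {
    const key = article.section || "(no section)";
    bySection.set(key, (bySection.get(key) || 0) + 1);
  }
  console.log(`${articles.length} articles, ${totalWords} words`);
  console.log(Object.fromEntries(bySection));
}

main().catch(console.error);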
tools/lib/stats/goaccess.js (new file, 131 lines)
@@ -0,0 +1,131 @@
const { request } = require("undici");
const { DateTime } = require("luxon");

async function fetchGoAccessJson(url) {
  const res = await request(url, { method: "GET" });
  if (res.statusCode < 200 || res.statusCode >= 300) {
    throw new Error(`HTTP ${res.statusCode}`);
  }
  return res.body.json();
}

function crawlerRatios(data) {
  const browsers = data.browsers?.data || [];
  const crawler = browsers.find((entry) => entry.data === "Crawlers");
  if (!crawler) return { hits: 0, visitors: 0 };

  const totalHits = browsers.reduce((sum, entry) => sum + (entry.hits?.count || 0), 0) || 0;
  const totalVisitors = browsers.reduce((sum, entry) => sum + (entry.visitors?.count || 0), 0) || 0;

  const hitRatio = totalHits > 0 ? Math.min(1, (crawler.hits?.count || 0) / totalHits) : 0;
  const visitorRatio = totalVisitors > 0 ? Math.min(1, (crawler.visitors?.count || 0) / totalVisitors) : 0;

  return { hits: hitRatio, visitors: visitorRatio };
}

function groupVisitsByMonth(data, { adjustCrawlers = true } = {}) {
  const entries = data.visitors?.data || [];
  const ratios = adjustCrawlers ? crawlerRatios(data) : { hits: 0, visitors: 0 };
  const months = new Map();

  for (const entry of entries) {
    const dateStr = entry.data;
    if (!/^[0-9]{8}$/.test(dateStr)) continue;
    const year = dateStr.slice(0, 4);
    const month = dateStr.slice(4, 6);
    const day = dateStr.slice(6, 8);
    const key = `${year}-${month}`;

    const hits = entry.hits?.count || 0;
    const visitors = entry.visitors?.count || 0;

    const current = months.get(key) || { hits: 0, visitors: 0, from: null, to: null };
    const isoDate = `${year}-${month}-${day}`;

    current.hits += hits;
    current.visitors += visitors;
    if (!current.from || isoDate < current.from) current.from = isoDate;
    if (!current.to || isoDate > current.to) current.to = isoDate;

    months.set(key, current);
  }

  const adjust = (value, ratio) => {
    if (!adjustCrawlers) return value;
    const scaled = value * (1 - ratio);
    return Math.max(0, Math.round(scaled));
  };

  const sorted = Array.from(months.entries())
    .sort((a, b) => a[0].localeCompare(b[0]))
    .map(([key, value]) => ({
      month: key,
      from: value.from,
      to: value.to,
      hits: adjust(value.hits, ratios.hits),
      visitors: adjust(value.visitors, ratios.visitors),
    }));

  return sorted;
}

function aggregateLastNDays(data, days = 30, { adjustCrawlers = true } = {}) {
  const entries = data.visitors?.data || [];
  if (!entries.length || days <= 0) {
    return { from: null, to: null, hits: 0, visitors: 0 };
  }

  const valid = entries.filter((entry) => /^[0-9]{8}$/.test(entry.data));
  if (valid.length === 0) {
    return { from: null, to: null, hits: 0, visitors: 0 };
  }

  const sorted = valid.slice().sort((a, b) => a.data.localeCompare(b.data));
  const last = sorted[sorted.length - 1];
  const end = DateTime.fromFormat(last.data, "yyyyLLdd", { zone: "UTC" });
  if (!end.isValid) {
    return { from: null, to: null, hits: 0, visitors: 0 };
  }

  const start = end.minus({ days: days - 1 });

  let from = null;
  let to = null;
  let hits = 0;
  let visitors = 0;

  for (const entry of sorted) {
    const current = DateTime.fromFormat(entry.data, "yyyyLLdd", { zone: "UTC" });
    if (!current.isValid) continue;
    if (current < start || current > end) continue;

    const iso = current.toISODate();
    if (!from || iso < from) from = iso;
    if (!to || iso > to) to = iso;

    hits += entry.hits?.count || 0;
    visitors += entry.visitors?.count || 0;
  }

  const ratios = adjustCrawlers ? crawlerRatios(data) : { hits: 0, visitors: 0 };

  const adjust = (value, ratio) => {
    if (!adjustCrawlers) return value;
    const scaled = value * (1 - ratio);
    return Math.max(0, Math.round(scaled));
  };

  return {
    from,
    to,
    hits: adjust(hits, ratios.hits),
    visitors: adjust(visitors, ratios.visitors),
  };
}

module.exports = {
  fetchGoAccessJson,
  groupVisitsByMonth,
  aggregateLastNDays,
  crawlerRatios,
};
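A usage sketch combining the three exported helpers; the report URL is a placeholder and not part of this commit. Both aggregation helpers scale hits and visitors down by the crawler share reported in the browsers panel unless adjustCrawlers is set to false.

const { fetchGoAccessJson, groupVisitsByMonth, aggregateLastNDays } = require("./tools/lib/stats/goaccess");

async function main() {
  // Placeholder URL: point this at an actual GoAccess JSON report.
  const report = await fetchGoAccessJson("https://example.org/stats/goaccess.json");
  const monthly = groupVisitsByMonth(report); // crawler-adjusted by default
  const lastThirtyDays = aggregateLastNDays(report, 30);
  const rawMonthly = groupVisitsByMonth(report, { adjustCrawlers: false });
  console.log(monthly[monthly.length - 1], lastThirtyDays, rawMonthly[rawMonthly.length - 1]);
}

main().catch(console.error);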
tools/lib/stats/python.js (new file, 32 lines)
@@ -0,0 +1,32 @@
const { spawn } = require("child_process");
const path = require("path");

async function renderWithPython({ type, data, outputPath }) {
  return new Promise((resolve, reject) => {
    const scriptPath = path.resolve(__dirname, "../../render_stats_charts.py");
    const child = spawn("python3", [scriptPath, "--type", type, "--output", outputPath], {
      stdio: ["pipe", "inherit", "inherit"],
    });

    const payload = JSON.stringify(data);
    child.stdin.write(payload);
    child.stdin.end();

    child.on("error", (error) => {
      reject(error);
    });

    child.on("exit", (code) => {
      if (code === 0) {
        resolve();
      } else {
        reject(new Error(`Python renderer exited with code ${code}`));
      }
    });
  });
}

module.exports = {
  renderWithPython,
};
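A hedged example of calling the renderer; the chart type, payload shape, and output path below are illustrative only and depend on what render_stats_charts.py actually expects.

const { renderWithPython } = require("./tools/lib/stats/python");

async function main() {
  await renderWithPython({
    type: "monthly", // assumed chart type, not confirmed by this commit
    data: [{ month: "2024-01", hits: 1200, visitors: 380 }],
    outputPath: "static/stats/monthly.svg", // assumed output location
  });
}

main().catch((error) => {
  console.error(`Chart rendering failed: ${error.message}`);
  process.exitCode = 1;
});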