Add part category analysis
tests/test_part_categories.py (new file, 126 lines)
@@ -0,0 +1,126 @@
"""Tests for the per-part-category aggregates."""

import csv
from pathlib import Path

from lib.rebrickable.part_categories import (
    build_category_totals,
    build_global_totals,
    enrich_categories_with_sets,
    group_rows_by_set,
    load_category_names,
    load_parts_catalog,
)
from lib.rebrickable.stats import read_rows


def write_csv(path: Path, headers: list[str], rows: list[list[str]]) -> None:
    """Write a simple CSV file for the tests."""
    with path.open("w", newline="") as csv_file:
        writer = csv.writer(csv_file)
        writer.writerow(headers)
        writer.writerows(rows)


def test_build_category_totals_and_enrichment(tmp_path: Path) -> None:
    """Compute the per-category shares and flag the structural categories."""
    # Fixtures: two sets (1000-1 and 2000-1) spanning two part categories.
    parts_path = tmp_path / "parts_filtered.csv"
    write_csv(
        parts_path,
        [
            "part_num",
            "color_rgb",
            "is_translucent",
            "set_num",
            "set_id",
            "year",
            "quantity_in_set",
            "is_spare",
            "is_minifig_part",
        ],
        [
            ["p1", "aaaaaa", "false", "1000-1", "1000", "2020", "3", "false", "false"],
            ["p2", "bbbbbb", "false", "1000-1", "1000", "2020", "2", "false", "true"],
            ["p3", "cccccc", "false", "2000-1", "2000", "2021", "4", "false", "false"],
        ],
    )
    parts_catalog_path = tmp_path / "parts.csv"
    write_csv(parts_catalog_path, ["part_num", "name", "part_cat_id"], [["p1", "Brick A", "1"], ["p2", "Head", "99"], ["p3", "Slope", "99"]])
    categories_path = tmp_path / "part_categories.csv"
    write_csv(categories_path, ["id", "name"], [["1", "Baseplates"], ["99", "Bricks"]])
    sets_path = tmp_path / "sets_enriched.csv"
    write_csv(
        sets_path,
        ["set_num", "set_id", "name", "year", "in_collection"],
        [["1000-1", "1000", "Set A", "2020", "true"], ["2000-1", "2000", "Set B", "2021", "false"]],
    )

    # Run the aggregation pipeline end to end on the fixtures.
    parts_rows = read_rows(parts_path)
    grouped = group_rows_by_set(parts_rows)
    categories_by_set_raw, categories_by_year = build_category_totals(
        grouped,
        load_parts_catalog(parts_catalog_path),
        load_category_names(categories_path),
    )
    enriched = enrich_categories_with_sets(categories_by_set_raw, {row["set_num"]: row for row in read_rows(sets_path)})
    global_rows = build_global_totals(enriched)

    assert categories_by_set_raw == [
        {
            "set_num": "1000-1",
            "category_id": "1",
            "category_name": "Baseplates",
            "quantity_non_spare": "3",
            "quantity_minifig": "0",
            "quantity_non_minifig": "3",
            "share_non_spare": "0.6000",
        },
        {
            "set_num": "1000-1",
            "category_id": "99",
            "category_name": "Bricks",
            "quantity_non_spare": "2",
            "quantity_minifig": "2",
            "quantity_non_minifig": "0",
            "share_non_spare": "0.4000",
        },
        {
            "set_num": "2000-1",
            "category_id": "99",
            "category_name": "Bricks",
            "quantity_non_spare": "4",
            "quantity_minifig": "0",
            "quantity_non_minifig": "4",
            "share_non_spare": "1.0000",
        },
    ]
    assert categories_by_year == [
        {
            "year": "2020",
            "category_id": "1",
            "category_name": "Baseplates",
            "quantity_non_spare": "3",
            "share_non_spare": "0.6000",
            "is_structural": "true",
        },
        {
            "year": "2020",
            "category_id": "99",
            "category_name": "Bricks",
            "quantity_non_spare": "2",
            "share_non_spare": "0.4000",
            "is_structural": "false",
        },
        {
            "year": "2021",
            "category_id": "99",
            "category_name": "Bricks",
            "quantity_non_spare": "4",
            "share_non_spare": "1.0000",
            "is_structural": "false",
        },
    ]
    assert enriched[0]["is_structural"] == "true"
    assert enriched[1]["is_structural"] == "false"
    assert global_rows[0]["category_id"] == "99"
    assert global_rows[1]["category_id"] == "1"
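Reviewer note: the helpers exercised above live in lib/rebrickable/part_categories.py, which is not part of this diff. As a reading aid only, here is a minimal sketch of what build_global_totals could look like to satisfy the assertions; it assumes string-encoded quantities and a result ordered by descending quantity_non_spare, and the actual implementation may differ.

# Hypothetical sketch, not the committed implementation.
from collections import defaultdict


def build_global_totals(category_rows: list[dict[str, str]]) -> list[dict[str, str]]:
    """Collapse per-set category rows into one row per category, largest first."""
    quantities: dict[tuple[str, str, str], int] = defaultdict(int)
    for row in category_rows:
        key = (row["category_id"], row["category_name"], row["is_structural"])
        quantities[key] += int(row["quantity_non_spare"])
    total = sum(quantities.values()) or 1
    global_rows = [
        {
            "category_id": category_id,
            "category_name": category_name,
            "quantity_non_spare": str(quantity),
            "share_non_spare": f"{quantity / total:.4f}",
            "is_structural": is_structural,
        }
        for (category_id, category_name, is_structural), quantity in quantities.items()
    ]
    # Sorting by descending quantity yields Bricks (99) before Baseplates (1) in the test above.
    return sorted(global_rows, key=lambda row: int(row["quantity_non_spare"]), reverse=True)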
tests/test_part_categories_plot.py (new file, 45 lines)
@@ -0,0 +1,45 @@
"""Tests for the per-category distribution plots."""

from pathlib import Path

import matplotlib

from lib.plots.part_categories import (
    plot_part_categories_heatmap,
    plot_structural_share_timeline,
    plot_top_part_categories_area,
)

# Use a non-interactive backend so the plots can be rendered headless.
matplotlib.use("Agg")


def test_plot_part_categories_outputs_images(tmp_path: Path) -> None:
    """Generate the three main visuals."""
    by_year = tmp_path / "part_categories_by_year.csv"
    by_global = tmp_path / "part_categories_global.csv"
    by_year.write_text(
        "year,category_id,category_name,quantity_non_spare,share_non_spare,is_structural\n"
        "2020,1,Baseplates,5,0.5,true\n"
        "2020,2,Bricks,5,0.5,false\n"
        "2021,1,Baseplates,2,0.25,true\n"
        "2021,2,Bricks,6,0.75,false\n"
    )
    by_global.write_text(
        "category_id,category_name,quantity_non_spare,share_non_spare,is_structural\n"
        "2,Bricks,11,0.6875,false\n"
        "1,Baseplates,7,0.4375,true\n"
    )
    area_dest = tmp_path / "figures" / "step29" / "top_part_categories_area.png"
    heatmap_dest = tmp_path / "figures" / "step29" / "part_categories_heatmap.png"
    structural_dest = tmp_path / "figures" / "step29" / "structural_share_timeline.png"

    plot_top_part_categories_area(by_year, by_global, area_dest, top_n=2)
    plot_part_categories_heatmap(by_year, heatmap_dest)
    plot_structural_share_timeline(by_year, structural_dest)

    assert area_dest.exists()
    assert heatmap_dest.exists()
    assert structural_dest.exists()
    assert area_dest.stat().st_size > 0
    assert heatmap_dest.stat().st_size > 0
    assert structural_dest.stat().st_size > 0
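Reviewer note: the plotting functions under test come from lib/plots/part_categories.py, which is also outside this diff. The sketch below shows one plausible shape for plot_structural_share_timeline, assuming it reads the by-year CSV, sums the share of structural categories per year, creates the destination directory, and writes a PNG; the committed module may render the figure differently.

# Hypothetical sketch, matching only the behaviour the test checks.
import csv
from collections import defaultdict
from pathlib import Path

import matplotlib.pyplot as plt


def plot_structural_share_timeline(by_year_path: Path, dest: Path) -> None:
    """Plot the yearly share of structural categories and save it as a PNG."""
    shares: dict[str, float] = defaultdict(float)
    with by_year_path.open(newline="") as csv_file:
        for row in csv.DictReader(csv_file):
            if row["is_structural"] == "true":
                shares[row["year"]] += float(row["share_non_spare"])
    years = sorted(shares)
    figure, axis = plt.subplots(figsize=(8, 4))
    axis.plot(years, [shares[year] for year in years], marker="o")
    axis.set_xlabel("Year")
    axis.set_ylabel("Structural share (non-spare parts)")
    dest.parent.mkdir(parents=True, exist_ok=True)
    figure.savefig(dest)
    plt.close(figure)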