wip page links and display

kbe
2025-08-19 11:34:40 +02:00
parent dd3a0240f9
commit 140552a35f
4 changed files with 142 additions and 46 deletions


@@ -4,32 +4,32 @@ const fetch = (...args) => import('node-fetch').then(({default: fetch}) => fetch(...args));
 const WORDPRESS_API = 'https://www.mistergeek.net/wp-json/wp/v2';
 const OUTPUT_DIR = path.join(__dirname, '..', 'data', 'wordpress');
 const HUGO_DATA_DIR = path.join(__dirname, '..', 'data');
-async function fetchPosts(page = 1, perPage = 100) {
-  const response = await fetch(`${WORDPRESS_API}/posts?page=${page}&per_page=${perPage}&_embed`);
-  const posts = await response.json();
-  if (response.headers.get('x-wp-totalpages') > page) {
-    const nextPosts = await fetchPosts(page + 1, perPage);
-    return [...posts, ...nextPosts];
+async function fetchAll(endpoint, perPage = 100) {
+  let page = 1;
+  let items = [];
+  while (true) {
+    const url = `${WORDPRESS_API}/${endpoint}?page=${page}&per_page=${perPage}&_embed`;
+    const response = await fetch(url);
+    // If endpoint does not support paging or returns empty, break
+    if (!response.ok) {
+      // 400 often means "page out of range" for WP; stop paging
+      if (response.status === 400) break;
+      throw new Error(`Failed to fetch ${endpoint} (page ${page}): ${response.status} ${response.statusText}`);
+    }
+    const batch = await response.json();
+    items = items.concat(batch);
+    const totalPages = parseInt(response.headers.get('x-wp-totalpages') || '1', 10);
+    if (page >= totalPages) break;
+    page++;
   }
-  return posts;
-}
-async function fetchCategories() {
-  const response = await fetch(`${WORDPRESS_API}/categories?per_page=100`);
-  return response.json();
-}
-async function fetchTags() {
-  const response = await fetch(`${WORDPRESS_API}/tags?per_page=100`);
-  return response.json();
-}
-async function fetchAuthors() {
-  const response = await fetch(`${WORDPRESS_API}/users?per_page=100`);
-  return response.json();
+  return items;
 }
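
A minimal usage sketch of the new fetchAll helper (not part of the diff; the call sites below are hypothetical): it walks the given WP REST endpoint via ?page=N until the X-WP-TotalPages header is exhausted, and treats a 400 response as "past the last page" rather than an error, while other HTTP errors are thrown.

// Illustration only: assumes fetchAll from the hunk above is in scope.
(async () => {
  const pages = await fetchAll('pages');       // every page the API exposes
  const posts = await fetchAll('posts', 50);   // smaller batches, more requests
  console.log(`fetched ${pages.length} pages and ${posts.length} posts`);
})().catch(err => {
  // HTTP errors other than 400 are thrown by fetchAll and land here
  console.error(err);
  process.exit(1);
});
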
@@ -39,21 +39,45 @@ async function generateData() {
   }
   console.log('Fetching WordPress data...');
-  const [posts, categories, tags, authors] = await Promise.all([
-    fetchPosts(),
-    fetchCategories(),
-    fetchTags(),
-    fetchAuthors()
+  // Fetch all relevant endpoints concurrently (pages + posts + taxonomies + users)
+  const [
+    posts,
+    pages,
+    categories,
+    tags,
+    authors
+  ] = await Promise.all([
+    fetchAll('posts'),
+    fetchAll('pages'),
+    fetchAll('categories'),
+    fetchAll('tags'),
+    fetchAll('users')
   ]);
+  // Filter pages to only include published pages
+  const publishedPages = pages.filter(page => page.status === 'publish');
+  // Create navigation data from published pages
+  const navigationData = publishedPages.map(page => ({
+    id: page.id,
+    title: page.title?.rendered || page.slug,
+    slug: page.slug,
+    link: page.link,
+    date: page.date,
+    modified: page.modified
+  }));
   // Save data as JSON files
   fs.writeFileSync(path.join(OUTPUT_DIR, 'posts.json'), JSON.stringify(posts, null, 2));
+  fs.writeFileSync(path.join(OUTPUT_DIR, 'pages.json'), JSON.stringify(publishedPages, null, 2));
   fs.writeFileSync(path.join(OUTPUT_DIR, 'categories.json'), JSON.stringify(categories, null, 2));
   fs.writeFileSync(path.join(OUTPUT_DIR, 'tags.json'), JSON.stringify(tags, null, 2));
   fs.writeFileSync(path.join(OUTPUT_DIR, 'authors.json'), JSON.stringify(authors, null, 2));
+  fs.writeFileSync(path.join(OUTPUT_DIR, 'navigation.json'), JSON.stringify(navigationData, null, 2));
-  console.log(`✅ Fetched ${posts.length} posts, ${categories.length} categories, ${tags.length} tags, ${authors.length} authors`);
+  console.log(`✅ Fetched ${posts.length} posts, ${publishedPages.length} pages, ${categories.length} categories, ${tags.length} tags, ${authors.length} authors`);
+  console.log(`✅ Generated navigation data with ${navigationData.length} items`);
 }
 generateData().catch(console.error);
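
The generated navigation.json lands under the Hugo data directory (data/wordpress/), so the "page links and display" half of this WIP presumably reads it at build time. A rough sketch of what a consumer of that file could look like; the script location, slug-based URLs, and anchor markup are assumptions for illustration, not part of this commit:

// Hypothetical consumer of data/wordpress/navigation.json (illustration only;
// assumes this script sits in the same scripts folder as the generator above).
const fs = require('fs');
const path = require('path');

const navPath = path.join(__dirname, '..', 'data', 'wordpress', 'navigation.json');
const navigation = JSON.parse(fs.readFileSync(navPath, 'utf8'));

// Each entry carries id, title, slug, link, date and modified (see generateData above).
const links = navigation.map(item => `<a href="/${item.slug}/">${item.title}</a>`);
console.log(links.join('\n'));
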