New blog post structure and api functions #33
Open: ShocKWavEv1 wants to merge 16 commits into development from feat/update-blog-structure.
Changes from all commits (16 commits, all by ShocKWavEv1):

c2bb429  feat: Update new folder structure for posts and update query to retri…
abf436e  feat: Update extractData function
0ad75bc  feat: Time got with demo chart communicating from the api route
6616e30  feat: Update endpoint to retireve data
328e411  feat: Update file path
1b34b5d  feat: Add vercel json to include csv files
8725d97  feat: Use dirname for using csv file
2ef5127  feat: Get rid of dirname path
cb2c8bb  feat: Update data for blog charts to its own root folder
7ba306d  feat: Include blogChart to vercel functions
e087604  feat: Update blogCharts
48f9f21  feat: Refactor endpoint for blog detail
277fc46  feat: No need of a new folder for each posts, because now all the csv…
3d29015  feat: Add all new charts for all blog posts, include parse for boolea…
3146f62  feat: Get rid of extra symbol in manual arima post
20d28d1  feat: Update cursor comment
File changed: `api/[fileName].js` (@@ -1,67 +1,264 @@). The previous 67-line handler read `posts/${fileName}.md` straight from `req.query`, parsed the frontmatter inline, and built the JSON response in place. The restructured version below (the resulting file after all commits) adds filename and dataSource sanitization, a multi-path markdown lookup, chart extraction from fenced `chart` / `chart-multiple` blocks, and CSV loading via Papa Parse:

````js
import fs from "fs";
import path from "path";
import Papa from "papaparse";

const PAPAPARSE_CONFIG = {
  header: true,
  dynamicTyping: true,
  skipEmptyLines: true,
  transformHeader: (header) => header.trim(),
  transform: (value, header) => {
    // Trim whitespace
    if (typeof value === "string") {
      value = value.trim();
    }

    // List your actual boolean columns here
    const booleanColumns = ["anomaly", "is_anomaly", "threshold"];

    // Only convert to boolean for specific columns
    if (booleanColumns.includes(header)) {
      const lowerValue =
        typeof value === "string" ? value.toLowerCase() : String(value);
      if (lowerValue === "true" || lowerValue === "1") return true;
      if (lowerValue === "false" || lowerValue === "0") return false;
    }

    return value;
  },
};

const CORS_HEADERS = {
  "Access-Control-Allow-Origin": "*",
  "Access-Control-Allow-Methods": "GET,OPTIONS",
  "Access-Control-Allow-Headers": "Content-Type",
};

export default function handler(req, res) {
  Object.entries(CORS_HEADERS).forEach(([key, value]) => {
    res.setHeader(key, value);
  });

  if (req.method === "OPTIONS") {
    return res.status(200).end();
  }

  try {
    const { fileName } = req.query;

    if (!fileName) {
      return res.status(400).json({ error: "Missing fileName parameter" });
    }

    const postSlug = sanitizeFileName(fileName);
    const mdPath = findMarkdownFile(postSlug);

    if (!mdPath) {
      return res.status(404).json({ error: "Markdown file not found" });
    }

    const markdownContent = fs.readFileSync(mdPath, "utf-8");
    const { frontmatter, content } = parseFrontmatter(markdownContent);
    const { contentWithPlaceholders, charts } = extractCharts(
      content,
      postSlug
    );

    const response = {
      title: frontmatter.title || null,
      author_name: frontmatter.author_name || null,
      author_image: frontmatter.author_image || null,
      author_position: frontmatter.author_position || null,
      publication_date: frontmatter.publication_date || null,
      description: frontmatter.description || null,
      image: frontmatter.image || null,
      categories: frontmatter.categories || null,
      tags: frontmatter.tags || null,
      fileName: postSlug,
      readTimeMinutes: calculateReadTime(content),
      content: contentWithPlaceholders,
      charts,
    };

    return res.json(response);
  } catch (error) {
    console.error("API Error:", error);
    return res.status(500).json({
      error: "Internal server error",
      message: error.message,
    });
  }
}

function findMarkdownFile(postSlug) {
  const possiblePaths = [
    path.join(process.cwd(), "posts", `${postSlug}.md`),
    path.join(process.cwd(), "..", "posts", `${postSlug}.md`),
    path.join("/var/task/posts", `${postSlug}.md`),
  ];

  for (const filePath of possiblePaths) {
    if (fs.existsSync(filePath)) {
      return filePath;
    }
  }

  return null;
}

function sanitizeFileName(fileName) {
  if (!fileName || typeof fileName !== "string") {
    throw new Error("Invalid fileName parameter");
  }

  const sanitized = fileName
    .replace(/\.md$/, "")
    .replace(/[\\/]/g, "")
    .replace(/\.\./g, "")
    .replace(/^\.+/, "")
    .replace(/[^a-zA-Z0-9_-]/g, "");

  if (!sanitized) {
    throw new Error("Invalid fileName after sanitization");
  }

  return sanitized;
}

function sanitizeDataSource(dataSource) {
  if (!dataSource || typeof dataSource !== "string") {
    throw new Error("Invalid dataSource parameter");
  }

  const sanitized = dataSource
    .replace(/\\/g, "/")
    .replace(/\.\.\/*/g, "")
    .replace(/^\/+/, "");

  if (!/^[a-zA-Z0-9_\-\/\.]+$/.test(sanitized)) {
    throw new Error("Invalid characters in dataSource");
  }

  if (!sanitized.endsWith(".csv")) {
    throw new Error("dataSource must be a CSV file");
  }

  return sanitized;
}

function parseFrontmatter(raw) {
  const match = raw.match(/^---([\s\S]*?)---\s*([\s\S]*)$/);

  if (!match) {
    throw new Error("Invalid markdown frontmatter format");
  }

  const [, frontmatterRaw, content] = match;
  const frontmatter = {};

  frontmatterRaw.split("\n").forEach((line) => {
    const lineMatch = line.match(/^([a-zA-Z0-9_\-]+):\s*(.*)$/);
    if (!lineMatch) return;

    const key = lineMatch[1].trim();
    let value = lineMatch[2].trim();

    value = removeQuotes(value);
    value = parseArrayValue(value);

    frontmatter[key] = value;
  });

  return { frontmatter, content: content.trim() };
}

function removeQuotes(value) {
  if (
    (value.startsWith('"') && value.endsWith('"')) ||
    (value.startsWith("'") && value.endsWith("'"))
  ) {
    return value.slice(1, -1);
  }
  return value;
}

function parseArrayValue(value) {
  if (value.startsWith("[") && value.endsWith("]")) {
    try {
      return JSON.parse(value.replace(/'/g, '"'));
    } catch {
      return value;
    }
  }
  return value;
}

function extractCharts(content, postSlug) {
  if (!content.includes("```chart")) {
    return { contentWithPlaceholders: content, charts: {} };
  }

  const charts = {};
  let chartIndex = 0;

  const processChart = (match, chartJson, type) => {
    try {
      const chartData = JSON.parse(chartJson.trim());
      const chartId = chartData.id || `${type}-${chartIndex++}`;

      if (chartData.dataSource) {
        chartData.data = loadChartData(postSlug, chartData.dataSource);
      }

      chartData.type = type;
      charts[chartId] = chartData;

      return `{{CHART:${chartId}}}`;
    } catch (error) {
      console.error(`Failed to process ${type}:`, error.message);
      return match;
    }
  };

  let contentWithPlaceholders = content
    .replace(/```chart-multiple\s*\n([\s\S]*?)\n```/g, (match, json) =>
      processChart(match, json, "chart-multiple")
    )
    .replace(/```chart\s*\n([\s\S]*?)\n```/g, (match, json) =>
      processChart(match, json, "chart")
    );

  return { contentWithPlaceholders, charts };
}

function loadChartData(postSlug, dataSource) {
  const sanitizedDataSource = sanitizeDataSource(dataSource);
  const csvPath = path.join(
    process.cwd(),
    "blogCharts",
    postSlug,
    sanitizedDataSource
  );

  if (!fs.existsSync(csvPath)) {
    throw new Error(`CSV file not found: ${sanitizedDataSource}`);
  }

  const csvContent = fs.readFileSync(csvPath, "utf-8");
  const result = Papa.parse(csvContent, PAPAPARSE_CONFIG);

  if (result.errors.length > 0) {
    console.warn(
      `CSV parsing warnings for ${sanitizedDataSource}:`,
      result.errors
    );
  }

  return result.data;
}

function calculateReadTime(content) {
  const wordCount = content.split(" ").length;
  const wordsPerMinute = 200;
  return Math.round(wordCount / wordsPerMinute);
}
````
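For orientation, here is a minimal sketch of the authoring format this handler expects, inferred from `extractCharts` and `loadChartData` above; the slug `example-post`, the chart `id`, and `sample.csv` are placeholder names for illustration, not files in this PR. A post at `posts/example-post.md` might look like:

````markdown
---
title: "Example post"
tags: ["demo"]
---

Intro text for the post.

```chart
{ "id": "loss-curve", "dataSource": "sample.csv" }
```
````

Assuming the default routing for `api/[fileName].js`, a GET to `/api/example-post` would return the frontmatter fields, `content` with the chart block replaced by `{{CHART:loss-curve}}`, and `charts["loss-curve"].data` populated from the rows parsed out of `blogCharts/example-post/sample.csv`.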
Review comment:

The `chartIndex` counter doesn't increment consistently across all charts; it only increments when a chart lacks an explicit ID. This can cause ID collisions when mixing explicit IDs with auto-generated ones.
Analysis
Chart ID collision in extractCharts() causes data loss when mixing explicit and auto-generated IDs
What fails: The `extractCharts()` function in `api/[fileName].js` at line 207 loses charts when mixing explicit chart IDs with auto-generated ones. When a chart has an explicit ID (e.g., "chart-1") and a subsequent chart requires an auto-generated ID that happens to be the same (e.g., "chart-1"), the second chart overwrites the first in the `charts` object, causing permanent data loss.

How to reproduce:
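The original reproduction block was lost in the page export; a sketch consistent with the result below is a post whose body contains three plain chart fences, the first with an explicit `id` and the next two without (the `title` field here is only a label for illustration):

````markdown
```chart
{ "id": "chart-1", "title": "Chart One" }
```

```chart
{ "title": "Chart Two" }
```

```chart
{ "title": "Chart Three" }
```
````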
Result: Only 2 charts are stored in the `charts` object:

- chart-0: "Chart Two"
- chart-1: "Chart Three" (overwrites the explicit "Chart One")

Expected: 3 charts stored with all data preserved:

- chart-1: "Chart One"
- chart-0: "Chart Two"
- chart-2: "Chart Three"

Root cause: The original code only increments `chartIndex` when generating auto-IDs, due to short-circuit evaluation of the `||` operator:
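The line in question, as it appears in `processChart` in the diff above:

```js
const chartId = chartData.id || `${type}-${chartIndex++}`;
```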
When `chartData.id` is truthy (an explicit ID was provided), `chartIndex++` never executes. This means auto-generated IDs can collide with explicit IDs that follow the `type-number` naming pattern.

Fix implemented: Added collision detection by checking whether a generated ID is already in use before assigning it:
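The fix snippet itself did not survive the page export; the following is only a sketch of the described check, written against the names used in `processChart` above, and not necessarily the author's exact implementation:

```js
let chartId = chartData.id;
if (!chartId) {
  // Keep advancing chartIndex until the generated ID is free,
  // so an auto-ID never overwrites an explicit "chart-N" style ID.
  do {
    chartId = `${type}-${chartIndex++}`;
  } while (charts[chartId] !== undefined);
}
```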
This ensures auto-generated IDs skip any numbers that are already occupied by explicit IDs, preventing collisions entirely.