added basic series posts

commit a6ac1b4b34 (parent 8d6347d6ca)
5 changed files with 189 additions and 45 deletions
@@ -18,7 +18,7 @@ const contentDirectoryPath = `${outPath}/content`
const mapFilePath = `${outPath}/map.json`

interface Map {
-    // key: YYYYMMDD
+    // key: YYYY-MM-DD
    // value: url
    date: {
        [key: string]: string[]
@@ -48,6 +48,17 @@ interface Map {
        }
    }

+    // series posts have "previous post" and "next post" button so they need to be ordered
+    series: {
+        [key: string]: {
+            title: string
+            toc: string // in series home page and ≡ (3 horizontal line) button
+            length: number
+            order: string[]
+            tags: string[]
+        }
+    }
+
    // urls of unsearchable posts
    // it is here to quickly check if a post exists or not
    unsearchable: {
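
Note: a minimal sketch, not taken from the commit, of what one entry under the new series field of map.json could look like once the generator fills it in; the series name, urls, and values are made-up examples:

// hypothetical map.json series entry (illustration only)
interface SeriesEntry {
    title: string
    toc: string // table of contents shown on the series home page
    length: number
    order: string[] // post urls in reading order, used by the previous/next post buttons
    tags: string[]
}

const exampleSeries: { [key: string]: SeriesEntry } = {
    "/series/learn-typescript": {
        title: "Learn TypeScript",
        toc: "- [What is TypeScript](/series/learn-typescript/1)",
        length: 2,
        order: ["/series/learn-typescript/1", "/series/learn-typescript/2"],
        tags: ["typescript"],
    },
}

console.log(exampleSeries["/series/learn-typescript"].order[0]) // "/series/learn-typescript/1"
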
@@ -61,26 +72,35 @@ interface Map {
const map: Map = {
    date: {},
    tags: {},
-    posts: {},
    meta: {
        tags: [],
    },
+    posts: {},
+    series: {},
    unsearchable: {},
}

// converts file path to url
-function path2URL(pathTpConvert: string): string {
-    return `/${path.relative(markdownPath, pathTpConvert)}`
+function path2URL(pathToConvert: string): string {
+    return `/${path.relative(markdownPath, pathToConvert)}`
        .replace(/\.[^/.]+$/, "") // remove the file extension
        .replace(/ /g, "-") // replace all space with a dash
}

+// gets the text after the last slash
+function path2FileOrFolderName(inputPath: string): string {
+    // remove trailing slash
+    if (inputPath[-1] == "/")
+        inputPath = inputPath.slice(0, inputPath.length - 1)
+
+    // get the last section
+    return inputPath.slice(inputPath.lastIndexOf("/") + 1)
+}
+
// A recursive function that calls itself for every files and directories that it finds
function recursiveParsePosts(fileOrFolderPath: string) {
    // get string after the last slash character
-    const fileOrFolderName = fileOrFolderPath.substring(
-        fileOrFolderPath.lastIndexOf("/") + 1
-    )
+    const fileOrFolderName = path2FileOrFolderName(fileOrFolderPath)

    // ignore if file or directory name starts with a underscore
    if (fileOrFolderName.startsWith("_")) return
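
Note: a standalone sketch, not part of the commit, of what the new path2FileOrFolderName helper returns for a few hypothetical paths. One caveat: inputPath[-1] is always undefined in JavaScript/TypeScript (strings have no negative indexing), so the trailing-slash branch above never runs and a path ending in "/" yields an empty string.

function path2FileOrFolderName(inputPath: string): string {
    // remove trailing slash (inputPath[-1] is always undefined, so this branch never fires)
    if (inputPath[-1] == "/")
        inputPath = inputPath.slice(0, inputPath.length - 1)

    // get the last section
    return inputPath.slice(inputPath.lastIndexOf("/") + 1)
}

console.log(path2FileOrFolderName("markdown/posts/2021/hello.md")) // "hello.md"
console.log(path2FileOrFolderName("markdown/posts/2021"))          // "2021"
console.log(path2FileOrFolderName("markdown/posts/2021/"))         // "" (trailing slash is not stripped)
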
@@ -120,7 +140,7 @@ function recursiveParsePosts(fileOrFolderPath: string) {

        // create directory to put json content files
        fs.mkdirSync(
-            contentFilePath.substring(0, contentFilePath.lastIndexOf("/")),
+            contentFilePath.slice(0, contentFilePath.lastIndexOf("/")),
            { recursive: true }
        )

@@ -159,11 +179,11 @@ function recursiveParsePosts(fileOrFolderPath: string) {
            year: "numeric",
        })

-        const YYYYMMDD = postDate.toISOString().split("T")[0]
-        if (map.date[YYYYMMDD]) {
-            map.date[YYYYMMDD].push(urlPath)
+        const YYYY_MM_DD = postDate.toISOString().split("T")[0]
+        if (map.date[YYYY_MM_DD]) {
+            map.date[YYYY_MM_DD].push(urlPath)
        } else {
-            map.date[YYYYMMDD] = [urlPath]
+            map.date[YYYY_MM_DD] = [urlPath]
        }

        //tags
@@ -184,9 +204,7 @@ function recursiveParsePosts(fileOrFolderPath: string) {

function recursiveParseUnsearchable(fileOrFolderPath: string) {
    // get string after the last slash character
-    const fileOrFolderName = fileOrFolderPath.substring(
-        fileOrFolderPath.lastIndexOf("/") + 1
-    )
+    const fileOrFolderName = path2FileOrFolderName(fileOrFolderPath)

    // ignore if file or directory name starts with a underscore
    if (fileOrFolderName.startsWith("_")) return
@@ -230,7 +248,7 @@ function recursiveParseUnsearchable(fileOrFolderPath: string) {

        // create directory to put json content files
        fs.mkdirSync(
-            contentFilePath.substring(0, contentFilePath.lastIndexOf("/")),
+            contentFilePath.slice(0, contentFilePath.lastIndexOf("/")),
            { recursive: true }
        )

@@ -244,9 +262,9 @@ function recursiveParseUnsearchable(fileOrFolderPath: string) {

        // Parse data that will be written to map.js
        map.unsearchable[
-            urlPath.substring(
+            urlPath.slice(
                urlPath
-                    .substring(1) // ignore the first slash
+                    .slice(1) // ignore the first slash
                    .indexOf("/") + 1
            )
        ] = {
@@ -255,9 +273,121 @@ function recursiveParseUnsearchable(fileOrFolderPath: string) {
    }
}

-// function recursiveParseSeries(filOrFolderPath: string) {
-//     console.log(filOrFolderPath)
-// }
+function recursiveParseSeries(fileOrFolderPath: string) {
+    // get string after the last slash character
+    const fileOrFolderName = path2FileOrFolderName(fileOrFolderPath)
+
+    // ignore if file or directory name starts with a underscore
+    if (fileOrFolderName.startsWith("_")) return
+
+    // get data about the given path
+    const stats = fs.lstatSync(fileOrFolderPath)
+
+    // if it's a directory, call this function to every files/directories in it
+    // if it's a file, parse it and then save it to file
+    if (stats.isDirectory()) {
+        fs.readdirSync(fileOrFolderPath).map((childPath) => {
+            recursiveParseSeries(`${fileOrFolderPath}/${childPath}`)
+        })
+    } else if (stats.isFile()) {
+        // skip if it is not a markdown file
+        if (!fileOrFolderName.endsWith(".md")) {
+            console.log(`Ignoring non markdown file at: ${fileOrFolderPath}`)
+            return
+        }
+
+        if (
+            !fileOrFolderName.includes("_") &&
+            !fileOrFolderName.startsWith("0")
+        )
+            throw Error(`Invalid series post file name at: ${fileOrFolderPath}`)
+
+        // parse markdown metadata
+        const parsedMarkdown = matter(fs.readFileSync(fileOrFolderPath, "utf8"))
+
+        if (!parsedMarkdown.data.title) {
+            throw Error(`Title is not defined in file: ${fileOrFolderPath}`)
+        }
+
+        if (!parsedMarkdown.data.date) {
+            throw Error(`Date is not defined in file: ${fileOrFolderPath}`)
+        }
+
+        // path that will be used as site url
+        let urlPath = path2URL(fileOrFolderPath)
+        urlPath = urlPath.slice(0, urlPath.lastIndexOf("_"))
+        urlPath = urlPath.replace(/\/$/, "") // remove trailing slash
+
+        // urlPath starts with a slash
+        const contentFilePath = `${contentDirectoryPath}${urlPath}.json`
+
+        // create directory to put json content files
+        fs.mkdirSync(
+            contentFilePath.slice(0, contentFilePath.lastIndexOf("/")),
+            { recursive: true }
+        )
+
+        // write content to json file
+        fs.writeFileSync(
+            contentFilePath,
+            JSON.stringify({
+                content: parsedMarkdown.content.trim(),
+            })
+        )
+
+        // Parse data that will be written to map.js
+        const postData = {
+            title: parsedMarkdown.data.title,
+            preview: "",
+            date: "",
+            tags: [],
+            toc: toc(parsedMarkdown.content).content,
+        }
+
+        // content preview
+        // parsedMarkdown.excerpt is intentionally not used
+        // todo: fix potential improper closing of html tag
+        const slicedContent = parsedMarkdown.content.split(" ")
+        if (slicedContent.length > 19) {
+            postData.preview = slicedContent.slice(0, 19).join(" ") + " ..."
+        } else {
+            postData.preview = parsedMarkdown.content
+        }
+
+        // date
+        const postDate = new Date(parsedMarkdown.data.date)
+        postData.date = postDate.toLocaleString("default", {
+            month: "short",
+            day: "numeric",
+            year: "numeric",
+        })
+
+        const YYYY_MM_DD = postDate.toISOString().split("T")[0]
+        if (map.date[YYYY_MM_DD]) {
+            map.date[YYYY_MM_DD].push(urlPath)
+        } else {
+            map.date[YYYY_MM_DD] = [urlPath]
+        }
+
+        //tags
+        postData.tags = parsedMarkdown.data.tags
+        if (postData.tags) {
+            postData.tags.forEach((tag) => {
+                if (map.tags[tag]) {
+                    map.tags[tag].push(urlPath)
+                } else {
+                    map.tags[tag] = [urlPath]
+                }
+            })
+        }
+
+        if (fileOrFolderName.startsWith("0")) {
+            console.log("new series")
+        } else {
+            map.series[urlPath] = { ...postData, order: [], length: 0 }
+        }
+    }
+}

// Delete existing files

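
Note: to make the file naming rule above concrete, here is a standalone sketch, not from the commit, tracing how a hypothetical series post file name becomes a url. Series file names must contain an underscore or start with "0", and everything from the last underscore onward is cut off; all names below are made up.

// hypothetical path of a post inside the markdown series directory, relative to markdownPath
const relativeUrl = "/series/learn-typescript/1_what-is-typescript.md"

// path2URL equivalent for this example: drop the extension, replace spaces with dashes
let urlPath = relativeUrl.replace(/\.[^/.]+$/, "").replace(/ /g, "-")

// recursiveParseSeries then cuts at the last underscore and strips a trailing slash
urlPath = urlPath.slice(0, urlPath.lastIndexOf("_"))
urlPath = urlPath.replace(/\/$/, "")

console.log(urlPath) // "/series/learn-typescript/1"

A file whose name starts with "0" marks the series home; this commit only logs "new series" for it, while every other series post gets its own map.series entry with an empty order array for now.
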
@@ -281,12 +411,12 @@ if (!fs.lstatSync(markdownPath + "/posts").isDirectory())
if (!fs.lstatSync(markdownPath + "/unsearchable").isDirectory())
    throw Error(`Cannot find directory: ${markdownPath + "/posts"}`)

-// if (!fs.lstatSync(markdownPath + "/series").isDirectory())
-//     throw Error(`Cannot find directory: ${markdownPath + "/posts"}`)
+if (!fs.lstatSync(markdownPath + "/series").isDirectory())
+    throw Error(`Cannot find directory: ${markdownPath + "/posts"}`)

recursiveParsePosts(markdownPath + "/posts")
recursiveParseUnsearchable(markdownPath + "/unsearchable")
-// recursiveParseSeries(markdownPath + "/series")
+recursiveParseSeries(markdownPath + "/series")

// sort dates
let dateKeys: string[] = []
@@ -1,3 +1,4 @@
---
title: quotes
+date: 2021-08-01
---
@@ -69,13 +69,17 @@ export default class PostCard extends React.Component<PostCardProps> {
                </StyledTitle>
                <small>
                    <table>
-                        {this.props.postData.tags.map((tag) => {
-                            return (
-                                <td key={this.props.postData.title + tag}>
-                                    <Tag text={tag} />
-                                </td>
-                            )
-                        })}
+                        {this.props.postData.tags ? (
+                            this.props.postData.tags.map((tag) => {
+                                return (
+                                    <td key={this.props.postData.title + tag}>
+                                        <Tag text={tag} />
+                                    </td>
+                                )
+                            })
+                        ) : (
+                            <></>
+                        )}
                    </table>
                    Published on{" "}
                    {this.props.postData?.date
@@ -37,7 +37,9 @@ export default class Page extends React.Component<PageProps, PageState> {
        let _isUnsearchable = false

        // fetch page
-        let fetchedPage = posts.posts[url]
+        let fetchedPage = url.startsWith("/posts")
+            ? posts.posts[url]
+            : posts.series[url]
        if (!fetchedPage) {
            fetchedPage = posts.unsearchable[url]
            _isUnsearchable = true
@@ -50,15 +52,9 @@ export default class Page extends React.Component<PageProps, PageState> {
            }
        }

-        let fetched_content
-        if (_isUnsearchable) {
-            fetched_content = (
-                await import(`../data/content/unsearchable${url}.json`)
-            ).content
-        } else {
-            fetched_content = (await import(`../data/content${url}.json`))
-                .content
-        }
+        const fetched_content = _isUnsearchable
+            ? (await import(`../data/content/unsearchable${url}.json`)).content
+            : (await import(`../data/content${url}.json`)).content

        fetchedPage.content = fetched_content ? fetched_content : "No content"
        fetchedPage.toc = fetchedPage?.toc ? fetchedPage.toc : undefined
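
Note: a sketch, not from the commit, of the lookup rule the rewritten fetch follows. Page metadata comes from posts.posts or posts.series depending on the url prefix, while the content json is always imported from data/content (or data/content/unsearchable for unsearchable pages); the urls below are hypothetical.

// which json file gets imported for a given url (illustration only)
function contentImportPath(url: string, isUnsearchable: boolean): string {
    return isUnsearchable
        ? `../data/content/unsearchable${url}.json`
        : `../data/content${url}.json`
}

console.log(contentImportPath("/posts/2021/hello", false))          // "../data/content/posts/2021/hello.json"
console.log(contentImportPath("/series/learn-typescript/1", false)) // "../data/content/series/learn-typescript/1.json"
console.log(contentImportPath("/about", true))                      // "../data/content/unsearchable/about.json"
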
@@ -71,10 +71,23 @@ export default class PostList extends React.Component<
                if (postCount >= this.state.howMany) break

                postCount++
-                const url = posts.date[date][length - i - 1]
-                PostCards.push(
-                    <PostCard postData={{ url: url, ...posts.posts[url] }} />
-                )
+                const url: string = posts.date[date][length - i - 1]
+                if (url.startsWith("/posts")) {
+                    PostCards.push(
+                        <PostCard
+                            postData={{ url: url, ...posts.posts[url] }}
+                        />
+                    )
+                } else {
+                    PostCards.push(
+                        <PostCard
+                            postData={{
+                                url: url,
+                                ...posts.series[url],
+                            }}
+                        />
+                    )
+                }
            }
        }
