chore: rename "blog-content" to "content"
parent c573fcd9b9
commit e7e50eed39
64 changed files with 27 additions and 29 deletions
8  packages/content/src/config.ts  Normal file
@@ -0,0 +1,8 @@
export const markdownPath = "./markdown" // where it will look for markdown documents
export const outPath = "./dist" // path to the json database

export const contentDirectoryPath = `${outPath}/content`
export const iconsDirectoryPath = `${outPath}/icons`
export const mapFilePath = `${outPath}/map.json`
export const portfolioFilePath = `${outPath}/portfolio.json`
export const searchIndexFilePath = `${outPath}/search.json`

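For orientation, the path constants above (together with the writes made elsewhere in this commit) imply an output tree roughly like the sketch below; this is inferred, not part of the diff itself:

dist/
├── content/          one json file per post/page (content + table of contents)
├── icons/            badge data extracted from simple-icons for portfolio projects
├── public/img/       rendered skills.svg
├── map.json          ContentMap: dates, tags, posts, series, unsearchable pages
├── portfolio.json    PortfolioData, with the skills Set serialized as an array
└── search.json       serialized elasticlunr search index
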
86  packages/content/src/index.ts  Normal file
@@ -0,0 +1,86 @@
/**
 * @file Read markdown files and write their content and metadata to json files which can then be imported by React.
 * - File and directory names starting with an underscore (_) are ignored.
 * - Symbolic links are not supported.
 * - The filename-to-URL converter isn't perfect. Some non-URL-friendly filenames might cause problems.
 * - Series file names must start with a number followed by an underscore.
 */

import fs from "fs"

import { mapFilePath, markdownPath, portfolioFilePath } from "./config"
import postProcess from "./postProcess"
import { recursiveParse } from "./recursiveParse"
import { saveIndex } from "./searchIndex"
import { ContentMap, ParseMode, PortfolioData, SeriesMap } from "./types/types"

export const contentMap: ContentMap = {
    date: {},
    tags: {},
    meta: {
        tags: [],
    },
    posts: {},
    series: {},
    unsearchable: {},
}
export const seriesMap: SeriesMap = {}
export const portfolioData: PortfolioData = {
    skills: new Set(),
    projects: {},
}

/**
 * Delete previously generated files
 */

try {
    fs.rmSync("dist", { recursive: true })
    // eslint-disable-next-line no-empty
} catch (err) {}

/**
 * Check that the required markdown directories exist
 */

if (!fs.lstatSync(markdownPath).isDirectory())
    throw Error("Invalid markdown path")

if (!fs.lstatSync(markdownPath + "/posts").isDirectory())
    throw Error(`Cannot find directory: ${markdownPath + "/posts"}`)

if (!fs.lstatSync(markdownPath + "/unsearchable").isDirectory())
    throw Error(`Cannot find directory: ${markdownPath + "/unsearchable"}`)

if (!fs.lstatSync(markdownPath + "/series").isDirectory())
    throw Error(`Cannot find directory: ${markdownPath + "/series"}`)

/**
 * Parse
 */

recursiveParse(ParseMode.POSTS, markdownPath + "/posts")
recursiveParse(ParseMode.UNSEARCHABLE, markdownPath + "/unsearchable")
recursiveParse(ParseMode.SERIES, markdownPath + "/series")
recursiveParse(ParseMode.PORTFOLIO, markdownPath + "/projects")

/**
 * Post-process
 */

postProcess()

/**
 * Save results
 */

fs.writeFileSync(mapFilePath, JSON.stringify(contentMap))
fs.writeFileSync(
    portfolioFilePath,
    JSON.stringify({
        ...portfolioData,
        skills: Array.from(portfolioData.skills),
    })
)

saveIndex()

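As a rough illustration of the input the script expects (the directory names come from the checks above; the file names themselves are invented):

markdown/
├── posts/
│   └── some-post.md          searchable post; front matter needs title and date, tags optional
├── unsearchable/
│   └── about.md              page outside the regular post listings; only a title is required
├── series/
│   └── my-series/
│       ├── 0.md              series descriptor (starts with 0, no underscore)
│       ├── 1_first-post.md   regular series post, named <index>_<name>.md
│       └── _draft.md         ignored: name starts with an underscore
└── projects/
    └── my-project.md         portfolio entry (name, image, overview, badges, repo)
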
131  packages/content/src/parseMarkdown.ts  Normal file
@@ -0,0 +1,131 @@
import "katex/contrib/mhchem" // chemical formulae

import matter from "gray-matter"
import hljs from "highlight.js" // code block syntax highlighting
import { JSDOM } from "jsdom" // HTML DOM parsing
import katex from "katex" // rendering mathematical expressions
import markdownIt from "markdown-it" // rendering markdown
import markdownItAnchor from "markdown-it-anchor" // markdown anchors
import markdownItFootnote from "markdown-it-footnote" // markdown footnotes
import highlightLines from "markdown-it-highlight-lines" // highlighting specific lines in code blocks
import markDownItMark from "markdown-it-mark" // text highlighting
import markdownItSub from "markdown-it-sub" // markdown subscript
import markdownItSup from "markdown-it-sup" // markdown superscript
import markdownItTaskCheckbox from "markdown-it-task-checkbox" // TODO list checkboxes
import markdownItTexMath from "markdown-it-texmath" // rendering mathematical expressions
import toc from "markdown-toc" // table of contents generation
import slugify from "slugify"

import { MarkdownData, ParseMode } from "./types/types"
import { nthIndex } from "./util"

const slugifyIt = (s: string) => slugify(s, { lower: true, strict: true })

const md = markdownIt({
    // https://github.com/highlightjs/highlight.js/blob/main/SUPPORTED_LANGUAGES.md
    highlight: (str, lang) => {
        if (lang && hljs.getLanguage(lang)) {
            try {
                return hljs.highlight(str, { language: lang }).value
                // eslint-disable-next-line no-empty
            } catch (error) {}
        }

        return "" // use external default escaping
    },
    html: true,
})
    .use(markdownItTexMath, {
        engine: katex,
        delimiters: "dollars",
    })
    .use(markdownItAnchor, {
        permalink: markdownItAnchor.permalink.ariaHidden({
            placement: "before",
            symbol: "#",
            renderHref: (s) => `#${slugifyIt(s)}`,
        }),
        slugify: slugifyIt,
    })
    .use(markdownItTaskCheckbox)
    .use(markDownItMark)
    .use(markdownItSub)
    .use(markdownItSup)
    .use(highlightLines)
    .use(markdownItFootnote)

/**
 * Parse the front matter if it exists
 *
 * @param {string} markdownRaw - raw unparsed text data of the markdown file
 * @param {string} path - filename of the markdown file
 * @param {ParseMode} mode - decides which front matter fields are required
 */
export default function parseMarkdown(
    markdownRaw: string,
    path: string,
    mode: ParseMode
): MarkdownData {
    const fileHasFrontMatter = markdownRaw.startsWith("---")

    const frontMatter = fileHasFrontMatter
        ? matter(markdownRaw.slice(0, nthIndex(markdownRaw, "---", 2) + 3)).data
        : {}

    if (fileHasFrontMatter) {
        if (mode != ParseMode.PORTFOLIO) {
            if (!frontMatter.title)
                throw Error(`Title is not defined in file: ${path}`)

            if (mode != ParseMode.UNSEARCHABLE && !frontMatter.date)
                throw Error(`Date is not defined in file: ${path}`)
        }

        if (mode === ParseMode.PORTFOLIO) {
            if (frontMatter.overview) {
                frontMatter.overview = md.render(frontMatter.overview)
            }
        }
    }

    //
    // work with the rendered DOM
    //

    const dom = new JSDOM(
        md.render(
            fileHasFrontMatter
                ? markdownRaw.slice(nthIndex(markdownRaw, "---", 2) + 3)
                : markdownRaw
        ) || ""
    )

    // add the .hljs class to all code blocks

    dom.window.document.querySelectorAll("pre > code").forEach((item) => {
        item.classList.add("hljs")
    })

    // add a parent div to tables (so they can scroll horizontally on small displays)

    dom.window.document.querySelectorAll("table").forEach((item) => {
        const parent = item.parentNode
        if (!parent) return // stop if the table doesn't have a parent node

        const wrapper = dom.window.document.createElement("div")
        wrapper.style.overflowX = "auto"

        parent.replaceChild(wrapper, item)
        wrapper.appendChild(item)
    })

    frontMatter.content = dom.window.document.documentElement.innerHTML

    return frontMatter as MarkdownData
}

export function generateToc(markdownRaw: string): string {
    return md.render(toc(markdownRaw).content, {
        slugify: slugifyIt,
    })
}

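To make the front matter handling concrete, here is a minimal sketch (the sample markdown is invented) of how the text is split around the second "---" using nthIndex from util.ts:

import matter from "gray-matter"

import { nthIndex } from "./util"

const markdownRaw = `---
title: Hello world
date: 2022-01-01
---

# Heading
`

// index of the second "---", plus 3 to skip past the delimiter itself
const frontMatterEnd = nthIndex(markdownRaw, "---", 2) + 3

const frontMatter = matter(markdownRaw.slice(0, frontMatterEnd)).data
// frontMatter.title === "Hello world"

const body = markdownRaw.slice(frontMatterEnd)
// "\n\n# Heading\n", which is what gets passed to md.render() and then to JSDOM
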
8  packages/content/src/portfolio/badge.ejs  Normal file
@@ -0,0 +1,8 @@
<div class="badge">
    <div class="badge-box" style="background-color: <%= badge.hex %>">
        <div class="icon-container <%= badge.isDark ? 'white' : 'black' %>">
            <%- badge.svg %>
        </div>
    </div>
    <%= badge.title %>
</div>

5  packages/content/src/portfolio/badges.ejs  Normal file
@@ -0,0 +1,5 @@
<div class="items-wrapper">
    <% badges.forEach((badge) => { %>
        <%- include("badge.ejs", { badge }) %>
    <% }) %>
</div>

24  packages/content/src/portfolio/skills.ejs  Normal file
@@ -0,0 +1,24 @@
<svg xmlns="http://www.w3.org/2000/svg" width="480" height="1075">
    <style>
        <%= style %>
    </style>

    <foreignObject x="0" y="0" width="100%" height="100%">
        <div
            xmlns="http://www.w3.org/1999/xhtml"
            xmlns:xlink="http://www.w3.org/1999/xlink"
        >
            <% for (let key in data) { %>
                <h2><%- key %></h2>
                <% if (data[key] instanceof Array) { %>
                    <%- include("badges.ejs", { badges: data[key] }) %>
                <% } else { %>
                    <% for (let subKey in data[key]) { %>
                        <h3><%- subKey %></h3>
                        <%- include("badges.ejs", { badges: data[key][subKey] }) %>
                    <% } %>
                <% } %>
            <% } %>
        </div>
    </foreignObject>
</svg>

21  packages/content/src/portfolio/skills.json  Normal file
@@ -0,0 +1,21 @@
{
    "Programming Languages": [
        "javascript",
        "typescript",
        "python",
        "rust",
        "csharp C#"
    ],
    "Web Front End": ["react", "svelte", "tailwindcss Tailwind"],
    "Desktop Front End": ["gtk", "electron", "tauri"],
    "Back End": ["firebase"],
    "DevOps": ["docker", "githubactions GH Actions"],
    "Game Development": ["unity"],
    "Etc": [
        "figma",
        "markdown",
        "notion",
        "google Google-Fu",
        "discord Discord Bot"
    ]
}

61  packages/content/src/portfolio/style.css  Normal file
@@ -0,0 +1,61 @@
svg {
    /* from github */
    font-family: -apple-system, BlinkMacSystemFont, Segoe UI, Helvetica, Arial,
        sans-serif, Apple Color Emoji, Segoe UI Emoji;
    font-size: 14px;
    color: #777777;
}

h1,
h2,
h3,
h4,
h5,
h6 {
    text-align: center;
}

.items-wrapper {
    display: grid;
    grid-template-columns: repeat(5, 1fr);

    column-gap: 10px;
    row-gap: 15px;
}

.badge {
    display: flex;
    flex-direction: column;

    justify-content: center;
    align-items: center;
    text-align: center;

    gap: 5px;
}

.badge-box {
    display: flex;

    justify-content: center;
    align-items: center;

    border-radius: 7px;

    width: 70px;
    height: 70px;
}

.icon-container > svg {
    height: 40px !important;
}

.white {
    color: white;
    fill: white;
}

.black {
    color: black;
    fill: black;
}

137  packages/content/src/postProcess.ts  Normal file
@@ -0,0 +1,137 @@
import ejs from "ejs"
import { readFileSync } from "fs"
import icons from "simple-icons/icons"
import { optimize } from "svgo"
import tinycolor from "tinycolor2"

import { contentMap, seriesMap } from "."
import skills from "./portfolio/skills.json"
import { Badge } from "./types/types"
import { writeToFile } from "./util"

export default function postProcess() {
    sortDates()
    fillTags()
    parseSeries()
    generatePortfolioSVGs()
}

function sortDates() {
    const TmpDate = contentMap.date
    contentMap.date = {}
    Object.keys(TmpDate)
        .sort()
        .forEach((sortedDateKey) => {
            contentMap.date[sortedDateKey] = TmpDate[sortedDateKey]
        })
}

function fillTags() {
    contentMap.meta.tags = Object.keys(contentMap.tags)
}

function parseSeries() {
    // sort each series by post index
    for (const seriesURL in seriesMap) {
        seriesMap[seriesURL].sort((a, b) => {
            if (a.index < b.index) return -1
            if (a.index > b.index) return 1

            return 0
        })
    }

    // fill in series length and post order
    for (const seriesURL in seriesMap) {
        contentMap.series[seriesURL].length = seriesMap[seriesURL].length
        contentMap.series[seriesURL].order = seriesMap[seriesURL].map(
            (item) => item.url
        )
    }
}

function generatePortfolioSVGs() {
    /**
     * render skills.svg
     */

    const style = readFileSync("./src/portfolio/style.css", "utf-8")

    const data: {
        [key: string]: Badge[] | { [key: string]: Badge[] }
    } = {}

    // C O G N I T O - H A Z A R D
    // THIS PART OF THE CODE WAS WRITTEN AT 3 AM
    // C O G N I T O - H A Z A R D

    for (const key in skills) {
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore
        if (skills[key] instanceof Array) {
            if (!data[key]) {
                data[key] = []
            }

            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore
            ;(skills[key] as string[]).forEach((badge) =>
                (data[key] as Badge[]).push(parseBadge(badge))
            )
        } else {
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore
            for (const subKey in skills[key]) {
                if (!data[key]) data[key] = {}

                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore
                if (!data[key][subKey]) {
                    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                    // @ts-ignore
                    data[key][subKey] = []
                }

                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore
                skills[key][subKey].forEach((badge: string) =>
                    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                    // @ts-ignore
                    (data[key][subKey] as Badge[]).push(parseBadge(badge))
                )
            }
        }
    }

    const renderedSVG = ejs.render(
        readFileSync("./src/portfolio/skills.ejs", "utf-8"),
        { style, data },
        { views: ["./src/portfolio"] }
    )

    writeToFile(
        "./dist/public/img/skills.svg",
        optimize(renderedSVG, { multipass: true }).data
    )
}

function parseBadge(badgeRaw: string): Badge {
    // a badge entry is either "slug" or "slug Custom Title"
    const isMultiWord = badgeRaw.includes(" ")
    const words = badgeRaw.split(" ")
    const slug = words[0]

    // look up the simple-icons icon object from the slug
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore
    const icon = icons["si" + slug[0].toUpperCase() + slug.slice(1)]

    // soften the brand color slightly so it works better as a background
    const color = tinycolor(icon.hex).lighten(5).desaturate(5)

    return {
        svg: icon.svg,
        hex: color.toHexString(),
        isDark: color.isDark(),
        title: isMultiWord ? words.slice(1).join(" ") : icon.title,
    }
}

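A brief usage note on the badge format (the outputs below are illustrative, not taken from the commit): each entry in skills.json is either a bare simple-icons slug, or a slug followed by a custom display title, and parseBadge splits on the first space:

parseBadge("rust")
// icon looked up as icons.siRust; the title falls back to the simple-icons title ("Rust")

parseBadge("csharp C#")
// icon looked up as icons.siCsharp; everything after the first space becomes the title ("C#")

parseBadge("githubactions GH Actions")
// multi-word custom titles work too: the title becomes "GH Actions"
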
106  packages/content/src/recursiveParse/index.ts  Normal file
@@ -0,0 +1,106 @@
import fs from "fs"
import readTimeEstimate from "read-time-estimate" // post read time estimation

import parseMarkdown from "../parseMarkdown"
import { ParseMode } from "../types/types"
import { path2FileOrFolderName, path2URL } from "../util"
import parsePost from "./parsePost"
import parseProjects from "./parseProjects"
import parseSeries from "./parseSeries"
import parseUnsearchable from "./parseUnsearchable"

/**
 * Data that's passed from {@link parseFile} to the other parse functions
 */
export interface DataToPass {
    path: string
    urlPath: string
    markdownRaw: string
    markdownData: {
        content: string
        [key: string]: unknown
    }
    humanizedDuration: string
    totalWords: number
}

/**
 * A recursive function that calls itself for every file and directory that it finds
 *
 * @param {ParseMode} mode - parse mode
 * @param {string} path - path of the file or folder
 */
export function recursiveParse(mode: ParseMode, path: string): void {
    // get the name of the file or folder that's currently being parsed
    const fileOrFolderName = path2FileOrFolderName(path)

    // stop if the file or folder starts with an underscore
    if (fileOrFolderName.startsWith("_")) return

    const stats = fs.lstatSync(path)

    // if it's a directory, call this function on every file/directory in it
    // if it's a file, parse it and then save the result to a file
    if (stats.isDirectory()) {
        fs.readdirSync(path).map((childPath) => {
            recursiveParse(mode, `${path}/${childPath}`)
        })
    } else if (stats.isFile()) {
        parseFile(mode, path)
    }
}

/**
 * Parse a markdown file
 *
 * @param {ParseMode} mode - decides which function to use to parse the file
 * @param {string} path - path of the markdown file
 */
function parseFile(mode: ParseMode, path: string): void {
    // stop if it is not a markdown file
    if (!path.endsWith(".md")) {
        console.log(`Ignoring non-markdown file at: ${path}`)
        return
    }

    /**
     * Parse markdown
     */

    const markdownRaw = fs.readFileSync(path, "utf8")
    const markdownData = parseMarkdown(markdownRaw, path, mode)
    const { humanizedDuration, totalWords } = readTimeEstimate(
        markdownData.content,
        275,
        12,
        500,
        ["img", "Image"]
    )

    const dataToPass: DataToPass = {
        path,
        urlPath: path2URL(path),
        markdownRaw,
        markdownData,
        humanizedDuration,
        totalWords,
    }

    switch (mode) {
        case ParseMode.POSTS:
            parsePost(dataToPass)
            break

        case ParseMode.SERIES:
            parseSeries(dataToPass)
            break

        case ParseMode.UNSEARCHABLE:
            parseUnsearchable(dataToPass)
            break

        case ParseMode.PORTFOLIO:
            parseProjects(dataToPass)
            break
    }
}

76  packages/content/src/recursiveParse/parsePost.ts  Normal file
@@ -0,0 +1,76 @@
import { contentMap } from ".."
import { contentDirectoryPath } from "../config"
import { generateToc } from "../parseMarkdown"
import { addDocument } from "../searchIndex"
import { PostData } from "../types/types"
import { writeToFile } from "../util"
import { DataToPass } from "."

export default function parsePost(data: DataToPass): void {
    const {
        urlPath,
        markdownRaw,
        markdownData,
        humanizedDuration,
        totalWords,
    } = data

    const postData: PostData = {
        title: markdownData.title as string,
        date: "",
        readTime: humanizedDuration,
        wordCount: totalWords,
        tags: [],
    }

    /**
     * Dates
     */

    const postDate = new Date(markdownData.date as string)
    postData.date = postDate.toLocaleString("default", {
        month: "short",
        day: "numeric",
        year: "numeric",
    })

    const YYYY_MM_DD = postDate.toISOString().split("T")[0]
    if (contentMap.date[YYYY_MM_DD]) {
        contentMap.date[YYYY_MM_DD].push(urlPath)
    } else {
        contentMap.date[YYYY_MM_DD] = [urlPath]
    }

    /**
     * Tags
     */

    postData.tags = markdownData.tags as string[]
    if (postData.tags) {
        postData.tags.forEach((tag) => {
            if (contentMap.tags[tag]) {
                contentMap.tags[tag].push(urlPath)
            } else {
                contentMap.tags[tag] = [urlPath]
            }
        })
    }

    /**
     * Save the parsed results
     */

    contentMap.posts[urlPath] = postData
    addDocument({
        title: markdownData.title,
        body: markdownData.content,
        url: urlPath,
    })
    writeToFile(
        `${contentDirectoryPath}${urlPath}.json`,
        JSON.stringify({
            content: markdownData.content,
            toc: generateToc(markdownRaw),
        })
    )
}

55  packages/content/src/recursiveParse/parseProjects.ts  Normal file
@@ -0,0 +1,55 @@
import { SimpleIcon } from "simple-icons"
import icons from "simple-icons/icons"
import tinycolor from "tinycolor2"

import { portfolioData } from ".."
import { contentDirectoryPath, iconsDirectoryPath } from "../config"
import { generateToc } from "../parseMarkdown"
import { writeToFile } from "../util"
import { DataToPass } from "."

export default function parseProjects(data: DataToPass): void {
    const { urlPath, markdownRaw, markdownData } = data

    if (markdownData.badges) {
        ;(markdownData.badges as string[]).forEach((slug) => {
            // todo: handle cases when the icon is not on simple-icons
            const icon: SimpleIcon =
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore
                icons["si" + slug[0].toUpperCase() + slug.slice(1)]

            portfolioData.skills.add(slug)

            const color = tinycolor(icon.hex).lighten(5).desaturate(5)

            // save the svg icon
            writeToFile(
                `${iconsDirectoryPath}/${icon.slug}.json`,
                JSON.stringify({
                    svg: icon.svg,
                    hex: color.toHexString(),
                    isDark: color.isDark(),
                    title: icon.title,
                })
            )
        })
    }

    // remove the /projects/ prefix from the url
    portfolioData.projects[urlPath.replace("/projects/", "")] = {
        name: markdownData.name as string,
        image: markdownData.image as string,
        overview: markdownData.overview as string,
        badges: (markdownData.badges as string[]) || [],
        repo: (markdownData.repo as string) || "",
    }

    writeToFile(
        `${contentDirectoryPath}${urlPath}.json`,
        JSON.stringify({
            content: markdownData.content,
            toc: generateToc(markdownRaw),
        })
    )
}

147  packages/content/src/recursiveParse/parseSeries.ts  Normal file
@@ -0,0 +1,147 @@
import { contentMap, seriesMap } from ".."
import { contentDirectoryPath } from "../config"
import { generateToc } from "../parseMarkdown"
import { addDocument } from "../searchIndex"
import { PostData } from "../types/types"
import { writeToFile } from "../util"
import { DataToPass } from "."

export default function parseSeries(data: DataToPass): void {
    const {
        path,
        urlPath: _urlPath,
        markdownRaw,
        markdownData,
        humanizedDuration,
        totalWords,
    } = data

    // last part of the url without the slash
    let lastPath = _urlPath.slice(_urlPath.lastIndexOf("/") + 1)
    if (!lastPath.includes("_") && !lastPath.startsWith("0"))
        throw Error(`Invalid series file name at: "${path}"`)

    // whether the file is a series descriptor (if not, it's a regular series post)
    const isFileDescriptor = lastPath.startsWith("0") && !lastPath.includes("_")

    // series post url
    if (isFileDescriptor) {
        lastPath = ""
    } else {
        lastPath = lastPath
            .slice(lastPath.indexOf("_") + 1) // get the string after the series index
            .replace(/\/$/, "") // remove the trailing slash
    }

    // the url up to and including the slash right before lastPath
    const urlUntilLastPath = _urlPath.slice(0, _urlPath.lastIndexOf("/") + 1)

    // remove the trailing slash if it's a series descriptor (lastPath is empty in that case)
    const urlPath =
        (isFileDescriptor
            ? urlUntilLastPath.replace(/\/$/, "")
            : urlUntilLastPath) + lastPath

    // todo: separate interface for series descriptor (no word count and read time)
    const postData: PostData = {
        title: markdownData.title as string,
        date: "",
        readTime: humanizedDuration,
        wordCount: totalWords,
        tags: [],
    }

    /**
     * Date
     */

    const postDate = new Date(markdownData.date as string)
    postData.date = postDate.toLocaleString("default", {
        month: "short",
        day: "numeric",
        year: "numeric",
    })

    const YYYY_MM_DD = postDate.toISOString().split("T")[0]
    if (contentMap.date[YYYY_MM_DD]) {
        contentMap.date[YYYY_MM_DD].push(urlPath)
    } else {
        contentMap.date[YYYY_MM_DD] = [urlPath]
    }

    /**
     * Tags
     */

    postData.tags = markdownData.tags as string[]
    if (postData.tags) {
        postData.tags.forEach((tag) => {
            if (contentMap.tags[tag]) {
                contentMap.tags[tag].push(urlPath)
            } else {
                contentMap.tags[tag] = [urlPath]
            }
        })
    }

    /**
     * Register the post in the search index and the content map
     */

    addDocument({
        title: markdownData.title,
        body: markdownData.content,
        url: urlPath,
    })

    contentMap.posts[urlPath] = postData

    // a series markdown file whose name starts with 0 is a series descriptor
    if (isFileDescriptor) {
        contentMap.series[urlPath] = {
            ...postData,
            order: [],
            length: 0,
        }
    } else {
        // put the series post in the appropriate series
        for (const key of Object.keys(contentMap.series)) {
            if (urlPath.includes(key)) {
                const index = parseInt(
                    _urlPath.slice(
                        _urlPath.lastIndexOf("/") + 1,
                        _urlPath.lastIndexOf("_")
                    )
                )

                if (isNaN(index))
                    throw Error(`Invalid series index at: ${path}`)

                const itemToPush = {
                    index: index,
                    url: urlPath,
                }

                if (seriesMap[key]) {
                    seriesMap[key].push(itemToPush)
                } else {
                    seriesMap[key] = [itemToPush]
                }

                break
            }
        }
    }

    /**
     * Save content
     */

    writeToFile(
        `${contentDirectoryPath}${urlPath}.json`,
        JSON.stringify({
            content: markdownData.content,
            toc: generateToc(markdownRaw),
        })
    )
}

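A concrete illustration of the naming rules above (the series name is made up):

// markdown/series/my-series/0.md              series descriptor
//   -> urlPath "/series/my-series", registered in contentMap.series
//
// markdown/series/my-series/1_first-post.md   regular series post with index 1
//   -> urlPath "/series/my-series/first-post", registered in contentMap.posts
//      and pushed into seriesMap["/series/my-series"] as { index: 1, url: "/series/my-series/first-post" }
//
// markdown/series/my-series/first-post.md     no index prefix
//   -> throws: Invalid series file name
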
34  packages/content/src/recursiveParse/parseUnsearchable.ts  Normal file
@@ -0,0 +1,34 @@
import { contentMap } from ".."
import { contentDirectoryPath } from "../config"
import { addDocument } from "../searchIndex"
import { writeToFile } from "../util"
import { DataToPass } from "."

export default function parseUnsearchable(data: DataToPass): void {
    const { urlPath: _urlPath, markdownData } = data

    // convert a path like /XXX/YYY/ZZZ to /YYY/ZZZ
    const urlPath = _urlPath.slice(_urlPath.slice(1).indexOf("/") + 1)

    addDocument({
        title: markdownData.title,
        body: markdownData.content,
        url: urlPath,
    })

    // Parse data that will be written to map.json
    contentMap.unsearchable[urlPath] = {
        title: markdownData.title as string,
    }

    /**
     * Save content
     */

    writeToFile(
        `${contentDirectoryPath}/unsearchable${urlPath}.json`,
        JSON.stringify({
            content: markdownData.content,
        })
    )
}

26  packages/content/src/searchIndex.ts  Normal file
@@ -0,0 +1,26 @@
/**
 * @file generate index for searching
 */

import elasticlunr from "elasticlunr"
import fs from "fs"

import { searchIndexFilePath } from "./config"

const elasticlunrIndex = elasticlunr(function () {
    this.addField("title" as never)
    this.addField("body" as never)
    this.setRef("url" as never)
})

export function addDocument(doc: {
    title?: unknown
    body?: string
    url?: string
}) {
    elasticlunrIndex.addDoc(doc)
}

export function saveIndex() {
    fs.writeFileSync(searchIndexFilePath, JSON.stringify(elasticlunrIndex))
}

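On the consuming side, the serialized index written by saveIndex() would typically be loaded back with elasticlunr.Index.load and queried by the url ref. A rough sketch (the fetch path and query options are assumptions, not part of this commit):

import elasticlunr from "elasticlunr"

// search.json is whatever saveIndex() wrote to searchIndexFilePath
async function search(query: string) {
    const serialized = await fetch("/search.json").then((res) => res.json())
    const index = elasticlunr.Index.load(serialized)

    // each result is { ref, score }, where ref is the post url set with setRef("url")
    return index.search(query, { expand: true })
}
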
1  packages/content/src/types/markdown-it-footnote.d.ts  vendored  Normal file
@@ -0,0 +1 @@
declare module "markdown-it-footnote"

1  packages/content/src/types/markdown-it-highlight-lines.d.ts  vendored  Normal file
@@ -0,0 +1 @@
declare module "markdown-it-highlight-lines"

1  packages/content/src/types/markdown-it-mark.d.ts  vendored  Normal file
@@ -0,0 +1 @@
declare module "markdown-it-mark"

1  packages/content/src/types/markdown-it-sub.d.ts  vendored  Normal file
@@ -0,0 +1 @@
declare module "markdown-it-sub"

1  packages/content/src/types/markdown-it-sup.d.ts  vendored  Normal file
@@ -0,0 +1 @@
declare module "markdown-it-sup"

1  packages/content/src/types/markdown-it-task-checkbox.d.ts  vendored  Normal file
@@ -0,0 +1 @@
declare module "markdown-it-task-checkbox"

4  packages/content/src/types/markdown-it-texmath.d.ts  vendored  Normal file
@@ -0,0 +1,4 @@
declare module "markdown-it-texmath" {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    export default function texmath(md: MarkdownIt, ...params: any[]): void
}

6  packages/content/src/types/markdown-toc.d.ts  vendored  Normal file
@@ -0,0 +1,6 @@
declare module "markdown-toc" {
    export default function toc(str: string): {
        json: JSON
        content: string
    }
}

141  packages/content/src/types/types.ts  Normal file
@@ -0,0 +1,141 @@
export interface ContentMap {
    // key: date (YYYY-MM-DD)
    // value: urls of posts published on that date
    date: { [key: string]: string[] }

    // key: tag name
    // value: urls of posts with that tag
    tags: {
        [key: string]: string[]
    }

    // list of all metadata
    meta: {
        tags: string[]
    }

    // searchable, non-series posts
    // must have a post date
    // tags are not required
    posts: {
        [key: string]: PostData
    }

    // series posts have "previous post" and "next post" buttons, so they need to be ordered
    series: { [key: string]: Series }

    // urls of unsearchable posts
    // kept here to quickly check whether a post exists
    unsearchable: { [key: string]: { title: string } }
}

/**
 * General
 */

export enum ParseMode {
    POSTS,
    SERIES,
    UNSEARCHABLE,
    PORTFOLIO,
}

export interface MarkdownData {
    content: string
    [key: string]: unknown
}

export interface PostData {
    title: string
    date: string
    readTime: string
    wordCount: number
    tags?: string[]
}

export interface PageData {
    title: string
    date: string
    readTime: string
    wordCount: number
    tags: string[]
    toc?: string
    content: string

    // series

    seriesHome: string
    prev?: string
    next?: string

    // series home

    order: string[]
    length: number

    // portfolio

    image: string // image url
    overview: string
    badges: string[]
    repo: string
}

export interface Badge {
    svg: string
    hex: string
    isDark: boolean
    title: string
}

/**
 * Series
 */

export interface Series {
    title: string
    date: string
    readTime: string
    wordCount: number
    order: string[]
    length: number
    tags?: string[]
}

export interface SeriesMap {
    // key: url
    [key: string]: SeriesEntry[]
}

export interface SeriesEntry {
    index: number
    url: string
}

/**
 * Portfolio
 */

export interface PortfolioData {
    // a set of valid simple-icons slugs
    skills: Set<string>

    // key: url
    projects: {
        [key: string]: PortfolioProject
    }
}

export interface PortfolioOverview {
    // link to my github
    github: string
    description: string
}

export interface PortfolioProject {
    name: string
    image: string // url to the image
    overview: string
    badges: string[] // array of valid simple-icons slugs
    repo: string // url of the git repository
}

53  packages/content/src/util.ts  Normal file
@@ -0,0 +1,53 @@
import fs from "fs"
import { relative } from "path"

import { markdownPath } from "./config"

/**
 * Converts a file path to the path that will be used in the url (starts with a slash)
 *
 * @param {string} pathToConvert
 */
export function path2URL(pathToConvert: string): string {
    return `/${relative(markdownPath, pathToConvert)}`
        .replace(/\.[^/.]+$/, "") // remove the file extension
        .replace(/ /g, "-") // replace all spaces with dashes
}

/**
 * Returns the text after the last slash
 *
 * @param {string} inputPath - path to parse
 */
export function path2FileOrFolderName(inputPath: string): string {
    // remove the trailing slash
    // (the original indexed with inputPath[-1], which is always undefined in JS)
    if (inputPath.endsWith("/"))
        inputPath = inputPath.slice(0, inputPath.length - 1)

    // get the last section
    return inputPath.slice(inputPath.lastIndexOf("/") + 1)
}

// gets the index of the nth occurrence of a pattern in a string
// returns -1 if nothing is found
// https://stackoverflow.com/a/14482123/12979111
export function nthIndex(str: string, pat: string, n: number) {
    let i = -1

    while (n-- && i++ < str.length) {
        i = str.indexOf(pat, i)
        if (i < 0) break
    }

    return i
}

export function writeToFile(filePath: string, dataToWrite: string) {
    // create the directory to put the file in
    fs.mkdirSync(filePath.slice(0, filePath.lastIndexOf("/")), {
        recursive: true,
    })

    // write content to the file
    fs.writeFileSync(filePath, dataToWrite)
}

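A few illustrative calls (the paths are made up; markdownPath is "./markdown" from config.ts):

path2URL("./markdown/posts/hello world.md")
// -> "/posts/hello-world" (relative to markdownPath, extension removed, spaces replaced with dashes)

path2FileOrFolderName("./markdown/series/my-series/")
// -> "my-series" (trailing slash stripped, last path segment returned)

nthIndex("a--b--c", "--", 2)
// -> 4 (index of the second "--"; returns -1 when there is no nth occurrence)
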