- added code block styling

- added mathematical expression
- combined recursive parsers
Kim, Jimin 2021-08-04 11:57:32 +09:00
parent f30fe0c843
commit 84f6efec80
6 changed files with 365 additions and 285 deletions


@@ -12,10 +12,11 @@ Tools / Frameworks / Packages used:
| [AWS](https://aws.amazon.com) | Domain register |
| [Firebase](https://firebase.google.com) | Static site hosting |
| [react](https://reactjs.org) | Front end framework |
-| [gray-matter](https://github.com/jonschlinkert/gray-matter) | Parsing markdown files |
| [react-tooltip](https://github.com/wwayne/react-tooltip) | Tooltips |
| [react-date-range](https://github.com/hypeserver/react-date-range) | Date picker for search page |
| [elasticlunr](https://github.com/weixsong/elasticlunr.js) | Search engine |
+| [gray-matter](https://github.com/jonschlinkert/gray-matter) | Markdown parsing |
+| [markdown-it](https://github.com/markdown-it/markdown-it) | Markdown rendering |

# Setup


@@ -10,7 +10,11 @@ import fs from "fs" // read and write files
import path from "path" // get relative path
import elasticlunr from "elasticlunr" // search index generation
import matter from "gray-matter" // parse markdown metadata
+import markdownIt from "markdown-it" // rendering markdown
+import hljs from "highlight.js" // code block highlighting
import toc from "markdown-toc" // table of contents generation
+import tm from "markdown-it-texmath" // rendering mathematical expression
+import katex from "katex" // rendering mathematical expression

const markdownPath = "./markdown" // where it will look for markdown documents
const outPath = "./src/data" // path to the json database
@@ -95,6 +99,24 @@ const index = elasticlunr(function () {
    this.setRef("url" as never)
})

+const md = markdownIt({
+    highlight: function (str, lang) {
+        if (lang && hljs.getLanguage(lang)) {
+            try {
+                return hljs.highlight(str, { language: lang }).value
+                // eslint-disable-next-line no-empty
+            } catch (error) {}
+        }
+
+        return "" // use external default escaping
+    },
+    html: true,
+}).use(tm, {
+    engine: katex,
+    delimiters: "dollars",
+    katexOptions: { macros: { "\\RR": "\\mathbb{R}" } },
+})
+
// converts file path to url
function path2URL(pathToConvert: string): string {
    return `/${path.relative(markdownPath, pathToConvert)}`
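For reference, a minimal sketch (not part of this commit) of what the `md` instance configured above produces; the input string is invented, but `md.render()` is the markdown-it call the build script relies on:

```typescript
// Hypothetical input: inline dollar-delimited math plus a fenced python block.
const fence = "`".repeat(3) // avoids nesting code fences inside this example
const sample = [
    "Inline math like $e=mc^2$ and a code block:",
    "",
    fence + "python",
    'print("hello")',
    fence,
].join("\n")

// Render with the markdown-it instance configured above
// (markdown-it-texmath + KaTeX for math, highlight.js for code blocks).
const html = md.render(sample)
// html now contains KaTeX markup for the $...$ expression and a
// <pre><code class="language-python"> block with highlight.js spans.
```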
@@ -112,193 +134,51 @@ function path2FileOrFolderName(inputPath: string): string {
    return inputPath.slice(inputPath.lastIndexOf("/") + 1)
}

-// A recursive function that calls itself for every files and directories that it finds
-function recursiveParsePosts(fileOrFolderPath: string) {
-    // get string after the last slash character
-    const fileOrFolderName = path2FileOrFolderName(fileOrFolderPath)
-
-    // ignore if file or directory name starts with a underscore
-    if (fileOrFolderName.startsWith("_")) return
-
-    // get data about the given path
-    const stats = fs.lstatSync(fileOrFolderPath)
-
-    // if it's a directory, call this function to every files/directories in it
-    // if it's a file, parse it and then save it to file
-    if (stats.isDirectory()) {
-        fs.readdirSync(fileOrFolderPath).map((childPath) => {
-            recursiveParsePosts(`${fileOrFolderPath}/${childPath}`)
-        })
-    } else if (stats.isFile()) {
-        // skip if it is not a markdown file
-        if (!fileOrFolderName.endsWith(".md")) {
-            console.log(`Ignoring non markdown file at: ${fileOrFolderPath}`)
-            return
-        }
-
-        // path that will be used as site url
-        const urlPath = path2URL(fileOrFolderPath)
-
-        // parse markdown metadata
-        const parsedMarkdown = matter(fs.readFileSync(fileOrFolderPath, "utf8"))
-        if (!parsedMarkdown.data.title) {
-            throw Error(`Title is not defined in file: ${fileOrFolderPath}`)
-        }
-        if (!parsedMarkdown.data.date) {
-            throw Error(`Date is not defined in file: ${fileOrFolderPath}`)
-        }
-
-        // urlPath starts with a slash
-        const contentFilePath = `${contentDirectoryPath}${urlPath}.json`
-
-        // create directory to put json content files
-        fs.mkdirSync(
-            contentFilePath.slice(0, contentFilePath.lastIndexOf("/")),
-            { recursive: true }
-        )
-
-        // write content to json file
-        fs.writeFileSync(
-            contentFilePath,
-            JSON.stringify({
-                content: parsedMarkdown.content.trim(),
-            })
-        )
-
-        // Parse data that will be written to map.js
-        const postData = {
-            title: parsedMarkdown.data.title,
-            preview: "",
-            date: "",
-            tags: [],
-            toc: toc(parsedMarkdown.content).content,
-        }
-
-        // content preview
-        // parsedMarkdown.excerpt is intentionally not used
-        // todo: fix potential improper closing of html tag
-        const slicedContent = parsedMarkdown.content.split(" ")
-        if (slicedContent.length > 19) {
-            postData.preview = slicedContent.slice(0, 19).join(" ") + " ..."
-        } else {
-            postData.preview = parsedMarkdown.content
-        }
-
-        // date
-        const postDate = new Date(parsedMarkdown.data.date)
-        postData.date = postDate.toLocaleString("default", {
-            month: "short",
-            day: "numeric",
-            year: "numeric",
-        })
-
-        const YYYY_MM_DD = postDate.toISOString().split("T")[0]
-        if (map.date[YYYY_MM_DD]) {
-            map.date[YYYY_MM_DD].push(urlPath)
-        } else {
-            map.date[YYYY_MM_DD] = [urlPath]
-        }
-
-        //tags
-        postData.tags = parsedMarkdown.data.tags
-        if (postData.tags) {
-            postData.tags.forEach((tag) => {
-                if (map.tags[tag]) {
-                    map.tags[tag].push(urlPath)
-                } else {
-                    map.tags[tag] = [urlPath]
-                }
-            })
-        }
-
-        map.posts[urlPath] = postData
-        index.addDoc({
-            title: parsedMarkdown.data.title,
-            body: parsedMarkdown.content,
-            url: urlPath,
-        })
-    }
-}
-
-function recursiveParseUnsearchable(fileOrFolderPath: string) {
-    // get string after the last slash character
-    const fileOrFolderName = path2FileOrFolderName(fileOrFolderPath)
-
-    // ignore if file or directory name starts with a underscore
-    if (fileOrFolderName.startsWith("_")) return
-
-    // illegal names
-    if (
-        fileOrFolderPath == "./markdown/unsearchable/posts" ||
-        fileOrFolderPath == "./markdown/unsearchable/series"
-    )
-        throw Error(
-            `Illegal name (posts/series) in path: "${fileOrFolderPath}".`
-        )
-
-    // get data about the given path
-    const stats = fs.lstatSync(fileOrFolderPath)
-
-    // if it's a directory, call this function to every files/directories in it
-    // if it's a file, parse it and then save it to file
-    if (stats.isDirectory()) {
-        fs.readdirSync(fileOrFolderPath).map((childPath) => {
-            recursiveParseUnsearchable(`${fileOrFolderPath}/${childPath}`)
-        })
-    } else if (stats.isFile()) {
-        // skip if it is not a markdown file
-        if (!fileOrFolderName.endsWith(".md")) {
-            console.log(`Ignoring non markdown file at: ${fileOrFolderPath}`)
-            return
-        }
-
-        const _urlPath = path2URL(fileOrFolderPath)
-        const urlPath = _urlPath.slice(
-            _urlPath
-                .slice(1) // ignore the first slash
-                .indexOf("/") + 1
-        )
-
-        // parse markdown metadata
-        const parsedMarkdown = matter(fs.readFileSync(fileOrFolderPath, "utf8"))
-        if (!parsedMarkdown.data.title) {
-            throw Error(`Title is not defined in file: ${fileOrFolderPath}`)
-        }
-
-        // urlPath starts with a slash
-        const contentFilePath = `${contentDirectoryPath}/unsearchable${urlPath}.json`
-
-        // create directory to put json content files
-        fs.mkdirSync(
-            contentFilePath.slice(0, contentFilePath.lastIndexOf("/")),
-            { recursive: true }
-        )
-
-        // write content to json file
-        fs.writeFileSync(
-            contentFilePath,
-            JSON.stringify({
-                content: parsedMarkdown.content.trim(),
-            })
-        )
-
-        // Parse data that will be written to map.js
-        map.unsearchable[urlPath] = {
-            title: parsedMarkdown.data.title,
-        }
-        index.addDoc({
-            title: parsedMarkdown.data.title,
-            body: parsedMarkdown.content,
-            url: urlPath,
-        })
-    }
-}
-
-function recursiveParseSeries(fileOrFolderPath: string) {
+// gets the nth occurance of a pattern in string
+// returns -1 if nothing is found
+// https://stackoverflow.com/a/14482123/12979111
+function nthIndex(str: string, pat: string, n: number) {
+    let i = -1
+
+    while (n-- && i++ < str.length) {
+        i = str.indexOf(pat, i)
+        if (i < 0) break
+    }
+
+    return i
+}
+
+function writeToJSON(JSONFilePath: string, dataToWrite: string) {
+    // create directory to put json content files
+    fs.mkdirSync(JSONFilePath.slice(0, JSONFilePath.lastIndexOf("/")), {
+        recursive: true,
+    })
+
+    // write content to json file
+    fs.writeFileSync(
+        JSONFilePath,
+        JSON.stringify({
+            content: dataToWrite.trim(),
+        })
+    )
+}
+
+// A recursive function that calls itself for every files and directories that it finds
+function recursiveParse(
+    mode: "posts" | "series" | "unsearchable",
+    fileOrFolderPath: string
+) {
+    if (mode == "unsearchable") {
+        // illegal names
+        if (
+            fileOrFolderPath == "./markdown/unsearchable/posts" ||
+            fileOrFolderPath == "./markdown/unsearchable/series"
+        )
+            throw Error(
+                `Illegal name (posts/series) in path: "${fileOrFolderPath}".`
+            )
+    }
+
    // get string after the last slash character
    const fileOrFolderName = path2FileOrFolderName(fileOrFolderPath)
@@ -312,7 +192,7 @@ function recursiveParseSeries(fileOrFolderPath: string) {
    // if it's a file, parse it and then save it to file
    if (stats.isDirectory()) {
        fs.readdirSync(fileOrFolderPath).map((childPath) => {
-            recursiveParseSeries(`${fileOrFolderPath}/${childPath}`)
+            recursiveParse(mode, `${fileOrFolderPath}/${childPath}`)
        })
    } else if (stats.isFile()) {
        // skip if it is not a markdown file
@@ -321,126 +201,217 @@ function recursiveParseSeries(fileOrFolderPath: string) {
            return
        }

-        if (
-            !fileOrFolderName.includes("_") &&
-            !fileOrFolderName.startsWith("0")
-        )
-            throw Error(`Invalid series post file name at: ${fileOrFolderPath}`)
-
-        // parse markdown metadata
-        const parsedMarkdown = matter(fs.readFileSync(fileOrFolderPath, "utf8"))
-        if (!parsedMarkdown.data.title) {
-            throw Error(`Title is not defined in file: ${fileOrFolderPath}`)
-        }
-        if (!parsedMarkdown.data.date) {
-            throw Error(`Date is not defined in file: ${fileOrFolderPath}`)
-        }
-
-        // path that will be used as site url
-        let urlPath = path2URL(fileOrFolderPath)
-        urlPath = urlPath.slice(0, urlPath.lastIndexOf("_"))
-        urlPath = urlPath.replace(/\/$/, "") // remove trailing slash
-
-        // urlPath starts with a slash
-        const contentFilePath = `${contentDirectoryPath}${urlPath}.json`
-
-        // create directory to put json content files
-        fs.mkdirSync(
-            contentFilePath.slice(0, contentFilePath.lastIndexOf("/")),
-            { recursive: true }
-        )
-
-        // write content to json file
-        fs.writeFileSync(
-            contentFilePath,
-            JSON.stringify({
-                content: parsedMarkdown.content.trim(),
-            })
-        )
-
-        // Parse data that will be written to map.js
-        const postData = {
-            title: parsedMarkdown.data.title,
-            preview: "",
-            date: "",
-            tags: [],
-            toc: toc(parsedMarkdown.content).content,
-        }
-
-        // content preview
-        // parsedMarkdown.excerpt is intentionally not used
-        // todo: fix potential improper closing of html tag
-        const slicedContent = parsedMarkdown.content.split(" ")
-        if (slicedContent.length > 19) {
-            postData.preview = slicedContent.slice(0, 19).join(" ") + " ..."
-        } else {
-            postData.preview = parsedMarkdown.content
-        }
-
-        // date
-        const postDate = new Date(parsedMarkdown.data.date)
-        postData.date = postDate.toLocaleString("default", {
-            month: "short",
-            day: "numeric",
-            year: "numeric",
-        })
-
-        const YYYY_MM_DD = postDate.toISOString().split("T")[0]
-        if (map.date[YYYY_MM_DD]) {
-            map.date[YYYY_MM_DD].push(urlPath)
-        } else {
-            map.date[YYYY_MM_DD] = [urlPath]
-        }
-
-        //tags
-        postData.tags = parsedMarkdown.data.tags
-        if (postData.tags) {
-            postData.tags.forEach((tag) => {
-                if (map.tags[tag]) {
-                    map.tags[tag].push(urlPath)
-                } else {
-                    map.tags[tag] = [urlPath]
-                }
-            })
-        }
-
-        if (fileOrFolderName.startsWith("0")) {
-            map.series[urlPath] = { ...postData, order: [], length: 0 }
-        } else {
-            map.posts[urlPath] = postData
-            index.addDoc({
-                title: parsedMarkdown.data.title,
-                body: parsedMarkdown.content,
-                url: urlPath,
-            })
-
-            for (const key of Object.keys(map.series)) {
-                if (urlPath.slice(0, urlPath.lastIndexOf("/")).includes(key)) {
-                    const index = parseInt(
-                        fileOrFolderName.slice(
-                            0,
-                            fileOrFolderName.lastIndexOf("_")
-                        )
-                    )
-                    if (isNaN(index)) {
-                        throw Error(
-                            `Invalid series index at: ${fileOrFolderPath}`
-                        )
-                    }
-                    const itemToPush = {
-                        index: index,
-                        url: urlPath,
-                    }
-                    if (seriesMap[key]) {
-                        seriesMap[key].push(itemToPush)
-                    } else {
-                        seriesMap[key] = [itemToPush]
-                    }
-                    break
-                }
-            }
-        }
-    }
-}
+        // read markdown file
+        const markdownRaw = fs.readFileSync(fileOrFolderPath, "utf8")
+
+        // parse markdown metadata
+        const markdownData = matter(
+            markdownRaw.slice(0, nthIndex(markdownRaw, "---", 2) + 3)
+        ).data
+        if (!markdownData.title)
+            throw Error(`Title is not defined in file: ${fileOrFolderPath}`)
+
+        markdownData.content =
+            md.render(markdownRaw.slice(nthIndex(markdownRaw, "---", 2) + 3)) ||
+            ""
+
+        if (mode == "posts") {
+            if (!markdownData.date) {
+                throw Error(`Date is not defined in file: ${fileOrFolderPath}`)
+            }
+
+            // path that will be used as site url (starts with a slash)
+            const urlPath = path2URL(fileOrFolderPath)
+
+            writeToJSON(
+                `${contentDirectoryPath}${urlPath}.json`,
+                markdownData.content
+            )
+
+            // Parse data that will be written to map.js
+            const postData = {
+                title: markdownData.title,
+                preview: "",
+                date: "",
+                tags: [],
+                toc: toc(markdownData.content).content,
+            }
+
+            // content preview
+            // parsedMarkdown.excerpt is intentionally not used
+            // todo: fix potential improper closing of html tag
+            const slicedContent = markdownData.content.split(" ")
+            if (slicedContent.length > 19) {
+                postData.preview = slicedContent.slice(0, 19).join(" ") + " ..."
+            } else {
+                postData.preview = markdownData.content
+            }
+
+            // date
+            const postDate = new Date(markdownData.date)
+            postData.date = postDate.toLocaleString("default", {
+                month: "short",
+                day: "numeric",
+                year: "numeric",
+            })
+
+            const YYYY_MM_DD = postDate.toISOString().split("T")[0]
+            if (map.date[YYYY_MM_DD]) {
+                map.date[YYYY_MM_DD].push(urlPath)
+            } else {
+                map.date[YYYY_MM_DD] = [urlPath]
+            }
+
+            //tags
+            postData.tags = markdownData.tags
+            if (postData.tags) {
+                postData.tags.forEach((tag) => {
+                    if (map.tags[tag]) {
+                        map.tags[tag].push(urlPath)
+                    } else {
+                        map.tags[tag] = [urlPath]
+                    }
+                })
+            }
+
+            map.posts[urlPath] = postData
+            index.addDoc({
+                title: markdownData.title,
+                body: markdownData.content,
+                url: urlPath,
+            })
+        } else if (mode == "unsearchable") {
+            // path that will be used as site url (starts with a slash)
+            const _urlPath = path2URL(fileOrFolderPath)
+            const urlPath = _urlPath.slice(
+                _urlPath
+                    .slice(1) // ignore the first slash
+                    .indexOf("/") + 1
+            )
+
+            writeToJSON(
+                `${contentDirectoryPath}/unsearchable${urlPath}.json`,
+                markdownData.content
+            )
+
+            // Parse data that will be written to map.js
+            map.unsearchable[urlPath] = {
+                title: markdownData.title,
+            }
+            index.addDoc({
+                title: markdownData.title,
+                body: markdownData.content,
+                url: urlPath,
+            })
+        } else if (mode == "series") {
+            if (
+                !fileOrFolderName.includes("_") &&
+                !fileOrFolderName.startsWith("0")
+            )
+                throw Error(
+                    `Invalid series post file name at: ${fileOrFolderPath}`
+                )
+
+            if (!markdownData.date) {
+                throw Error(`Date is not defined in file: ${fileOrFolderPath}`)
+            }
+
+            // path that will be used as site url (starts with a slash)
+            let urlPath = path2URL(fileOrFolderPath)
+            urlPath = urlPath.slice(0, urlPath.lastIndexOf("_"))
+            urlPath = urlPath.replace(/\/$/, "") // remove trailing slash
+
+            writeToJSON(
+                `${contentDirectoryPath}${urlPath}.json`,
+                markdownData.content
+            )
+
+            // Parse data that will be written to map.js
+            const postData = {
+                title: markdownData.title,
+                preview: "",
+                date: "",
+                tags: [],
+                toc: toc(markdownData.content).content,
+            }
+
+            // content preview
+            // parsedMarkdown.excerpt is intentionally not used
+            // todo: fix potential improper closing of html tag
+            const slicedContent = markdownData.content.split(" ")
+            if (slicedContent.length > 19) {
+                postData.preview = slicedContent.slice(0, 19).join(" ") + " ..."
+            } else {
+                postData.preview = markdownData.content
+            }
+
+            // date
+            const postDate = new Date(markdownData.date)
+            postData.date = postDate.toLocaleString("default", {
+                month: "short",
+                day: "numeric",
+                year: "numeric",
+            })
+
+            const YYYY_MM_DD = postDate.toISOString().split("T")[0]
+            if (map.date[YYYY_MM_DD]) {
+                map.date[YYYY_MM_DD].push(urlPath)
+            } else {
+                map.date[YYYY_MM_DD] = [urlPath]
+            }
+
+            //tags
+            postData.tags = markdownData.tags
+            if (postData.tags) {
+                postData.tags.forEach((tag) => {
+                    if (map.tags[tag]) {
+                        map.tags[tag].push(urlPath)
+                    } else {
+                        map.tags[tag] = [urlPath]
+                    }
+                })
+            }
+
+            if (fileOrFolderName.startsWith("0")) {
+                map.series[urlPath] = { ...postData, order: [], length: 0 }
+            } else {
+                map.posts[urlPath] = postData
+                index.addDoc({
+                    title: markdownData.title,
+                    body: markdownData.content,
+                    url: urlPath,
+                })
+
+                for (const key of Object.keys(map.series)) {
+                    if (
+                        urlPath.slice(0, urlPath.lastIndexOf("/")).includes(key)
+                    ) {
+                        const index = parseInt(
+                            fileOrFolderName.slice(
+                                0,
+                                fileOrFolderName.lastIndexOf("_")
+                            )
+                        )
+                        if (isNaN(index)) {
+                            throw Error(
+                                `Invalid series index at: ${fileOrFolderPath}`
+                            )
+                        }
+                        const itemToPush = {
+                            index: index,
+                            url: urlPath,
+                        }
+                        if (seriesMap[key]) {
+                            seriesMap[key].push(itemToPush)
+                        } else {
+                            seriesMap[key] = [itemToPush]
+                        }
+                        break
+                    }
+                }
+            }
+        }
+    }
+}
@@ -472,9 +443,9 @@ if (!fs.lstatSync(markdownPath + "/unsearchable").isDirectory())
if (!fs.lstatSync(markdownPath + "/series").isDirectory())
    throw Error(`Cannot find directory: ${markdownPath + "/posts"}`)

-recursiveParsePosts(markdownPath + "/posts")
-recursiveParseUnsearchable(markdownPath + "/unsearchable")
-recursiveParseSeries(markdownPath + "/series")
+recursiveParse("posts", markdownPath + "/posts")
+recursiveParse("unsearchable", markdownPath + "/unsearchable")
+recursiveParse("series", markdownPath + "/series")

// sort dates
let dateKeys: string[] = []
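As a worked example (not from the commit), here is roughly how the new helpers interact when a single post is parsed; the raw post below is invented, while `nthIndex`, `matter`, and `md` are the ones defined above:

```typescript
// Hypothetical raw post: front matter delimited by the first two "---" markers, then the body.
const markdownRaw = `---
title: Example post
date: 2021-08-04
---

# Hello

$e=mc^2$
`

// nthIndex() returns the start index of the 2nd "---"; +3 skips past the delimiter itself.
const frontMatterEnd = nthIndex(markdownRaw, "---", 2) + 3

// metadata from gray-matter, rendered HTML from markdown-it (same split as recursiveParse above)
const markdownData = matter(markdownRaw.slice(0, frontMatterEnd)).data
markdownData.content = md.render(markdownRaw.slice(frontMatterEnd)) || ""
```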


@@ -41,11 +41,11 @@ A post have title, post date, tags, and content.
## Code

-`code`<br />
+Here's a `code`.

```python
if __name__ == "__main__":
-    print("code block")
+    print("And here's a code block") # with comments!
```

## Etc

@@ -58,7 +58,7 @@ if __name__ == "__main__":
_italic_<br />
~~strikethrough~~

-## HTML
+## Styling

<p align="center">
centered paragraph

@@ -67,3 +67,7 @@ _italic_<br />
## Key

<kbd>Ctrl</kbd>+<kbd>C</kbd>
+
+## Mathematical expression
+
+$e=mc^2$ is actually $e^2=(mc^2)^2 + (pc)^2$.


@@ -22,10 +22,15 @@
"@fortawesome/free-solid-svg-icons": "^5.15.3",
"@fortawesome/react-fontawesome": "^0.1.14",
"@types/elasticlunr": "^0.9.2",
"@types/highlight.js": "^10.1.0",
"date-fns": "^2.23.0",
"elasticlunr": "^0.9.5",
"gray-matter": "^4.0.3",
"highlight.js": "^11.2.0",
"katex": "^0.13.13",
"local-storage-fallback": "^4.1.2",
"markdown-it": "^12.2.0",
"markdown-it-texmath": "^0.9.1",
"markdown-toc": "^1.2.0",
"marked": "^2.1.3",
"query-string": "^7.0.1",


@@ -5,6 +5,9 @@ import { HelmetProvider } from "react-helmet-async"
import storage from "local-storage-fallback"
import { isIE } from "react-device-detect"
import "highlight.js/styles/github-dark-dimmed.css"
import "katex/dist/katex.min.css"
import theming from "./theming"
import Spinner from "./components/Spinner"

@@ -70,6 +73,35 @@ body::-webkit-scrollbar-thumb {
code {
font-family: ${theming.font.code};
color: ${(props) =>
theming.theme(props.theme.currentTheme, {
light: theming.light.color1,
dark: theming.dark.color1,
})};
background-color: ${(props) =>
theming.theme(props.theme.currentTheme, {
light: "#eee",
dark: "#555",
})};
border-radius: 3px;
padding: 0 3px;
}
/* https://stackoverflow.com/a/48694906/12979111 */
pre > code {
font-family: ${theming.font.code};
color: #adbac7;
background-color: #22272e;
border: 1px solid #ddd;
page-break-inside: avoid;
font-size: 15px;
line-height: 1.6;
margin-bottom: 1.6em;
max-width: 100%;
overflow: auto;
padding: 1em 1.5em;
display: block;
word-wrap: break-word;
}

/* https://www.rgagnon.com/jsdetails/js-nice-effect-the-KBD-tag.html */


@@ -1833,6 +1833,13 @@
dependencies:
"@types/node" "*"
"@types/highlight.js@^10.1.0":
version "10.1.0"
resolved "https://registry.yarnpkg.com/@types/highlight.js/-/highlight.js-10.1.0.tgz#89bb0c202997d7a90a07bd2ec1f7d00c56bb90b4"
integrity sha512-77hF2dGBsOgnvZll1vymYiNUtqJ8cJfXPD6GG/2M0aLRc29PkvB7Au6sIDjIEFcSICBhCh2+Pyq6WSRS7LUm6A==
dependencies:
highlight.js "*"
"@types/hoist-non-react-statics@*":
version "3.3.1"
resolved "https://registry.yarnpkg.com/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.1.tgz#1124aafe5118cb591977aeb1ceaaed1070eb039f"
@@ -2545,6 +2552,11 @@ argparse@^1.0.10, argparse@^1.0.7:
dependencies:
sprintf-js "~1.0.2"
argparse@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38"
integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==
aria-query@^4.2.2:
version "4.2.2"
resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-4.2.2.tgz#0d2ca6c9aceb56b8977e9fed6aed7e15bbd2f83b"
@@ -3633,6 +3645,11 @@ commander@^4.1.1:
resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068"
integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==
commander@^6.0.0:
version "6.2.1"
resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.1.tgz#0792eb682dfbc325999bb2b84fddddba110ac73c"
integrity sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==
common-tags@^1.8.0:
version "1.8.0"
resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.0.tgz#8e3153e542d4a39e9b10554434afaaf98956a937"
@@ -4608,6 +4625,11 @@ entities@^2.0.0:
resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55"
integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==
entities@~2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/entities/-/entities-2.1.0.tgz#992d3129cf7df6870b96c57858c249a120f8b8b5"
integrity sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==
errno@^0.1.3, errno@~0.1.7:
version "0.1.8"
resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.8.tgz#8bb3e9c7d463be4976ff888f76b4809ebc2e811f"
@@ -5913,6 +5935,11 @@ hex-color-regex@^1.1.0:
resolved "https://registry.yarnpkg.com/hex-color-regex/-/hex-color-regex-1.1.0.tgz#4c06fccb4602fe2602b3c93df82d7e7dbf1a8a8e"
integrity sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ==
highlight.js@*, highlight.js@^11.2.0:
version "11.2.0"
resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-11.2.0.tgz#a7e3b8c1fdc4f0538b93b2dc2ddd53a40c6ab0f0"
integrity sha512-JOySjtOEcyG8s4MLR2MNbLUyaXqUunmSnL2kdV/KuGJOmHZuAR5xC54Ko7goAXBWNhf09Vy3B+U7vR62UZ/0iw==
history@^4.9.0:
version "4.10.1"
resolved "https://registry.yarnpkg.com/history/-/history-4.10.1.tgz#33371a65e3a83b267434e2b3f3b1b4c58aad4cf3"
@@ -7296,6 +7323,13 @@ jsprim@^1.2.2:
array-includes "^3.1.2"
object.assign "^4.1.2"
katex@^0.13.13:
version "0.13.13"
resolved "https://registry.yarnpkg.com/katex/-/katex-0.13.13.tgz#15a796e95516869bc6d483443b58b2df872ee40f"
integrity sha512-cCMcil4jwMm7behpXGiQfXJA29sko/Gd/26iCsr53Dv5Jn2iHbHyEb14dm9uVrIijUXx6Zz1WhlFhHE6DckvkQ==
dependencies:
commander "^6.0.0"
killable@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/killable/-/killable-1.0.1.tgz#4c8ce441187a061c7474fb87ca08e2a638194892"
@@ -7388,6 +7422,13 @@ lines-and-columns@^1.1.6:
resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00"
integrity sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=
linkify-it@^3.0.1:
version "3.0.2"
resolved "https://registry.yarnpkg.com/linkify-it/-/linkify-it-3.0.2.tgz#f55eeb8bc1d3ae754049e124ab3bb56d97797fb8"
integrity sha512-gDBO4aHNZS6coiZCKVhSNh43F9ioIL4JwRjLZPkoLIY4yZFwg264Y5lu2x6rb1Js42Gh6Yqm2f6L2AJcnkzinQ==
dependencies:
uc.micro "^1.0.1"
list-item@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/list-item/-/list-item-1.1.1.tgz#0c65d00e287cb663ccb3cb3849a77e89ec268a56"
@@ -7604,6 +7645,22 @@ map-visit@^1.0.0:
dependencies:
object-visit "^1.0.0"
markdown-it-texmath@^0.9.1:
version "0.9.1"
resolved "https://registry.yarnpkg.com/markdown-it-texmath/-/markdown-it-texmath-0.9.1.tgz#fdf1442afbca474e9170b9707b6155125384bae2"
integrity sha512-hRA5KQcgBJf5q3qDBcui4s/VkzjGC/l1ILviLNrdW22cT9JXHKdEA2ZTk1RRtFuiif2AbrbiMEoEwV6tBFwiEw==
markdown-it@^12.2.0:
version "12.2.0"
resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-12.2.0.tgz#091f720fd5db206f80de7a8d1f1a7035fd0d38db"
integrity sha512-Wjws+uCrVQRqOoJvze4HCqkKl1AsSh95iFAeQDwnyfxM09divCBSXlDR1uTvyUP3Grzpn4Ru8GeCxYPM8vkCQg==
dependencies:
argparse "^2.0.1"
entities "~2.1.0"
linkify-it "^3.0.1"
mdurl "^1.0.1"
uc.micro "^1.0.5"
markdown-link@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/markdown-link/-/markdown-link-0.1.1.tgz#32c5c65199a6457316322d1e4229d13407c8c7cf"
@@ -7656,6 +7713,11 @@ mdn-data@2.0.4:
resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.4.tgz#699b3c38ac6f1d728091a64650b65d388502fd5b"
integrity sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA==
mdurl@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/mdurl/-/mdurl-1.0.1.tgz#fe85b2ec75a59037f2adfec100fd6c601761152e"
integrity sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=
media-typer@0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
@@ -11547,6 +11609,11 @@ ua-parser-js@^0.7.24:
resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.28.tgz#8ba04e653f35ce210239c64661685bf9121dec31"
integrity sha512-6Gurc1n//gjp9eQNXjD9O3M/sMwVtN5S8Lv9bvOYBfKfDNiIIhqiyi01vMBO45u4zkDE420w/e0se7Vs+sIg+g==
uc.micro@^1.0.1, uc.micro@^1.0.5:
version "1.0.6"
resolved "https://registry.yarnpkg.com/uc.micro/-/uc.micro-1.0.6.tgz#9c411a802a409a91fc6cf74081baba34b24499ac"
integrity sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==
unbox-primitive@^1.0.0, unbox-primitive@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.1.tgz#085e215625ec3162574dc8859abee78a59b14471"