mirror of https://github.com/rharkor/caching-for-turbo.git synced 2025-06-08 01:37:01 +09:00

feat: store cache

rharkor 2024-06-13 14:27:20 +02:00
parent 43be153d6e
commit fdd98d3c46
26 changed files with 92643 additions and 866 deletions

.github/workflows/ci.yml

@@ -43,10 +43,6 @@ jobs:
         id: npm-lint
         run: npm run lint
 
-      - name: Test
-        id: npm-ci-test
-        run: npm run ci-test
-
   test-action:
     name: GitHub Actions Test
     runs-on: ubuntu-latest
@@ -56,12 +52,19 @@ jobs:
         id: checkout
         uses: actions/checkout@v4
 
-      - name: Test Local Action
-        id: test-action
-        uses: ./
-        with:
-          milliseconds: 2000
+      - name: Setup Node.js
+        id: setup-node
+        uses: actions/setup-node@v4
+        with:
+          node-version-file: .node-version
+          cache: npm
 
-      - name: Print Output
-        id: output
-        run: echo "${{ steps.test-action.outputs.time }}"
+      - name: Install Dependencies
+        id: npm-ci
+        run: npm ci
+
+      - name: Test Local Action
+        uses: ./
+
+      - name: Test build cache
+        run: npm run test

2
.gitignore vendored

@@ -101,3 +101,5 @@ __tests__/runner/*
 .idea
 .vscode
 *.code-workspace
+
+.turbo

__tests__/index.test.ts

@@ -1,17 +0,0 @@
/**
* Unit tests for the action's entrypoint, src/index.ts
*/
import * as main from '../src/main'
// Mock the action's entrypoint
const runMock = jest.spyOn(main, 'run').mockImplementation()
describe('index', () => {
it('calls run when imported', async () => {
// eslint-disable-next-line @typescript-eslint/no-require-imports
require('../src/index')
expect(runMock).toHaveBeenCalled()
})
})

__tests__/main.test.ts

@@ -1,89 +0,0 @@
/**
* Unit tests for the action's main functionality, src/main.ts
*
* These should be run as if the action was called from a workflow.
* Specifically, the inputs listed in `action.yml` should be set as environment
* variables following the pattern `INPUT_<INPUT_NAME>`.
*/
import * as core from '@actions/core'
import * as main from '../src/main'
// Mock the action's main function
const runMock = jest.spyOn(main, 'run')
// Other utilities
const timeRegex = /^\d{2}:\d{2}:\d{2}/
// Mock the GitHub Actions core library
let debugMock: jest.SpiedFunction<typeof core.debug>
let errorMock: jest.SpiedFunction<typeof core.error>
let getInputMock: jest.SpiedFunction<typeof core.getInput>
let setFailedMock: jest.SpiedFunction<typeof core.setFailed>
let setOutputMock: jest.SpiedFunction<typeof core.setOutput>
describe('action', () => {
beforeEach(() => {
jest.clearAllMocks()
debugMock = jest.spyOn(core, 'debug').mockImplementation()
errorMock = jest.spyOn(core, 'error').mockImplementation()
getInputMock = jest.spyOn(core, 'getInput').mockImplementation()
setFailedMock = jest.spyOn(core, 'setFailed').mockImplementation()
setOutputMock = jest.spyOn(core, 'setOutput').mockImplementation()
})
it('sets the time output', async () => {
// Set the action's inputs as return values from core.getInput()
getInputMock.mockImplementation(name => {
switch (name) {
case 'milliseconds':
return '500'
default:
return ''
}
})
await main.run()
expect(runMock).toHaveReturned()
// Verify that all of the core library functions were called correctly
expect(debugMock).toHaveBeenNthCalledWith(1, 'Waiting 500 milliseconds ...')
expect(debugMock).toHaveBeenNthCalledWith(
2,
expect.stringMatching(timeRegex)
)
expect(debugMock).toHaveBeenNthCalledWith(
3,
expect.stringMatching(timeRegex)
)
expect(setOutputMock).toHaveBeenNthCalledWith(
1,
'time',
expect.stringMatching(timeRegex)
)
expect(errorMock).not.toHaveBeenCalled()
})
it('sets a failed status', async () => {
// Set the action's inputs as return values from core.getInput()
getInputMock.mockImplementation(name => {
switch (name) {
case 'milliseconds':
return 'this is not a number'
default:
return ''
}
})
await main.run()
expect(runMock).toHaveReturned()
// Verify that all of the core library functions were called correctly
expect(setFailedMock).toHaveBeenNthCalledWith(
1,
'milliseconds not a number'
)
expect(errorMock).not.toHaveBeenCalled()
})
})

__tests__/wait.test.ts

@@ -1,25 +0,0 @@
/**
* Unit tests for src/wait.ts
*/
import { wait } from '../src/wait'
import { expect } from '@jest/globals'
describe('wait.ts', () => {
it('throws an invalid number', async () => {
const input = parseInt('foo', 10)
expect(isNaN(input)).toBe(true)
await expect(wait(input)).rejects.toThrow('milliseconds not a number')
})
it('waits with a valid number', async () => {
const start = new Date()
await wait(500)
const end = new Date()
const delta = Math.abs(end.getTime() - start.getTime())
expect(delta).toBeGreaterThan(450)
})
})

action.yml

@@ -1,6 +1,8 @@
-name: 'The name of your action here'
-description: 'Provide a description here'
-author: 'Your name or organization here'
+name: 'Set up GitHub Actions caching for Turborepo'
+description:
+  'Sets up Turborepo Remote Caching to work with GitHub Actions built-in cache.
+  No Vercel account access tokens needed.'
+author: 'HUORT Louis'
 
 # Add your action's branding here. This will appear on the GitHub Marketplace.
 branding:
@@ -9,16 +11,12 @@ branding:
 
 # Define your inputs here.
 inputs:
-  milliseconds:
-    description: 'Your input description here'
-    required: true
-    default: '1000'
-
-# Define your outputs here.
-outputs:
-  time:
-    description: 'Your output description here'
+  cache-prefix:
+    description: 'Prefix for the cache key'
+    required: false
+    default: turbogha_
 
 runs:
   using: node20
   main: dist/index.js
+  post: dist/post.js

badges/coverage.svg

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="106" height="20" role="img" aria-label="Coverage: 100%"><title>Coverage: 100%</title><linearGradient id="s" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="r"><rect width="106" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#r)"><rect width="63" height="20" fill="#555"/><rect x="63" width="43" height="20" fill="#4c1"/><rect width="106" height="20" fill="url(#s)"/></g><g fill="#fff" text-anchor="middle" font-family="Verdana,Geneva,DejaVu Sans,sans-serif" text-rendering="geometricPrecision" font-size="110"><text aria-hidden="true" x="325" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="530">Coverage</text><text x="325" y="140" transform="scale(.1)" fill="#fff" textLength="530">Coverage</text><text aria-hidden="true" x="835" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="330">100%</text><text x="835" y="140" transform="scale(.1)" fill="#fff" textLength="330">100%</text></g></svg>


12
dist/file.js generated vendored Normal file

@@ -0,0 +1,12 @@
'use strict'
const pino = require('./pino')
const { once } = require('events')
module.exports = async function (opts = {}) {
const destOpts = Object.assign({}, opts, { dest: opts.destination || 1, sync: false })
delete destOpts.destination
const destination = pino.destination(destOpts)
await once(destination, 'ready')
return destination
}

89095
dist/index.js generated vendored

File diff suppressed because one or more lines are too long

2
dist/index.js.map generated vendored

File diff suppressed because one or more lines are too long

1851
dist/licenses.txt generated vendored

File diff suppressed because it is too large

194
dist/worker.js generated vendored Normal file

@@ -0,0 +1,194 @@
'use strict'
const EE = require('events')
const { pipeline, PassThrough } = require('stream')
const pino = require('../pino.js')
const build = require('pino-abstract-transport')
const loadTransportStreamBuilder = require('./transport-stream')
// This file is not checked by the code coverage tool,
// as it is not reliable.
/* istanbul ignore file */
/*
 * [ASCII architecture diagram omitted: the original sketched the two wiring
 * modes described below.]
 *
 * > Multiple targets & pipelines: the ThreadStream source fans a single
 * write out to every target, and to every pipeline behind a PassThrough,
 * via pino.multistream.
 *
 * > One single pipeline or target: the stream is returned directly, so the
 * ThreadStream write goes straight to the target (or to the pipeline's
 * PassThrough) without the multistream step.
 */
module.exports = async function ({ targets, pipelines, levels, dedupe }) {
const targetStreams = []
// Process targets
if (targets && targets.length) {
targets = await Promise.all(targets.map(async (t) => {
const fn = await loadTransportStreamBuilder(t.target)
const stream = await fn(t.options)
return {
level: t.level,
stream
}
}))
targetStreams.push(...targets)
}
// Process pipelines
if (pipelines && pipelines.length) {
pipelines = await Promise.all(
pipelines.map(async (p) => {
let level
const pipeDests = await Promise.all(
p.map(async (t) => {
// level assigned to pipeline is duplicated over all its targets, just store it
level = t.level
const fn = await loadTransportStreamBuilder(t.target)
const stream = await fn(t.options)
return stream
}
))
return {
level,
stream: createPipeline(pipeDests)
}
})
)
targetStreams.push(...pipelines)
}
  // Skip building the multistream step if either one single pipeline or target is defined and
  // return the stream instance directly back to ThreadStream.
  // This is equivalent to defining either:
//
// pino.transport({ target: ... })
//
// OR
//
// pino.transport({ pipeline: ... })
if (targetStreams.length === 1) {
return targetStreams[0].stream
} else {
return build(process, {
parse: 'lines',
metadata: true,
close (err, cb) {
let expected = 0
for (const transport of targetStreams) {
expected++
transport.stream.on('close', closeCb)
transport.stream.end()
}
function closeCb () {
if (--expected === 0) {
cb(err)
}
}
}
})
}
  // TODO: Why wasn't split2 used for pipelines?
function process (stream) {
const multi = pino.multistream(targetStreams, { levels, dedupe })
// TODO manage backpressure
stream.on('data', function (chunk) {
const { lastTime, lastMsg, lastObj, lastLevel } = this
multi.lastLevel = lastLevel
multi.lastTime = lastTime
multi.lastMsg = lastMsg
multi.lastObj = lastObj
// TODO handle backpressure
multi.write(chunk + '\n')
})
}
/**
* Creates a pipeline using the provided streams and return an instance of `PassThrough` stream
* as a source for the pipeline.
*
* @param {(TransformStream|WritableStream)[]} streams An array of streams.
* All intermediate streams in the array *MUST* be `Transform` streams and only the last one `Writable`.
* @returns A `PassThrough` stream instance representing the source stream of the pipeline
*/
function createPipeline (streams) {
const ee = new EE()
const stream = new PassThrough({
autoDestroy: true,
destroy (_, cb) {
ee.on('error', cb)
ee.on('closed', cb)
}
})
pipeline(stream, ...streams, function (err) {
if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
ee.emit('error', err)
return
}
ee.emit('closed')
})
return stream
}
}

170
dist/worker1.js generated vendored Normal file

@@ -0,0 +1,170 @@
'use strict'
const { realImport, realRequire } = require('real-require')
const { workerData, parentPort } = require('worker_threads')
const { WRITE_INDEX, READ_INDEX } = require('./indexes')
const { waitDiff } = require('./wait')
const {
dataBuf,
filename,
stateBuf
} = workerData
let destination
const state = new Int32Array(stateBuf)
const data = Buffer.from(dataBuf)
async function start () {
let worker
try {
if (filename.endsWith('.ts') || filename.endsWith('.cts')) {
// TODO: add support for the TSM modules loader ( https://github.com/lukeed/tsm ).
if (!process[Symbol.for('ts-node.register.instance')]) {
realRequire('ts-node/register')
} else if (process.env.TS_NODE_DEV) {
realRequire('ts-node-dev')
}
// TODO: Support ES imports once tsc, tap & ts-node provide better compatibility guarantees.
// Remove extra forwardslash on Windows
worker = realRequire(decodeURIComponent(filename.replace(process.platform === 'win32' ? 'file:///' : 'file://', '')))
} else {
worker = (await realImport(filename))
}
} catch (error) {
    // A yarn user that tries to start a ThreadStream for an external module
    // provides a filename pointing to a zip file,
    // e.g. require.resolve('pino-elasticsearch') // returns /foo/pino-elasticsearch-npm-6.1.0-0c03079478-6915435172.zip/bar.js
    // The `import` above will then fail to load it.
// This catch block executes the `require` fallback to load the module correctly.
// In fact, yarn modifies the `require` function to manage the zipped path.
// More details at https://github.com/pinojs/pino/pull/1113
// The error codes may change based on the node.js version (ENOTDIR > 12, ERR_MODULE_NOT_FOUND <= 12 )
if ((error.code === 'ENOTDIR' || error.code === 'ERR_MODULE_NOT_FOUND') &&
filename.startsWith('file://')) {
worker = realRequire(decodeURIComponent(filename.replace('file://', '')))
} else if (error.code === undefined || error.code === 'ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING') {
// When bundled with pkg, an undefined error is thrown when called with realImport
// When bundled with pkg and using node v20, an ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING error is thrown when called with realImport
// More info at: https://github.com/pinojs/thread-stream/issues/143
worker = realRequire(decodeURIComponent(filename.replace(process.platform === 'win32' ? 'file:///' : 'file://', '')))
} else {
throw error
}
}
// Depending on how the default export is performed, and on how the code is
// transpiled, we may find cases of two nested "default" objects.
// See https://github.com/pinojs/pino/issues/1243#issuecomment-982774762
if (typeof worker === 'object') worker = worker.default
if (typeof worker === 'object') worker = worker.default
destination = await worker(workerData.workerData)
destination.on('error', function (err) {
Atomics.store(state, WRITE_INDEX, -2)
Atomics.notify(state, WRITE_INDEX)
Atomics.store(state, READ_INDEX, -2)
Atomics.notify(state, READ_INDEX)
parentPort.postMessage({
code: 'ERROR',
err
})
})
destination.on('close', function () {
// process._rawDebug('worker close emitted')
const end = Atomics.load(state, WRITE_INDEX)
Atomics.store(state, READ_INDEX, end)
Atomics.notify(state, READ_INDEX)
setImmediate(() => {
process.exit(0)
})
})
}
// No .catch() handler,
// in case there is an error it goes
// to unhandledRejection
start().then(function () {
parentPort.postMessage({
code: 'READY'
})
process.nextTick(run)
})
function run () {
const current = Atomics.load(state, READ_INDEX)
const end = Atomics.load(state, WRITE_INDEX)
// process._rawDebug(`pre state ${current} ${end}`)
if (end === current) {
if (end === data.length) {
waitDiff(state, READ_INDEX, end, Infinity, run)
} else {
waitDiff(state, WRITE_INDEX, end, Infinity, run)
}
return
}
// process._rawDebug(`post state ${current} ${end}`)
if (end === -1) {
// process._rawDebug('end')
destination.end()
return
}
const toWrite = data.toString('utf8', current, end)
// process._rawDebug('worker writing: ' + toWrite)
const res = destination.write(toWrite)
if (res) {
Atomics.store(state, READ_INDEX, end)
Atomics.notify(state, READ_INDEX)
setImmediate(run)
} else {
destination.once('drain', function () {
Atomics.store(state, READ_INDEX, end)
Atomics.notify(state, READ_INDEX)
run()
})
}
}
process.on('unhandledRejection', function (err) {
parentPort.postMessage({
code: 'ERROR',
err
})
process.exit(1)
})
process.on('uncaughtException', function (err) {
parentPort.postMessage({
code: 'ERROR',
err
})
process.exit(1)
})
process.once('exit', exitCode => {
if (exitCode !== 0) {
process.exit(exitCode)
return
}
if (destination?.writableNeedDrain && !destination?.writableEnded) {
parentPort.postMessage({
code: 'WARNING',
      err: new Error('ThreadStream: process exited before destination stream was drained. this may indicate that the destination stream tried to write to a missing stream')
})
}
process.exit(0)
})

1478
package-lock.json generated

File diff suppressed because it is too large

package.json

@@ -22,53 +22,29 @@
   },
   "scripts": {
     "bundle": "npm run format:write && npm run package",
-    "ci-test": "npx jest",
     "coverage": "npx make-coverage-badge --output-path ./badges/coverage.svg",
     "format:write": "npx prettier --write .",
     "format:check": "npx prettier --check .",
     "lint": "npx eslint . -c ./.github/linters/.eslintrc.yml",
     "package": "npx ncc build src/index.ts -o dist --source-map --license licenses.txt",
     "package:watch": "npm run package -- --watch",
-    "test": "npx jest",
-    "all": "npm run format:write && npm run lint && npm run test && npm run coverage && npm run package"
+    "all": "npm run format:write && npm run lint && npm run coverage && npm run package",
+    "test-build": "npm run -s package",
+    "test": "turbo run test-build",
+    "dev-run": "tsx ./src/dev-run.ts"
   },
   "license": "MIT",
-  "jest": {
-    "preset": "ts-jest",
-    "verbose": true,
-    "clearMocks": true,
-    "testEnvironment": "node",
-    "moduleFileExtensions": [
-      "js",
-      "ts"
-    ],
-    "testMatch": [
-      "**/*.test.ts"
-    ],
-    "testPathIgnorePatterns": [
-      "/node_modules/",
-      "/dist/"
-    ],
-    "transform": {
-      "^.+\\.ts$": "ts-jest"
-    },
-    "coverageReporters": [
-      "json-summary",
-      "text",
-      "lcov"
-    ],
-    "collectCoverage": true,
-    "collectCoverageFrom": [
-      "./src/**"
-    ]
-  },
   "dependencies": {
-    "@actions/core": "^1.10.1"
+    "@actions/core": "^1.10.1",
+    "fastify": "^4.27.0",
+    "stream-to-promise": "^3.0.0",
+    "wait-on": "^7.2.0"
   },
   "devDependencies": {
     "@jest/globals": "^29.7.0",
-    "@types/jest": "^29.5.12",
     "@types/node": "^20.14.2",
+    "@types/stream-to-promise": "^2.2.4",
+    "@types/wait-on": "^5.3.4",
     "@typescript-eslint/eslint-plugin": "^7.13.0",
     "@typescript-eslint/parser": "^7.13.0",
     "@vercel/ncc": "^0.38.1",
@@ -77,11 +53,13 @@
     "eslint-plugin-jest": "^28.6.0",
     "eslint-plugin-jsonc": "^2.16.0",
     "eslint-plugin-prettier": "^5.1.3",
-    "jest": "^29.7.0",
     "make-coverage-badge": "^1.2.0",
     "prettier": "^3.3.2",
     "prettier-eslint": "^16.3.0",
     "ts-jest": "^29.1.4",
+    "tsx": "^4.15.4",
+    "turbo": "^2.0.3",
     "typescript": "^5.4.5"
-  }
+  },
+  "packageManager": "npm@10.5.2"
 }

13
src/dev-run.ts Normal file

@@ -0,0 +1,13 @@
// Run the server in the foreground and kill it manually after the test
import { server } from './lib/server'
import { launchServer } from './lib/server/utils'
const main = async () => {
  //* Run the server
  server()
  //* Wait for it to come up and print the variables to export
  await launchServer(true)
}
main()

91
src/lib/cache/index.ts vendored Normal file

@@ -0,0 +1,91 @@
import { Readable } from 'node:stream'
import { env } from '../env'
import { pipeline } from 'node:stream/promises'
import {
createReadStream,
createWriteStream,
existsSync,
statSync
} from 'node:fs'
import { getCacheClient } from './utils'
import { cacheVersion, getCacheKey } from '../constants'
type RequestContext = {
log: {
info: (message: string) => void
}
}
//* Cache API
export async function saveCache(
ctx: RequestContext,
hash: string,
size: number,
tag: string,
stream: Readable
): Promise<void> {
if (!env.valid) {
ctx.log.info(
`Using filesystem cache because cache API env vars are not set`
)
await pipeline(stream, createWriteStream(`/tmp/${hash}.tg.bin`))
return
}
const client = getCacheClient()
const existingCacheResponse = await client.create(
getCacheKey(hash, tag),
cacheVersion
)
  // Silently exit when the cache entry could not be reserved (e.g. it already exists)
if (existingCacheResponse.success === false) {
return
}
const id = existingCacheResponse.data?.cacheId
if (!id) {
throw new Error(
`Unable to reserve cache (received: ${JSON.stringify(
existingCacheResponse.data
)})`
)
}
ctx.log.info(`Reserved cache ${id}`)
await client.upload(id, stream, size)
await client.commit(id, size)
ctx.log.info(`Saved cache ${id} for ${hash} (${size} bytes)`)
}
export async function getCache(
ctx: RequestContext,
hash: string
): Promise<
[number | undefined, Readable | ReadableStream, string | undefined] | null
> {
if (!env.valid) {
const path = `/tmp/${hash}.tg.bin`
if (!existsSync(path)) return null
const size = statSync(path).size
return [size, createReadStream(path), undefined]
}
const client = getCacheClient()
const cacheKey = getCacheKey(hash)
const { data } = await client.query(cacheKey, cacheVersion)
ctx.log.info(`Cache lookup for ${cacheKey}`)
if (!data) {
ctx.log.info(`Cache lookup did not return data`)
return null
}
const [foundCacheKey, artifactTag] = String(data.cacheKey).split('#')
if (foundCacheKey !== cacheKey) {
ctx.log.info(`Cache key mismatch: ${foundCacheKey} !== ${cacheKey}`)
return null
}
const resp = await fetch(data.archiveLocation)
const size = +(resp.headers.get('content-length') || 0)
const readableStream = resp.body
if (!readableStream) {
throw new Error('Failed to retrieve cache stream')
}
return [size, readableStream, artifactTag]
}
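Note: the filesystem fallback above can be exercised locally with a few lines (a sketch, not part of the commit — the import path and hash are hypothetical, and the ACTIONS_* variables are assumed unset so env.valid is false):

import { Readable } from 'node:stream'
import { getCache, saveCache } from './lib/cache' // path assumed

const ctx = { log: { info: (message: string) => console.log(message) } }

async function demo(): Promise<void> {
  const payload = Buffer.from('turbo artifact bytes')
  // With the cache API env vars unset, this writes /tmp/deadbeef.tg.bin
  await saveCache(ctx, 'deadbeef', payload.length, '', Readable.from(payload))
  const hit = await getCache(ctx, 'deadbeef')
  if (hit) {
    const [size] = hit
    console.log(`cache hit: ${size} bytes`)
  }
}

demo().catch(console.error)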

135
src/lib/cache/utils.ts vendored Normal file

@@ -0,0 +1,135 @@
import { Readable } from 'node:stream'
import { env } from '../env'
import * as core from '@actions/core'
import streamToPromise from 'stream-to-promise'
class HandledError extends Error {
status: number
statusText: string
data: unknown
constructor(status: number, statusText: string, data: unknown) {
super(`${status}: ${statusText}`)
this.status = status
this.statusText = statusText
this.data = data
}
}
function handleFetchError(message: string) {
return (error: unknown) => {
if (error instanceof HandledError) {
core.error(`${message}: ${error.status} ${error.statusText}`)
core.error(JSON.stringify(error.data))
throw error
}
core.error(`${message}: ${error}`)
throw error
}
}
export function getCacheClient() {
if (!env.valid) {
throw new Error('Cache API env vars are not set')
}
const baseURL = `${env.ACTIONS_CACHE_URL.replace(/\/$/, '')}/_apis/artifactcache`
const headers = new Headers({
Authorization: `Bearer ${env.ACTIONS_RUNTIME_TOKEN}`,
Accept: 'application/json;api-version=6.0-preview.1'
})
const create = async (
key: string,
version: string
): Promise<{
success: boolean
data?: { cacheId: string }
}> => {
try {
const res = await fetch(`${baseURL}/caches`, {
method: 'POST',
headers,
body: JSON.stringify({ key, version })
})
if (!res.ok) {
const { status, statusText } = res
const data = await res.json()
if (status === 409) {
return { success: false }
}
const buildedError = new HandledError(status, statusText, data)
return handleFetchError('Unable to reserve cache')(buildedError)
}
const data = await res.json()
return { success: true, data }
} catch (error) {
return handleFetchError('Unable to reserve cache')(error)
}
}
const upload = async (
id: string,
stream: Readable,
size: number
): Promise<void> => {
try {
const body = await streamToPromise(stream)
await fetch(`${baseURL}/caches/${id}`, {
method: 'PATCH',
headers: {
...headers,
'Content-Length': size.toString(),
'Content-Type': 'application/octet-stream',
'Content-Range': `bytes 0-${size - 1}/*`
},
body
})
} catch (error) {
handleFetchError('Unable to upload cache')(error)
}
}
const commit = async (id: string, size: number): Promise<void> => {
try {
await fetch(`${baseURL}/caches/${id}`, {
method: 'POST',
headers,
body: JSON.stringify({ size })
})
} catch (error) {
handleFetchError('Unable to commit cache')(error)
}
}
const query = async (
keys: string,
version: string
): Promise<{
success: boolean
data?: { cacheKey: string; archiveLocation: string }
}> => {
try {
      // GET requests cannot carry a body, so pass the lookup parameters in
      // the query string of the cache endpoint instead
      const res = await fetch(
        `${baseURL}/cache?keys=${encodeURIComponent(keys)}&version=${encodeURIComponent(version)}`,
        { method: 'GET', headers }
      )
if (!res.ok) {
const { status, statusText } = res
const data = await res.json()
const buildedError = new HandledError(status, statusText, data)
return handleFetchError('Unable to query cache')(buildedError)
}
const data = await res.json()
return { success: true, data }
} catch (error) {
return handleFetchError('Unable to query cache')(error)
}
}
return {
create,
upload,
commit,
query
}
}
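Note: a sketch of how a full reserve → upload → commit round trip composes from this client (not part of the commit; the import path, key, and hash are hypothetical, and the runner is assumed to inject ACTIONS_CACHE_URL / ACTIONS_RUNTIME_TOKEN):

import { Readable } from 'node:stream'
import { getCacheClient } from './lib/cache/utils' // path assumed

async function push(hash: string, bytes: Buffer): Promise<void> {
  const client = getCacheClient()
  // Reserve the key; success: false means it is already cached, so stop here
  const reserved = await client.create(`turbogha_${hash}`, 'turbogha_v2')
  if (!reserved.success || !reserved.data) return
  // Upload the bytes, then commit so the entry becomes visible to queries
  await client.upload(reserved.data.cacheId, Readable.from(bytes), bytes.length)
  await client.commit(reserved.data.cacheId, bytes.length)
}

push('deadbeef', Buffer.from('artifact')).catch(console.error)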

8
src/lib/constants.ts Normal file

@@ -0,0 +1,8 @@
import * as core from '@actions/core'
export const serverPort = 41230
export const cacheVersion = 'turbogha_v2'
export const cachePrefix = core.getInput('cache-prefix') || 'turbogha_'
export const getCacheKey = (hash: string, tag?: string): string =>
`${cachePrefix}${hash}${tag ? `#${tag}` : ''}`
export const serverLogFile = '/tmp/turbogha.log'
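Note: with the default prefix, getCacheKey produces keys like the following (hash and tag values made up for illustration):

import { getCacheKey } from './lib/constants' // path assumed

console.log(getCacheKey('78aec2ff11f4c9a0')) // turbogha_78aec2ff11f4c9a0
console.log(getCacheKey('78aec2ff11f4c9a0', 'ubuntu')) // turbogha_78aec2ff11f4c9a0#ubuntu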

21
src/lib/env/index.ts vendored Normal file

@@ -0,0 +1,21 @@
const envObject = {
ACTIONS_RUNTIME_TOKEN: process.env.ACTIONS_RUNTIME_TOKEN,
ACTIONS_CACHE_URL: process.env.ACTIONS_CACHE_URL
}
type TInvalidEnv = {
valid: false
} & typeof envObject
type TValidEnv = {
valid: true
} & {
[K in keyof typeof envObject]: NonNullable<(typeof envObject)[K]>
}
type TEnv = TInvalidEnv | TValidEnv
export const env = {
valid: Object.values(envObject).every(value => value !== undefined),
...envObject
} as TEnv
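Note: valid doubles as the discriminant of the TEnv union, so a single check narrows both variables to plain strings (usage sketch; the import path is assumed):

import { env } from './lib/env' // path assumed

if (env.valid) {
  // Narrowed to TValidEnv: both values are non-nullable strings here
  const cacheUrl: string = env.ACTIONS_CACHE_URL
  console.log(`cache API at ${cacheUrl}`)
} else {
  // TInvalidEnv: either variable may be undefined
  console.log('cache API env vars missing, using the filesystem fallback')
}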

73
src/lib/server/index.ts Normal file

@@ -0,0 +1,73 @@
import Fastify from 'fastify'
import { serverPort } from '../constants'
import { getCache, saveCache } from '../cache'
export async function server(): Promise<void> {
//* Create the server
const fastify = Fastify({
logger: true
})
//? Server status check
fastify.get('/', async () => {
return { ok: true }
})
//? Shut down the server
const shutdown = () => {
setTimeout(() => process.exit(0), 100)
return { ok: true }
}
fastify.delete('/shutdown', async () => {
return shutdown()
})
  //? Handle streaming request bodies
// https://www.fastify.io/docs/latest/Reference/ContentTypeParser/#catch-all
fastify.addContentTypeParser(
'application/octet-stream',
(_req, _payload, done) => {
done(null)
}
)
//? Upload cache
fastify.put('/v8/artifacts/:hash', async request => {
const hash = (request.params as { hash: string }).hash
request.log.info(`Received artifact for ${hash}`)
await saveCache(
request,
hash,
+(request.headers['content-length'] || 0),
String(request.headers['x-artifact-tag'] || ''),
request.raw
)
request.log.info(`Saved artifact for ${hash}`)
return { ok: true }
})
//? Download cache
fastify.get('/v8/artifacts/:hash', async (request, reply) => {
const hash = (request.params as { hash: string }).hash
request.log.info(`Requested artifact for ${hash}`)
const result = await getCache(request, hash)
if (result === null) {
request.log.info(`Artifact for ${hash} not found`)
reply.code(404)
return { ok: false }
}
const [size, stream, artifactTag] = result
if (size) {
reply.header('Content-Length', size)
}
reply.header('Content-Type', 'application/octet-stream')
if (artifactTag) {
reply.header('x-artifact-tag', artifactTag)
}
request.log.info(`Sending artifact for ${hash}`)
return reply.send(stream)
})
//* Start the server
await fastify.listen({ port: serverPort })
}
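Note: a manual smoke test of the two artifact endpoints (a sketch assuming the server is already listening on the default port 41230; the hash is arbitrary):

const base = 'http://localhost:41230'

async function smokeTest(): Promise<void> {
  // Upload an artifact under an arbitrary hash
  await fetch(`${base}/v8/artifacts/cafebabe`, {
    method: 'PUT',
    headers: {
      'Content-Type': 'application/octet-stream',
      'x-artifact-tag': 'demo'
    },
    body: Buffer.from('artifact payload')
  })
  // Fetch it back and check that the tag header round-trips
  const res = await fetch(`${base}/v8/artifacts/cafebabe`)
  console.log(res.status, res.headers.get('x-artifact-tag'))
  console.log(Buffer.from(await res.arrayBuffer()).toString())
}

smokeTest().catch(console.error)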

66
src/lib/server/utils.ts Normal file

@@ -0,0 +1,66 @@
import waitOn from 'wait-on'
import {
cachePrefix,
cacheVersion,
serverLogFile,
serverPort
} from '../constants'
import * as core from '@actions/core'
import { openSync } from 'fs'
import { spawn } from 'child_process'
export const waitForServer = async (): Promise<void> => {
await waitOn({
resources: [`http-get://localhost:${serverPort}`],
timeout: 10000
})
}
export const exportVariable = (name: string, value: string): void => {
core.exportVariable(name, value)
core.info(` ${name}=${value}`)
}
export async function launchServer(devRun?: boolean): Promise<void> {
if (!devRun) {
//* Launch a detached child process to run the server
// See: https://nodejs.org/docs/latest-v16.x/api/child_process.html#optionsdetached
const out = openSync(serverLogFile, 'a')
const err = openSync(serverLogFile, 'a')
const child = spawn(process.argv[0], [process.argv[1], '--server'], {
detached: true,
stdio: ['ignore', out, err]
})
child.unref()
core.info(`Cache version: ${cacheVersion}`)
core.info(`Cache prefix: ${cachePrefix}`)
core.info(`Launched child process: ${child.pid}`)
core.info(`Server log file: ${serverLogFile}`)
}
//* Wait for server
await waitForServer()
core.info(`Server is now up and running.`)
//* Export the environment variables for Turbo
if (devRun) {
console.log('Execute:')
console.log(`export TURBOGHA_PORT=${serverPort}`)
console.log(`export TURBO_API=http://localhost:${serverPort}`)
console.log(`export TURBO_TOKEN=turbogha`)
console.log(`export TURBO_TEAM=turbogha`)
} else {
core.info('The following environment variables are exported:')
exportVariable('TURBOGHA_PORT', `${serverPort}`)
exportVariable('TURBO_API', `http://localhost:${serverPort}`)
exportVariable('TURBO_TOKEN', 'turbogha')
exportVariable('TURBO_TEAM', 'turbogha')
}
}
export async function killServer() {
//* Kill the server
await fetch(`http://localhost:${serverPort}/shutdown`, {
method: 'DELETE'
})
}
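Note: taken together, the action's lifecycle reduces to roughly the following (a sketch; the main step runs launchServer, later workflow steps run turbo against TURBO_API, and the post step calls killServer):

import { launchServer, killServer } from './lib/server/utils' // path assumed

async function lifecycle(): Promise<void> {
  //* Main step: detach the proxy server and export the TURBO_* variables
  await launchServer()
  //* ...later workflow steps run turbo tasks against TURBO_API...
  //* Post step: ask the server to shut itself down
  await killServer()
}

lifecycle().catch(console.error)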

src/main.ts

@@ -1,5 +1,6 @@
 import * as core from '@actions/core'
-import { wait } from './wait'
+import { server } from './lib/server'
+import { launchServer } from './lib/server/utils'
 
 /**
  * The main function for the action.
@@ -7,20 +8,16 @@ import { wait } from './wait'
  */
 export async function run(): Promise<void> {
   try {
-    const ms: string = core.getInput('milliseconds')
-
-    // Debug logs are only output if the `ACTIONS_STEP_DEBUG` secret is true
-    core.debug(`Waiting ${ms} milliseconds ...`)
-
-    // Log the current timestamp, wait, then log the new timestamp
-    core.debug(new Date().toTimeString())
-    await wait(parseInt(ms, 10))
-    core.debug(new Date().toTimeString())
-
-    // Set outputs for other workflow steps to use
-    core.setOutput('time', new Date().toTimeString())
+    //* Daemon process
+    if (process.argv[2] === '--server') {
+      return server()
+    }
+
+    //* Base process
+    return launchServer()
   } catch (error) {
     // Fail the workflow run if an error occurs
     if (error instanceof Error) core.setFailed(error.message)
   }
 }
+run()

23
src/post.ts Normal file

@@ -0,0 +1,23 @@
import * as core from '@actions/core'
import { serverLogFile, serverPort } from './lib/constants'
import { readFile } from 'fs/promises'
import { killServer } from './lib/server/utils'
/**
* The post function of the action. It kills the server
* @returns {Promise<void>} Resolves when the action is complete.
*/
export async function run(): Promise<void> {
try {
await killServer()
//* Read the logs
const logs = await readFile(serverLogFile, 'utf-8')
core.info(logs)
} catch (error) {
// Fail the workflow run if an error occurs
if (error instanceof Error) core.setFailed(error.message)
}
}
run()

src/wait.ts

@@ -1,14 +0,0 @@
/**
* Wait for a number of milliseconds.
* @param milliseconds The number of milliseconds to wait.
* @returns {Promise<string>} Resolves with 'done!' after the wait is over.
*/
export async function wait(milliseconds: number): Promise<string> {
return new Promise(resolve => {
if (isNaN(milliseconds)) {
throw new Error('milliseconds not a number')
}
setTimeout(() => resolve('done!'), milliseconds)
})
}

9
turbo.json Normal file

@@ -0,0 +1,9 @@
{
"$schema": "https://turbo.build/schema.json",
"tasks": {
"test-build": {
"inputs": ["src"],
"outputs": ["dist"]
}
}
}