1
0
Fork 0
mirror of https://github.com/rharkor/caching-for-turbo.git synced 2025-06-08 01:37:01 +09:00

Merge pull request #11 from rharkor/mvp

Mvp
This commit is contained in:
HUORT Louis 2024-06-13 16:19:30 +02:00 committed by GitHub
commit 8140f0f497
Signed by: github
GPG key ID: B5690EEEBB952194
29 changed files with 137410 additions and 1100 deletions

View file

@ -2,3 +2,4 @@ lib/
dist/
node_modules/
coverage/
post.js

View file

@ -1,4 +1,4 @@
name: Continuous Integration
name: ci
on:
pull_request:
@ -43,10 +43,6 @@ jobs:
id: npm-lint
run: npm run lint
- name: Test
id: npm-ci-test
run: npm run ci-test
test-action:
name: GitHub Actions Test
runs-on: ubuntu-latest
@ -56,12 +52,19 @@ jobs:
id: checkout
uses: actions/checkout@v4
- name: Test Local Action
id: test-action
uses: ./
- name: Setup Node.js
id: setup-node
uses: actions/setup-node@v4
with:
milliseconds: 2000
node-version-file: .node-version
cache: npm
- name: Print Output
id: output
run: echo "${{ steps.test-action.outputs.time }}"
- name: Install Dependencies
id: npm-ci
run: npm ci
- name: Test Local Action
uses: ./
- name: Test build cache
run: npm run test

3
.gitignore vendored
View file

@ -101,3 +101,6 @@ __tests__/runner/*
.idea
.vscode
*.code-workspace
.turbo
tmp.ts

241
README.md
View file

@ -1,229 +1,46 @@
# Create a GitHub Action Using TypeScript
# Caching for Turborepo
[![GitHub Super-Linter](https://github.com/actions/typescript-action/actions/workflows/linter.yml/badge.svg)](https://github.com/super-linter/super-linter)
![CI](https://github.com/actions/typescript-action/actions/workflows/ci.yml/badge.svg)
[![Check dist/](https://github.com/actions/typescript-action/actions/workflows/check-dist.yml/badge.svg)](https://github.com/actions/typescript-action/actions/workflows/check-dist.yml)
[![CodeQL](https://github.com/actions/typescript-action/actions/workflows/codeql-analysis.yml/badge.svg)](https://github.com/actions/typescript-action/actions/workflows/codeql-analysis.yml)
[![Coverage](./badges/coverage.svg)](./badges/coverage.svg)
[![typescript-action status](https://github.com/rharkor/caching-for-turbo/workflows/ci/badge.svg)](https://github.com/rharkor/caching-for-turbo/actions)
Use this template to bootstrap the creation of a TypeScript action. :rocket:
Caching for [Turborepo](https://turbo.build/repo/), using GitHub Actions cache
service.
This template includes compilation support, tests, a validation workflow,
publishing, and versioning guidance.
## How to use
If you are new, there's also a simpler introduction in the
[Hello world JavaScript action repository](https://github.com/actions/hello-world-javascript-action).
## Create Your Own Action
To create your own action, you can use this repository as a template! Just
follow the below instructions:
1. Click the **Use this template** button at the top of the repository
1. Select **Create a new repository**
1. Select an owner and name for your new repository
1. Click **Create repository**
1. Clone your new repository
> [!IMPORTANT]
>
> Make sure to remove or update the [`CODEOWNERS`](./CODEOWNERS) file! For
> details on how to use this file, see
> [About code owners](https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners).
## Initial Setup
After you've cloned the repository to your local machine or codespace, you'll
need to perform some initial setup steps before you can develop your action.
> [!NOTE]
>
> You'll need to have a reasonably modern version of
> [Node.js](https://nodejs.org) handy (20.x or later should work!). If you are
> using a version manager like [`nodenv`](https://github.com/nodenv/nodenv) or
> [`nvm`](https://github.com/nvm-sh/nvm), this template has a `.node-version`
> file at the root of the repository that will be used to automatically switch
> to the correct version when you `cd` into the repository. Additionally, this
> `.node-version` file is used by GitHub Actions in any `actions/setup-node`
> actions.
1. :hammer_and_wrench: Install the dependencies
```bash
npm install
```
1. :building_construction: Package the TypeScript for distribution
```bash
npm run bundle
```
1. :white_check_mark: Run the tests
```bash
$ npm test
PASS ./index.test.js
✓ throws invalid number (3ms)
✓ wait 500 ms (504ms)
✓ test runs (95ms)
...
```
## Update the Action Metadata
The [`action.yml`](action.yml) file defines metadata about your action, such as
input(s) and output(s). For details about this file, see
[Metadata syntax for GitHub Actions](https://docs.github.com/en/actions/creating-actions/metadata-syntax-for-github-actions).
When you copy this repository, update `action.yml` with the name, description,
inputs, and outputs for your action.
## Update the Action Code
The [`src/`](./src/) directory is the heart of your action! This contains the
source code that will be run when your action is invoked. You can replace the
contents of this directory with your own code.
There are a few things to keep in mind when writing your action code:
- Most GitHub Actions toolkit and CI/CD operations are processed asynchronously.
In `main.ts`, you will see that the action is run in an `async` function.
```javascript
import * as core from '@actions/core'
//...
async function run() {
try {
//...
} catch (error) {
core.setFailed(error.message)
}
}
```
For more information about the GitHub Actions toolkit, see the
[documentation](https://github.com/actions/toolkit/blob/master/README.md).
So, what are you waiting for? Go ahead and start customizing your action!
1. Create a new branch
```bash
git checkout -b releases/v1
```
1. Replace the contents of `src/` with your action code
1. Add tests to `__tests__/` for your source code
1. Format, test, and build the action
```bash
npm run all
```
> This step is important! It will run [`ncc`](https://github.com/vercel/ncc)
> to build the final JavaScript action code with all dependencies included.
> If you do not run this step, your action will not work correctly when it is
> used in a workflow. This step also includes the `--license` option for
> `ncc`, which will create a license file for all of the production node
> modules used in your project.
1. Commit your changes
```bash
git add .
git commit -m "My first action is ready!"
```
1. Push them to your repository
```bash
git push -u origin releases/v1
```
1. Create a pull request and get feedback on your action
1. Merge the pull request into the `main` branch
Your action is now published! :rocket:
For information about versioning your action, see
[Versioning](https://github.com/actions/toolkit/blob/master/docs/action-versioning.md)
in the GitHub Actions toolkit.
## Validate the Action
You can now validate the action by referencing it in a workflow file. For
example, [`ci.yml`](./.github/workflows/ci.yml) demonstrates how to reference an
action in the same repository.
Add this to your GitHub Actions workflow, **before** running `turbo build`.
<!-- prettier-ignore -->
```yaml
steps:
- name: Checkout
id: checkout
uses: actions/checkout@v4
- name: Test Local Action
id: test-action
uses: ./
with:
milliseconds: 1000
- name: Print Output
id: output
run: echo "${{ steps.test-action.outputs.time }}"
- name: Cache for Turbo
uses: rharkor/caching-for-turbo@v1
```
For example workflow runs, check out the
[Actions tab](https://github.com/actions/typescript-action/actions)! :rocket:
The action will:
## Usage
1. Launches a server on `localhost:41230` (and waits for it to be ready).
After testing, you can create version tag(s) that developers can use to
reference different stable versions of your action. For more information, see
[Versioning](https://github.com/actions/toolkit/blob/master/docs/action-versioning.md)
in the GitHub Actions toolkit.
2. Exports the `TURBO_API`, `TURBO_TOKEN` and `TURBO_TEAM` environment variables
for use by `turbo build`.
To include the action in a workflow in another repository, you can use the
`uses` syntax with the `@` symbol to reference a specific branch, tag, or commit
hash.
3. Sets up a post-build step to print the server logs (for debugging).
## Configuration
Configuration is optional. Here are the available options and their default
values:
<!-- prettier-ignore -->
```yaml
steps:
- name: Checkout
id: checkout
uses: actions/checkout@v4
- name: Test Local Action
id: test-action
uses: actions/typescript-action@v1 # Commit with the `v1` tag
with:
milliseconds: 1000
- name: Print Output
id: output
run: echo "${{ steps.test-action.outputs.time }}"
with:
# Set the prefix for the cache keys.
cache-prefix: turbogha_
```
## Publishing a New Release
## License
This project includes a helper script, [`script/release`](./script/release)
designed to streamline the process of tagging and pushing new releases for
GitHub Actions.
This project is licensed under the MIT License - see the [LICENSE](LICENSE)
file.
GitHub Actions allows users to select a specific version of the action to use,
based on release tags. This script simplifies this process by performing the
following steps:
1. **Retrieving the latest release tag:** The script starts by fetching the most
recent release tag by looking at the local data available in your repository.
1. **Prompting for a new release tag:** The user is then prompted to enter a new
release tag. To assist with this, the script displays the latest release tag
and provides a regular expression to validate the format of the new tag.
1. **Tagging the new release:** Once a valid new tag is entered, the script tags
the new release.
1. **Pushing the new tag to the remote:** Finally, the script pushes the new tag
to the remote repository. From here, you will need to create a new release in
GitHub and users can easily reference the new tag in their workflows.
The code was inspired by
[dtinth](https://github.com/dtinth/setup-github-actions-caching-for-turbo/actions)
but was entirely rewritten to be more robust.

View file

@ -1,17 +0,0 @@
/**
* Unit tests for the action's entrypoint, src/index.ts
*/
import * as main from '../src/main'
// Mock the action's entrypoint
const runMock = jest.spyOn(main, 'run').mockImplementation()
describe('index', () => {
it('calls run when imported', async () => {
// eslint-disable-next-line @typescript-eslint/no-require-imports
require('../src/index')
expect(runMock).toHaveBeenCalled()
})
})

View file

@ -1,89 +0,0 @@
/**
* Unit tests for the action's main functionality, src/main.ts
*
* These should be run as if the action was called from a workflow.
* Specifically, the inputs listed in `action.yml` should be set as environment
* variables following the pattern `INPUT_<INPUT_NAME>`.
*/
import * as core from '@actions/core'
import * as main from '../src/main'
// Mock the action's main function
const runMock = jest.spyOn(main, 'run')
// Other utilities
const timeRegex = /^\d{2}:\d{2}:\d{2}/
// Mock the GitHub Actions core library
let debugMock: jest.SpiedFunction<typeof core.debug>
let errorMock: jest.SpiedFunction<typeof core.error>
let getInputMock: jest.SpiedFunction<typeof core.getInput>
let setFailedMock: jest.SpiedFunction<typeof core.setFailed>
let setOutputMock: jest.SpiedFunction<typeof core.setOutput>
describe('action', () => {
beforeEach(() => {
jest.clearAllMocks()
debugMock = jest.spyOn(core, 'debug').mockImplementation()
errorMock = jest.spyOn(core, 'error').mockImplementation()
getInputMock = jest.spyOn(core, 'getInput').mockImplementation()
setFailedMock = jest.spyOn(core, 'setFailed').mockImplementation()
setOutputMock = jest.spyOn(core, 'setOutput').mockImplementation()
})
it('sets the time output', async () => {
// Set the action's inputs as return values from core.getInput()
getInputMock.mockImplementation(name => {
switch (name) {
case 'milliseconds':
return '500'
default:
return ''
}
})
await main.run()
expect(runMock).toHaveReturned()
// Verify that all of the core library functions were called correctly
expect(debugMock).toHaveBeenNthCalledWith(1, 'Waiting 500 milliseconds ...')
expect(debugMock).toHaveBeenNthCalledWith(
2,
expect.stringMatching(timeRegex)
)
expect(debugMock).toHaveBeenNthCalledWith(
3,
expect.stringMatching(timeRegex)
)
expect(setOutputMock).toHaveBeenNthCalledWith(
1,
'time',
expect.stringMatching(timeRegex)
)
expect(errorMock).not.toHaveBeenCalled()
})
it('sets a failed status', async () => {
// Set the action's inputs as return values from core.getInput()
getInputMock.mockImplementation(name => {
switch (name) {
case 'milliseconds':
return 'this is not a number'
default:
return ''
}
})
await main.run()
expect(runMock).toHaveReturned()
// Verify that all of the core library functions were called correctly
expect(setFailedMock).toHaveBeenNthCalledWith(
1,
'milliseconds not a number'
)
expect(errorMock).not.toHaveBeenCalled()
})
})

View file

@ -1,25 +0,0 @@
/**
* Unit tests for src/wait.ts
*/
import { wait } from '../src/wait'
import { expect } from '@jest/globals'
describe('wait.ts', () => {
it('throws an invalid number', async () => {
const input = parseInt('foo', 10)
expect(isNaN(input)).toBe(true)
await expect(wait(input)).rejects.toThrow('milliseconds not a number')
})
it('waits with a valid number', async () => {
const start = new Date()
await wait(500)
const end = new Date()
const delta = Math.abs(end.getTime() - start.getTime())
expect(delta).toBeGreaterThan(450)
})
})

View file

@ -1,6 +1,8 @@
name: 'The name of your action here'
description: 'Provide a description here'
author: 'Your name or organization here'
name: 'Caching for Turborepo'
description:
'Sets up Turborepo Remote Caching to work with GitHub Actions built-in cache.
No Vercel account access tokens needed.'
author: 'HUORT Louis'
# Add your action's branding here. This will appear on the GitHub Marketplace.
branding:
@ -9,16 +11,12 @@ branding:
# Define your inputs here.
inputs:
milliseconds:
description: 'Your input description here'
required: true
default: '1000'
# Define your outputs here.
outputs:
time:
description: 'Your output description here'
cache-prefix:
description: 'Prefix for the cache key'
required: false
default: turbogha_
runs:
using: node20
main: dist/index.js
post: post.js

View file

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="106" height="20" role="img" aria-label="Coverage: 100%"><title>Coverage: 100%</title><linearGradient id="s" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="r"><rect width="106" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#r)"><rect width="63" height="20" fill="#555"/><rect x="63" width="43" height="20" fill="#4c1"/><rect width="106" height="20" fill="url(#s)"/></g><g fill="#fff" text-anchor="middle" font-family="Verdana,Geneva,DejaVu Sans,sans-serif" text-rendering="geometricPrecision" font-size="110"><text aria-hidden="true" x="325" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="530">Coverage</text><text x="325" y="140" transform="scale(.1)" fill="#fff" textLength="530">Coverage</text><text aria-hidden="true" x="835" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="330">100%</text><text x="835" y="140" transform="scale(.1)" fill="#fff" textLength="330">100%</text></g></svg>

Before

Width:  |  Height:  |  Size: 1.1 KiB

12
dist/file.js generated vendored Normal file
View file

@ -0,0 +1,12 @@
'use strict'
const pino = require('./pino')
const { once } = require('events')
module.exports = async function (opts = {}) {
const destOpts = Object.assign({}, opts, { dest: opts.destination || 1, sync: false })
delete destOpts.destination
const destination = pino.destination(destOpts)
await once(destination, 'ready')
return destination
}

132816
dist/index.js generated vendored

File diff suppressed because one or more lines are too long

2
dist/index.js.map generated vendored

File diff suppressed because one or more lines are too long

2406
dist/licenses.txt generated vendored

File diff suppressed because it is too large Load diff

194
dist/worker.js generated vendored Normal file
View file

@ -0,0 +1,194 @@
'use strict'
const EE = require('events')
const { pipeline, PassThrough } = require('stream')
const pino = require('../pino.js')
const build = require('pino-abstract-transport')
const loadTransportStreamBuilder = require('./transport-stream')
// This file is not checked by the code coverage tool,
// as it is not reliable.
/* istanbul ignore file */
/*
* > Multiple targets & pipelines
*
*
*
* p
* i
* target n
* o
* targets target .
* m source
* target u
* l write
* t
* pipeline i
* PassThrough s
* t write Thread
* r Stream
* pipeline e
* PassThrough a
* m
*
*
*
*
*
* > One single pipeline or target
*
*
* source
*
* write
*
*
* targets target
*
*
*
*
*
* OR
*
*
*
* targets pipeline Thread
* PassThrough Stream
*
*
*
* OR write
*
*
*
* pipeline
* PassThrough
*
*
*
*
*
*/
module.exports = async function ({ targets, pipelines, levels, dedupe }) {
const targetStreams = []
// Process targets
if (targets && targets.length) {
targets = await Promise.all(targets.map(async (t) => {
const fn = await loadTransportStreamBuilder(t.target)
const stream = await fn(t.options)
return {
level: t.level,
stream
}
}))
targetStreams.push(...targets)
}
// Process pipelines
if (pipelines && pipelines.length) {
pipelines = await Promise.all(
pipelines.map(async (p) => {
let level
const pipeDests = await Promise.all(
p.map(async (t) => {
// level assigned to pipeline is duplicated over all its targets, just store it
level = t.level
const fn = await loadTransportStreamBuilder(t.target)
const stream = await fn(t.options)
return stream
}
))
return {
level,
stream: createPipeline(pipeDests)
}
})
)
targetStreams.push(...pipelines)
}
// Skip building the multistream step if either one single pipeline or target is defined and
// return directly the stream instance back to TreadStream.
// This is equivalent to define either:
//
// pino.transport({ target: ... })
//
// OR
//
// pino.transport({ pipeline: ... })
if (targetStreams.length === 1) {
return targetStreams[0].stream
} else {
return build(process, {
parse: 'lines',
metadata: true,
close (err, cb) {
let expected = 0
for (const transport of targetStreams) {
expected++
transport.stream.on('close', closeCb)
transport.stream.end()
}
function closeCb () {
if (--expected === 0) {
cb(err)
}
}
}
})
}
// TODO: Why split2 was not used for pipelines?
function process (stream) {
const multi = pino.multistream(targetStreams, { levels, dedupe })
// TODO manage backpressure
stream.on('data', function (chunk) {
const { lastTime, lastMsg, lastObj, lastLevel } = this
multi.lastLevel = lastLevel
multi.lastTime = lastTime
multi.lastMsg = lastMsg
multi.lastObj = lastObj
// TODO handle backpressure
multi.write(chunk + '\n')
})
}
/**
* Creates a pipeline using the provided streams and return an instance of `PassThrough` stream
* as a source for the pipeline.
*
* @param {(TransformStream|WritableStream)[]} streams An array of streams.
* All intermediate streams in the array *MUST* be `Transform` streams and only the last one `Writable`.
* @returns A `PassThrough` stream instance representing the source stream of the pipeline
*/
function createPipeline (streams) {
const ee = new EE()
const stream = new PassThrough({
autoDestroy: true,
destroy (_, cb) {
ee.on('error', cb)
ee.on('closed', cb)
}
})
pipeline(stream, ...streams, function (err) {
if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
ee.emit('error', err)
return
}
ee.emit('closed')
})
return stream
}
}

170
dist/worker1.js generated vendored Normal file
View file

@ -0,0 +1,170 @@
'use strict'
const { realImport, realRequire } = require('real-require')
const { workerData, parentPort } = require('worker_threads')
const { WRITE_INDEX, READ_INDEX } = require('./indexes')
const { waitDiff } = require('./wait')
const {
dataBuf,
filename,
stateBuf
} = workerData
let destination
const state = new Int32Array(stateBuf)
const data = Buffer.from(dataBuf)
async function start () {
let worker
try {
if (filename.endsWith('.ts') || filename.endsWith('.cts')) {
// TODO: add support for the TSM modules loader ( https://github.com/lukeed/tsm ).
if (!process[Symbol.for('ts-node.register.instance')]) {
realRequire('ts-node/register')
} else if (process.env.TS_NODE_DEV) {
realRequire('ts-node-dev')
}
// TODO: Support ES imports once tsc, tap & ts-node provide better compatibility guarantees.
// Remove extra forwardslash on Windows
worker = realRequire(decodeURIComponent(filename.replace(process.platform === 'win32' ? 'file:///' : 'file://', '')))
} else {
worker = (await realImport(filename))
}
} catch (error) {
// A yarn user that tries to start a ThreadStream for an external module
// provides a filename pointing to a zip file.
// eg. require.resolve('pino-elasticsearch') // returns /foo/pino-elasticsearch-npm-6.1.0-0c03079478-6915435172.zip/bar.js
// The `import` will fail to try to load it.
// This catch block executes the `require` fallback to load the module correctly.
// In fact, yarn modifies the `require` function to manage the zipped path.
// More details at https://github.com/pinojs/pino/pull/1113
// The error codes may change based on the node.js version (ENOTDIR > 12, ERR_MODULE_NOT_FOUND <= 12 )
if ((error.code === 'ENOTDIR' || error.code === 'ERR_MODULE_NOT_FOUND') &&
filename.startsWith('file://')) {
worker = realRequire(decodeURIComponent(filename.replace('file://', '')))
} else if (error.code === undefined || error.code === 'ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING') {
// When bundled with pkg, an undefined error is thrown when called with realImport
// When bundled with pkg and using node v20, an ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING error is thrown when called with realImport
// More info at: https://github.com/pinojs/thread-stream/issues/143
worker = realRequire(decodeURIComponent(filename.replace(process.platform === 'win32' ? 'file:///' : 'file://', '')))
} else {
throw error
}
}
// Depending on how the default export is performed, and on how the code is
// transpiled, we may find cases of two nested "default" objects.
// See https://github.com/pinojs/pino/issues/1243#issuecomment-982774762
if (typeof worker === 'object') worker = worker.default
if (typeof worker === 'object') worker = worker.default
destination = await worker(workerData.workerData)
destination.on('error', function (err) {
Atomics.store(state, WRITE_INDEX, -2)
Atomics.notify(state, WRITE_INDEX)
Atomics.store(state, READ_INDEX, -2)
Atomics.notify(state, READ_INDEX)
parentPort.postMessage({
code: 'ERROR',
err
})
})
destination.on('close', function () {
// process._rawDebug('worker close emitted')
const end = Atomics.load(state, WRITE_INDEX)
Atomics.store(state, READ_INDEX, end)
Atomics.notify(state, READ_INDEX)
setImmediate(() => {
process.exit(0)
})
})
}
// No .catch() handler,
// in case there is an error it goes
// to unhandledRejection
start().then(function () {
parentPort.postMessage({
code: 'READY'
})
process.nextTick(run)
})
function run () {
const current = Atomics.load(state, READ_INDEX)
const end = Atomics.load(state, WRITE_INDEX)
// process._rawDebug(`pre state ${current} ${end}`)
if (end === current) {
if (end === data.length) {
waitDiff(state, READ_INDEX, end, Infinity, run)
} else {
waitDiff(state, WRITE_INDEX, end, Infinity, run)
}
return
}
// process._rawDebug(`post state ${current} ${end}`)
if (end === -1) {
// process._rawDebug('end')
destination.end()
return
}
const toWrite = data.toString('utf8', current, end)
// process._rawDebug('worker writing: ' + toWrite)
const res = destination.write(toWrite)
if (res) {
Atomics.store(state, READ_INDEX, end)
Atomics.notify(state, READ_INDEX)
setImmediate(run)
} else {
destination.once('drain', function () {
Atomics.store(state, READ_INDEX, end)
Atomics.notify(state, READ_INDEX)
run()
})
}
}
process.on('unhandledRejection', function (err) {
parentPort.postMessage({
code: 'ERROR',
err
})
process.exit(1)
})
process.on('uncaughtException', function (err) {
parentPort.postMessage({
code: 'ERROR',
err
})
process.exit(1)
})
process.once('exit', exitCode => {
if (exitCode !== 0) {
process.exit(exitCode)
return
}
if (destination?.writableNeedDrain && !destination?.writableEnded) {
parentPort.postMessage({
code: 'WARNING',
err: new Error('ThreadStream: process exited before destination stream was drained. this may indicate that the destination stream try to write to a another missing stream')
})
}
process.exit(0)
})

1954
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -22,53 +22,30 @@
},
"scripts": {
"bundle": "npm run format:write && npm run package",
"ci-test": "npx jest",
"coverage": "npx make-coverage-badge --output-path ./badges/coverage.svg",
"format:write": "npx prettier --write .",
"format:check": "npx prettier --check .",
"lint": "npx eslint . -c ./.github/linters/.eslintrc.yml",
"package": "npx ncc build src/index.ts -o dist --source-map --license licenses.txt",
"package:watch": "npm run package -- --watch",
"test": "npx jest",
"all": "npm run format:write && npm run lint && npm run test && npm run coverage && npm run package"
"all": "npm run format:write && npm run lint && npm run coverage && npm run package",
"test-build": "npm run -s package",
"test": "turbo run test-build",
"dev-run": "tsx ./src/dev-run.ts"
},
"license": "MIT",
"jest": {
"preset": "ts-jest",
"verbose": true,
"clearMocks": true,
"testEnvironment": "node",
"moduleFileExtensions": [
"js",
"ts"
],
"testMatch": [
"**/*.test.ts"
],
"testPathIgnorePatterns": [
"/node_modules/",
"/dist/"
],
"transform": {
"^.+\\.ts$": "ts-jest"
},
"coverageReporters": [
"json-summary",
"text",
"lcov"
],
"collectCoverage": true,
"collectCoverageFrom": [
"./src/**"
]
},
"dependencies": {
"@actions/core": "^1.10.1"
"@actions/cache": "^3.2.4",
"@actions/core": "^1.10.1",
"fastify": "^4.27.0",
"stream-to-promise": "^3.0.0",
"wait-on": "^7.2.0"
},
"devDependencies": {
"@jest/globals": "^29.7.0",
"@types/jest": "^29.5.12",
"@types/node": "^20.14.2",
"@types/stream-to-promise": "^2.2.4",
"@types/wait-on": "^5.3.4",
"@typescript-eslint/eslint-plugin": "^7.13.0",
"@typescript-eslint/parser": "^7.13.0",
"@vercel/ncc": "^0.38.1",
@ -77,11 +54,13 @@
"eslint-plugin-jest": "^28.6.0",
"eslint-plugin-jsonc": "^2.16.0",
"eslint-plugin-prettier": "^5.1.3",
"jest": "^29.7.0",
"make-coverage-badge": "^1.2.0",
"prettier": "^3.3.2",
"prettier-eslint": "^16.3.0",
"ts-jest": "^29.1.4",
"tsx": "^4.15.4",
"turbo": "^2.0.3",
"typescript": "^5.4.5"
}
}
},
"packageManager": "npm@10.5.2"
}

27
post.js Normal file
View file

@ -0,0 +1,27 @@
const core = require('@actions/core')
const { readFile } = require('fs/promises')
const serverPort = 41230
const serverLogFile = '/tmp/turbogha.log'
/**
* The post function of the action. It kills the server
* @returns {Promise<void>} Resolves when the action is complete.
*/
async function run() {
try {
//* Kill the server
await fetch(`http://localhost:${serverPort}/shutdown`, {
method: 'DELETE'
})
//* Read the logs
const logs = await readFile(serverLogFile, 'utf-8')
core.info(logs)
} catch (error) {
// Fail the workflow run if an error occurs
if (error instanceof Error) core.setFailed(error.message)
}
}
run()

View file

@ -4,23 +4,14 @@
"config:recommended"
],
"dependencyDashboard": true,
"baseBranches": [
"dev"
],
"baseBranches": ["dev"],
"packageRules": [
{
"matchUpdateTypes": [
"minor",
"patch",
"pin",
"digest"
],
"matchUpdateTypes": ["minor", "patch", "pin", "digest"],
"automerge": true
},
{
"matchDepTypes": [
"devDependencies"
],
"matchDepTypes": ["devDependencies"],
"automerge": true
}
],

13
src/dev-run.ts Normal file
View file

@ -0,0 +1,13 @@
// Run the server in foreground and kill it after the test
import { server } from './lib/server'
import { launchServer } from './lib/server/utils'
const main = async (): Promise<void> => {
//* Run server
server()
//* Run launch server
await launchServer(true)
}
main()

89
src/lib/cache/index.ts vendored Normal file
View file

@ -0,0 +1,89 @@
import { Readable } from 'node:stream'
import { env } from '../env'
import { pipeline } from 'node:stream/promises'
import {
createReadStream,
createWriteStream,
existsSync,
statSync
} from 'node:fs'
import { getCacheClient } from './utils'
import { cacheVersion, getCacheKey } from '../constants'
type RequestContext = {
log: {
info: (message: string) => void
}
}
//* Cache API
export async function saveCache(
ctx: RequestContext,
hash: string,
tag: string,
stream: Readable
): Promise<void> {
if (!env.valid) {
ctx.log.info(
`Using filesystem cache because cache API env vars are not set`
)
await pipeline(stream, createWriteStream(`/tmp/${hash}.tg.bin`))
return
}
const client = getCacheClient()
const existingCacheResponse = await client.reserve(
getCacheKey(hash, tag),
cacheVersion
)
// Silently exit when we have not been able to receive a cache-hit
if (existingCacheResponse.success === false) {
return
}
const id = existingCacheResponse.data?.cacheId
if (!id) {
throw new Error(
`Unable to reserve cache (received: ${JSON.stringify(
existingCacheResponse.data
)})`
)
}
ctx.log.info(`Reserved cache ${id}`)
await client.save(parseInt(id), stream)
ctx.log.info(`Saved cache ${id} for ${hash}`)
}
export async function getCache(
ctx: RequestContext,
hash: string
): Promise<
[number | undefined, Readable | ReadableStream, string | undefined] | null
> {
if (!env.valid) {
const path = `/tmp/${hash}.tg.bin`
if (!existsSync(path)) return null
const size = statSync(path).size
return [size, createReadStream(path), undefined]
}
const client = getCacheClient()
const cacheKey = getCacheKey(hash)
const { data } = await client.query(cacheKey, cacheVersion)
ctx.log.info(`Cache lookup for ${cacheKey}`)
if (!data) {
ctx.log.info(`Cache lookup did not return data`)
return null
}
const [foundCacheKey, artifactTag] = String(data.cacheKey).split('#')
if (foundCacheKey !== cacheKey) {
ctx.log.info(`Cache key mismatch: ${foundCacheKey} !== ${cacheKey}`)
return null
}
const resp = await fetch(data.archiveLocation)
const size = +(resp.headers.get('content-length') || 0)
const readableStream = resp.body
if (!readableStream) {
throw new Error('Failed to retrieve cache stream')
}
return [size, readableStream, artifactTag]
}

122
src/lib/cache/utils.ts vendored Normal file
View file

@ -0,0 +1,122 @@
import { Readable } from 'node:stream'
import { env } from '../env'
import * as core from '@actions/core'
import * as cacheHttpClient from '@actions/cache/lib/internal/cacheHttpClient'
import streamToPromise from 'stream-to-promise'
import { createWriteStream } from 'node:fs'
import { unlink } from 'node:fs/promises'
class HandledError extends Error {
status: number
statusText: string
data: unknown
constructor(status: number, statusText: string, data: unknown) {
super(`${status}: ${statusText}`)
this.status = status
this.statusText = statusText
this.data = data
}
}
function handleFetchError(message: string) {
return (error: unknown) => {
if (error instanceof HandledError) {
core.error(`${message}: ${error.status} ${error.statusText}`)
core.error(JSON.stringify(error.data))
throw error
}
core.error(`${message}: ${error}`)
throw error
}
}
/**
 * Builds a small client for the GitHub Actions cache service on top of the
 * internal `@actions/cache` HTTP client.
 *
 * @returns An object exposing `reserve`, `save` and `query` operations.
 * @throws Error immediately when the required runtime env vars
 *   (ACTIONS_RUNTIME_TOKEN / ACTIONS_CACHE_URL, see ../env) are not all set.
 */
export function getCacheClient() {
  if (!env.valid) {
    throw new Error('Cache API env vars are not set')
  }
  /**
   * Reserves a cache slot for `key` at `version`.
   *
   * @returns `{ success: true, data: { cacheId } }` when the reservation was
   *   granted; `{ success: false }` when the service answered 409 (the entry
   *   is already reserved). Any other response is logged and rethrown as a
   *   HandledError via handleFetchError.
   */
  // NOTE(review): `cacheId` is declared as string here while `save` below
  // takes a numeric id — confirm the actual type returned by reserveCache.
  const reserve = async (
    key: string,
    version: string
  ): Promise<{
    success: boolean
    data?: { cacheId: string }
  }> => {
    try {
      const reserveCacheResponse = await cacheHttpClient.reserveCache(key, [
        version
      ])
      if (reserveCacheResponse?.result?.cacheId) {
        return {
          success: true,
          data: {
            cacheId: reserveCacheResponse.result.cacheId
          }
        }
      } else if (reserveCacheResponse?.statusCode === 409) {
        // 409: another run already holds this key/version — not fatal.
        return { success: false }
      } else {
        // Unexpected status: capture the body for diagnostics and rethrow.
        const { statusCode, statusText } = reserveCacheResponse
        const data = await reserveCacheResponse.readBody()
        const buildedError = new HandledError(statusCode, statusText, data)
        return handleFetchError('Unable to reserve cache')(buildedError)
      }
    } catch (error) {
      return handleFetchError('Unable to reserve cache')(error)
    }
  }
  /**
   * Uploads an artifact stream into the previously reserved cache entry `id`.
   * The stream is first written to a temp file, which is handed to
   * `saveCache` (it takes a file path) and removed after the upload.
   * Failures are logged and rethrown by handleFetchError.
   */
  const save = async (id: number, stream: Readable): Promise<void> => {
    try {
      //* Create a temporary file to store the cache
      const tempFile = `/tmp/cache-${id}.tg.bin`
      const writeStream = createWriteStream(tempFile)
      await streamToPromise(stream.pipe(writeStream))
      core.info(`Saved cache to ${tempFile}`)
      await cacheHttpClient.saveCache(id, tempFile)
      core.info(`Saved cache ${id}`)
      //* Remove the temporary file
      await unlink(tempFile)
    } catch (error) {
      handleFetchError('Unable to upload cache')(error)
    }
  }
  /**
   * Looks up a cache entry for the given key and version.
   *
   * @returns `{ success: true, data: { cacheKey, archiveLocation } }` when an
   *   entry exists; `{ success: false }` when nothing matched.
   */
  const query = async (
    keys: string,
    version: string
  ): Promise<{
    success: boolean
    data?: { cacheKey: string; archiveLocation: string }
  }> => {
    try {
      const queryCacheResponse = await cacheHttpClient.getCacheEntry(
        [keys],
        [version]
      )
      if (queryCacheResponse?.archiveLocation) {
        return {
          success: true,
          data: {
            cacheKey: keys,
            archiveLocation: queryCacheResponse.archiveLocation
          }
        }
      } else {
        return {
          success: false
        }
      }
    } catch (error) {
      return handleFetchError('Unable to query cache')(error)
    }
  }
  return {
    reserve,
    save,
    query
  }
}

8
src/lib/constants.ts Normal file
View file

@ -0,0 +1,8 @@
import * as core from '@actions/core'

// Port the local cache proxy server listens on.
export const serverPort = 41230
// Version namespace baked into every cache entry; bump to invalidate old caches.
export const cacheVersion = 'turbogha_v2'
// Cache key prefix, overridable through the `cache-prefix` action input.
export const cachePrefix = core.getInput('cache-prefix') || 'turbogha_'
// Builds the full cache key for an artifact hash, with an optional `#tag` suffix.
export const getCacheKey = (hash: string, tag?: string): string =>
  `${cachePrefix}${hash}${tag ? `#${tag}` : ''}`
// Log file used by the detached server process.
export const serverLogFile = '/tmp/turbogha.log'

21
src/lib/env/index.ts vendored Normal file
View file

@ -0,0 +1,21 @@
//* Credentials required to talk to the GitHub Actions cache API.
const envObject = {
  ACTIONS_RUNTIME_TOKEN: process.env.ACTIONS_RUNTIME_TOKEN,
  ACTIONS_CACHE_URL: process.env.ACTIONS_CACHE_URL
}

// Discriminated on `valid`: when true, every field is guaranteed defined.
type TEnv =
  | ({ valid: false } & typeof envObject)
  | ({ valid: true } & {
      [K in keyof typeof envObject]: NonNullable<(typeof envObject)[K]>
    })

/** Snapshot of the cache-service env vars plus a `valid` completeness flag. */
export const env = {
  valid: !Object.values(envObject).some(value => value === undefined),
  ...envObject
} as TEnv

72
src/lib/server/index.ts Normal file
View file

@ -0,0 +1,72 @@
import Fastify from 'fastify'
import { serverPort } from '../constants'
import { getCache, saveCache } from '../cache'
export async function server(): Promise<void> {
//* Create the server
const fastify = Fastify({
logger: true
})
//? Server status check
fastify.get('/', async () => {
return { ok: true }
})
//? Shut down the server
const shutdown = () => {
setTimeout(() => process.exit(0), 100)
return { ok: true }
}
fastify.delete('/shutdown', async () => {
return shutdown()
})
//? Handle streaming requets body
// https://www.fastify.io/docs/latest/Reference/ContentTypeParser/#catch-all
fastify.addContentTypeParser(
'application/octet-stream',
(_req, _payload, done) => {
done(null)
}
)
//? Upload cache
fastify.put('/v8/artifacts/:hash', async request => {
const hash = (request.params as { hash: string }).hash
request.log.info(`Received artifact for ${hash}`)
await saveCache(
request,
hash,
String(request.headers['x-artifact-tag'] || ''),
request.raw
)
request.log.info(`Saved artifact for ${hash}`)
return { ok: true }
})
//? Download cache
fastify.get('/v8/artifacts/:hash', async (request, reply) => {
const hash = (request.params as { hash: string }).hash
request.log.info(`Requested artifact for ${hash}`)
const result = await getCache(request, hash)
if (result === null) {
request.log.info(`Artifact for ${hash} not found`)
reply.code(404)
return { ok: false }
}
const [size, stream, artifactTag] = result
if (size) {
reply.header('Content-Length', size)
}
reply.header('Content-Type', 'application/octet-stream')
if (artifactTag) {
reply.header('x-artifact-tag', artifactTag)
}
request.log.info(`Sending artifact for ${hash}`)
return reply.send(stream)
})
//* Start the server
await fastify.listen({ port: serverPort })
}

66
src/lib/server/utils.ts Normal file
View file

@ -0,0 +1,66 @@
import waitOn from 'wait-on'
import {
cachePrefix,
cacheVersion,
serverLogFile,
serverPort
} from '../constants'
import * as core from '@actions/core'
import { openSync } from 'fs'
import { spawn } from 'child_process'
/** Polls the local cache server until it answers HTTP GET, failing after 5 s. */
export const waitForServer = async (): Promise<void> => {
  const resources = [`http-get://localhost:${serverPort}`]
  await waitOn({ resources, timeout: 5000 })
}
/** Exports `name`=`value` for subsequent workflow steps and echoes it to the log. */
export const exportVariable = (name: string, value: string): void => {
  core.exportVariable(name, value)
  core.info(` ${name}=${value}`)
}
/**
 * Boots the cache server and exports the variables Turbo reads to use it.
 *
 * In normal runs a detached child process re-executes this same script with
 * the `--server` flag, logging to serverLogFile, and the variables are
 * exported to the workflow. When `devRun` is set, the server is expected to
 * be started separately and the export commands are only printed.
 */
export async function launchServer(devRun?: boolean): Promise<void> {
  if (!devRun) {
    //* Launch a detached child process to run the server
    // See: https://nodejs.org/docs/latest-v16.x/api/child_process.html#optionsdetached
    const logOut = openSync(serverLogFile, 'a')
    const logErr = openSync(serverLogFile, 'a')
    const [nodeBin, script] = process.argv
    const child = spawn(nodeBin, [script, '--server'], {
      detached: true,
      stdio: ['ignore', logOut, logErr]
    })
    // Let the parent exit independently of the server process.
    child.unref()
    core.info(`Cache version: ${cacheVersion}`)
    core.info(`Cache prefix: ${cachePrefix}`)
    core.info(`Launched child process: ${child.pid}`)
    core.info(`Server log file: ${serverLogFile}`)
  }

  //* Block until the server answers
  await waitForServer()
  core.info(`Server is now up and running.`)

  //* Export the environment variables for Turbo
  if (devRun) {
    console.log('Execute:')
    console.log(`export TURBOGHA_PORT=${serverPort}`)
    console.log(`export TURBO_API=http://localhost:${serverPort}`)
    console.log(`export TURBO_TOKEN=turbogha`)
    console.log(`export TURBO_TEAM=turbogha`)
  } else {
    core.info('The following environment variables are exported:')
    exportVariable('TURBOGHA_PORT', `${serverPort}`)
    exportVariable('TURBO_API', `http://localhost:${serverPort}`)
    exportVariable('TURBO_TOKEN', 'turbogha')
    exportVariable('TURBO_TEAM', 'turbogha')
  }
}
/**
 * Stops the detached cache server by calling its DELETE /shutdown endpoint.
 * The server replies immediately and exits roughly 100 ms later.
 *
 * Fix: added the explicit `Promise<void>` return annotation for consistency
 * with the other exported async functions in this module.
 */
export async function killServer(): Promise<void> {
  //* Kill the server
  await fetch(`http://localhost:${serverPort}/shutdown`, {
    method: 'DELETE'
  })
}

View file

@ -1,5 +1,6 @@
import * as core from '@actions/core'
import { wait } from './wait'
import { server } from './lib/server'
import { launchServer } from './lib/server/utils'
/**
* The main function for the action.
@ -7,18 +8,12 @@ import { wait } from './wait'
*/
export async function run(): Promise<void> {
try {
const ms: string = core.getInput('milliseconds')
// Debug logs are only output if the `ACTIONS_STEP_DEBUG` secret is true
core.debug(`Waiting ${ms} milliseconds ...`)
// Log the current timestamp, wait, then log the new timestamp
core.debug(new Date().toTimeString())
await wait(parseInt(ms, 10))
core.debug(new Date().toTimeString())
// Set outputs for other workflow steps to use
core.setOutput('time', new Date().toTimeString())
//* Daemon process
if (process.argv[2] === '--server') {
return server()
}
//* Base process
return launchServer()
} catch (error) {
// Fail the workflow run if an error occurs
if (error instanceof Error) core.setFailed(error.message)

View file

@ -1,14 +0,0 @@
/**
 * Pauses for the given duration.
 * @param milliseconds How long to sleep.
 * @returns {Promise<string>} Resolves with 'done!' once the timer fires;
 *   rejects when `milliseconds` is NaN.
 */
export async function wait(milliseconds: number): Promise<string> {
  // Rejecting via throw in an async body matches the original executor throw.
  if (isNaN(milliseconds)) {
    throw new Error('milliseconds not a number')
  }
  return new Promise(resolve =>
    setTimeout(() => resolve('done!'), milliseconds)
  )
}

9
turbo.json Normal file
View file

@ -0,0 +1,9 @@
{
"$schema": "https://turbo.build/schema.json",
"tasks": {
"test-build": {
"inputs": ["src"],
"outputs": ["dist"]
}
}
}