Mirror of https://github.com/rharkor/caching-for-turbo.git

Merge pull request #287 from rharkor/dev

Dev
Author: HUORT Louis
Date: 2025-04-10 16:13:27 +02:00
Commit: a1c4079258 (committed by GitHub; signed by github, GPG key ID B5690EEEBB952194)
27 changed files with 50106 additions and 2009 deletions

.env.example (new file, 10 lines)

@@ -0,0 +1,10 @@
S3_ACCESS_KEY_ID=secret
S3_SECRET_ACCESS_KEY=secret
S3_BUCKET=my-bucket
S3_REGION=us-east-1
S3_ENDPOINT=https://s3.amazonaws.com
S3_PREFIX=turbogha/
PROVIDER=s3
MAX_AGE=1d
MAX_FILES=100
MAX_SIZE=100mb

@@ -107,3 +107,46 @@ jobs:
      - name: Test build cache (full cache)
        run: ./check-full-turbo.sh
  test-action-s3:
    name: GitHub Actions Test (S3)
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        id: checkout
        uses: actions/checkout@v4
      - name: Setup Node.js
        id: setup-node
        uses: actions/setup-node@v4
        with:
          node-version-file: .node-version
          cache: npm
      - name: Install Dependencies
        id: npm-ci
        run: npm ci
      - name: Test Local Action
        uses: ./
        with:
          # Use a unique cache prefix for each pipeline & job
          cache-prefix:
            ${{ runner.os }}-${{ github.run_id }}-${{ github.run_number }}-${{
            runner.os }}
          provider: s3
          s3-access-key-id: ${{ secrets.S3_ACCESS_KEY_ID }}
          s3-secret-access-key: ${{ secrets.S3_SECRET_ACCESS_KEY }}
          s3-bucket: ${{ secrets.S3_BUCKET }}
          s3-region: ${{ secrets.S3_REGION }}
          s3-endpoint: ${{ secrets.S3_ENDPOINT }}
          max-age: 3m
          max-size: 100mb
          max-files: 100
      - name: Test build cache
        run: npm run test
      - name: Test build cache (full cache)
        run: ./check-full-turbo.sh

README.md (101 lines changed)

@@ -1,6 +1,6 @@
# Caching for Turborepo with GitHub Actions
-[![CI Status](https://github.com/rharkor/caching-for-turbo/workflows/ci/badge.svg)](https://github.com/rharkor/caching-for-turbo/actions)
+[![CI Status](https://github.com/rharkor/caching-for-turbo/workflows/Test%20core%20functionality/badge.svg)](https://github.com/rharkor/caching-for-turbo/actions)
Supercharge your [Turborepo](https://turbo.build/repo/) builds with our
dedicated GitHub Actions caching service, designed to make your CI workflows
@@ -55,7 +55,7 @@ the following step **before** you run `turbo build`:
```yaml
- name: Cache for Turbo
-  uses: rharkor/caching-for-turbo@v1.7
+  uses: rharkor/caching-for-turbo@v1.8
```
This GitHub Action facilitates:
@@ -75,6 +75,95 @@ provided):
```yaml
with:
  cache-prefix: turbogha_ # Custom prefix for cache keys
  provider: github # Storage provider: 'github' (default) or 's3'
  # S3 Provider Configuration (required when provider is set to 's3')
  s3-access-key-id: ${{ secrets.S3_ACCESS_KEY_ID }} # S3 access key
  s3-secret-access-key: ${{ secrets.S3_SECRET_ACCESS_KEY }} # S3 secret key
  s3-bucket: your-bucket-name # S3 bucket name
  s3-region: us-east-1 # S3 bucket region
  s3-endpoint: https://s3.amazonaws.com # S3 endpoint
  s3-prefix: turbogha/ # Optional prefix for S3 objects (default: 'turbogha/')
```
### Storage Providers
#### GitHub Cache (Default)
By default, this action uses GitHub's built-in cache service, which offers:
- Seamless integration with GitHub Actions
- No additional setup required
- Automatic cache pruning by GitHub
#### S3 Storage
For teams requiring more control over caching infrastructure, the action
supports Amazon S3 or compatible storage:
- Store cache artifacts in your own S3 bucket
- Works with any S3-compatible storage (AWS S3, MinIO, DigitalOcean Spaces,
etc.)
- Greater control over retention policies and storage costs
- Useful for larger organizations with existing S3 infrastructure
Note that by default, cached files are kept indefinitely. Configure max-size
(or another cleanup option) to avoid unexpected storage costs.
Example S3 configuration:
```yaml
- name: Cache for Turbo
  uses: rharkor/caching-for-turbo@v1.8
  with:
    provider: s3
    s3-access-key-id: ${{ secrets.S3_ACCESS_KEY_ID }}
    s3-secret-access-key: ${{ secrets.S3_SECRET_ACCESS_KEY }}
    s3-bucket: my-turbo-cache-bucket
    s3-region: us-west-2
    s3-endpoint: https://s3.amazonaws.com
```
### Cache Cleanup Options
To prevent unbounded growth of your cache (especially important when using S3
storage), you can configure automatic cleanup using one or more of these
options:
```yaml
with:
  # Cleanup by age - remove cache entries older than the specified duration
  max-age: 1mo # e.g., 1d (1 day), 1w (1 week), 1mo (1 month)
  # Cleanup by count - keep only the specified number of most recent cache entries
  max-files: 300 # e.g., limit to 300 files
  # Cleanup by size - remove oldest entries when total size exceeds the limit
  max-size: 10gb # e.g., 100mb, 5gb, 10gb
```
When using the GitHub provider, the built-in cache has its own pruning
mechanism, but these options can still be useful for more precise control.
For S3 storage, implementing these cleanup options is **highly recommended** to
control storage costs, as S3 does not automatically remove old cache entries.
Example with cleanup configuration:
```yaml
- name: Cache for Turbo
  uses: rharkor/caching-for-turbo@v1.8
  with:
    provider: s3
    s3-access-key-id: ${{ secrets.S3_ACCESS_KEY_ID }}
    s3-secret-access-key: ${{ secrets.S3_SECRET_ACCESS_KEY }}
    s3-bucket: my-turbo-cache-bucket
    s3-region: us-west-2
    s3-endpoint: https://s3.amazonaws.com
    # Cleanup configuration
    max-age: 2w
    max-size: 5gb
```
## Contributing
@@ -90,9 +179,15 @@ with:
2. In a separate terminal, execute the tests:
```bash
-npm test
+npm test -- --cache=remote:rw --no-daemon
```
#### Testing the cleanup script
```bash
npm run cleanup
```
## Licensing
Licensed under the MIT License. For more details, see the [LICENSE](LICENSE)

@@ -11,10 +11,49 @@ branding:
# Define your inputs here.
inputs:
  provider:
    description: 'Provider to use for caching (github, s3)'
    required: true
    default: 'github'
  cache-prefix:
    description: 'Prefix for the cache key'
    required: false
    default: turbogha_
  max-age:
    description:
      'Cleanup cache files older than this age (ex: 1mo, 1w, 1d). using
      https://www.npmjs.com/package/parse-duration'
    required: false
  max-files:
    description:
      'Cleanup oldest cache files when number of files exceeds this limit (ex:
      300)'
    required: false
  max-size:
    description:
      'Cleanup oldest cache files when total size exceeds this limit (ex: 100mb,
      10gb)'
    required: false
  s3-access-key-id:
    description: 'AWS S3 access key ID (required when provider is s3)'
    required: false
  s3-secret-access-key:
    description: 'AWS S3 secret access key (required when provider is s3)'
    required: false
  s3-bucket:
    description: 'AWS S3 bucket name (required when provider is s3)'
    required: false
  s3-region:
    description: 'AWS S3 region (required when provider is s3)'
    required: false
  s3-prefix:
    description: 'Prefix for S3 objects'
    required: false
    default: 'turbogha/'
  s3-endpoint:
    description: 'S3 endpoint (required when provider is s3)'
    required: false
    default: 'https://s3.amazonaws.com'
runs:
  using: node20

@@ -1,7 +1,7 @@
#!/bin/bash
# Run the test and capture the output
-npm run test -- --cache=remote:rw | tee test-output.log
+npm run test -- --cache=remote:rw --no-daemon | tee test-output.log
cat test-output.log

dist/post/index.js (generated, vendored, 487 lines changed)

File diff suppressed because it is too large

dist/post/index.js.map (generated, vendored, 2 lines changed)

File diff suppressed because one or more lines are too long

dist/post/package.json (generated, vendored, new file, 3 lines)

@@ -0,0 +1,3 @@
{
"type": "module"
}

dist/post/sourcemap-register.cjs (generated, vendored, new file, 1 line)

File diff suppressed because one or more lines are too long

dist/setup/index.js (generated, vendored, 34673 lines changed)

File diff suppressed because one or more lines are too long

dist/setup/index.js.map (generated, vendored, 2 lines changed)

File diff suppressed because one or more lines are too long

dist/setup/licenses.txt (generated, vendored, 14564 lines changed)

File diff suppressed because it is too large

dist/setup/package.json (generated, vendored, new file, 3 lines)

@@ -0,0 +1,3 @@
{
"type": "module"
}

dist/setup/sourcemap-register.cjs (generated, vendored, new file, 1 line)

File diff suppressed because one or more lines are too long

package-lock.json (generated, 1687 lines changed)

File diff suppressed because it is too large

@@ -17,6 +17,7 @@
  "exports": {
    ".": "./dist/index.js"
  },
  "type": "module",
  "engines": {
    "node": ">=20"
  },
@@ -31,13 +32,18 @@
    "all": "npm run format:write && npm run lint && npm run coverage && npm run package",
    "test-build": "npm run -s package",
    "test": "turbo run test-build",
-   "dev-run": "tsx ./src/dev-run.ts"
+   "dev-run": "tsx ./src/dev-run.ts",
    "cleanup": "tsx ./src/dev/cleanup.ts"
  },
  "license": "MIT",
  "dependencies": {
    "@actions/cache": "^4.0.0",
    "@actions/core": "^1.10.1",
    "@aws-sdk/client-s3": "^3.0.0",
    "@aws-sdk/lib-storage": "^3.0.0",
    "fastify": "^5.0.0",
    "filesize-parser": "^1.5.1",
    "parse-duration": "^2.1.4",
    "stream-to-promise": "^3.0.0",
    "wait-on": "^8.0.0"
  },
@@ -50,6 +56,7 @@
    "@typescript-eslint/eslint-plugin": "^8.29.1",
    "@typescript-eslint/parser": "^8.29.1",
    "@vercel/ncc": "^0.38.1",
    "dotenv": "^16.4.7",
    "eslint": "^9.24.0",
    "eslint-plugin-github": "^6.0.0",
    "eslint-plugin-jest": "^28.6.0",

@@ -1,5 +1,8 @@
// Run the server in foreground and kill it after the test
import { config } from 'dotenv'
config()
import { server } from './lib/server'
import { launchServer } from './lib/server/utils'

src/dev/cleanup.ts (new file, 14 lines)

@@ -0,0 +1,14 @@
import { config } from 'dotenv'
config()
import { cleanup } from 'src/lib/server/cleanup'
const main = async () => {
await cleanup({
log: {
info: console.log
}
})
}
main()

@@ -3,7 +3,7 @@ import { join } from 'path'
import { env } from './env'
export const serverPort = 41230
-export const cachePath = 'turbogha_v2'
+export const cachePath = 'turbogha_'
export const cachePrefix = core.getInput('cache-prefix') || cachePath
export const getCacheKey = (hash: string, tag?: string): string =>
  `${cachePrefix}${hash}${tag ? `#${tag}` : ''}`
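For reference, a minimal sketch of the key format this helper produces (the hash and tag values below are hypothetical; the S3 provider later reuses the same keys behind its s3-prefix):

```typescript
// Sketch: cache-key composition with the default prefix 'turbogha_'.
// '1f2a3b' and 'build' are hypothetical hash/tag values.
const cachePrefix = 'turbogha_'

const getCacheKey = (hash: string, tag?: string): string =>
  `${cachePrefix}${hash}${tag ? `#${tag}` : ''}`

console.log(getCacheKey('1f2a3b')) // turbogha_1f2a3b
console.log(getCacheKey('1f2a3b', 'build')) // turbogha_1f2a3b#build
```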

src/lib/providers.ts (new file, 39 lines)

@@ -0,0 +1,39 @@
import * as core from '@actions/core'
import { Readable } from 'stream'
import { TListFile } from './server/cleanup'
import { RequestContext } from './server'
import { getGithubProvider } from './providers/cache'
import { getS3Provider } from './providers/s3'
export type TProvider = {
save: (
ctx: RequestContext,
hash: string,
tag: string,
stream: Readable
) => Promise<void>
get: (
ctx: RequestContext,
hash: string
) => Promise<
[number | undefined, Readable | ReadableStream, string | undefined] | null
>
delete: (key: string) => Promise<void>
list: () => Promise<TListFile[]>
}
export const getProvider = (): TProvider => {
const provider = core.getInput('provider') || process.env.PROVIDER
if (!provider) {
throw new Error('Provider is required')
}
if (provider === 'github') {
return getGithubProvider()
}
if (provider === 's3') {
return getS3Provider()
}
throw new Error(`Provider ${provider} not supported`)
}
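The provider returned here is what the artifact server routes call into (see the src/lib/server diff further down). A rough usage sketch, with a hypothetical request context standing in for Fastify's request object and an illustrative hash:

```typescript
import { Readable } from 'stream'
import { getProvider } from './lib/providers' // path depends on where this runs

// Minimal object matching the RequestContext shape expected by the providers.
const ctx = { log: { info: (message: string) => console.log(message) } }

async function roundTrip(): Promise<void> {
  const provider = getProvider() // 'github' or 's3', resolved from input/env

  // Store an artifact stream under a hash ('sometag' is optional metadata).
  await provider.save(ctx, 'somehash', 'sometag', Readable.from('artifact-bytes'))

  // Retrieve it later; null signals a cache miss.
  const hit = await provider.get(ctx, 'somehash')
  if (hit) {
    const [size, , tag] = hit
    ctx.log.info(`cache hit: size=${size} tag=${tag}`)
  }
}

void roundTrip()
```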

@@ -1,5 +1,5 @@
import { Readable } from 'node:stream'
-import { env } from '../env'
+import { env } from '../../env'
import { pipeline } from 'node:stream/promises'
import {
  createReadStream,
@@ -7,14 +7,12 @@ import {
  existsSync,
  statSync
} from 'node:fs'
import { getCacheKey, getFsCachePath, getTempCachePath } from '../../constants'
import { RequestContext } from '../../server'
import * as core from '@actions/core'
import { TListFile } from '../../server/cleanup'
import { getCacheClient } from './utils'
-import { getCacheKey, getFsCachePath, getTempCachePath } from '../constants'
+import { TProvider } from '../../providers'
-type RequestContext = {
-  log: {
-    info: (message: string) => void
-  }
-}

//* Cache API
export async function saveCache(
@@ -68,3 +66,22 @@ export async function getCache(
  const readableStream = createReadStream(fileRestorationPath)
  return [size, readableStream, artifactTag]
}
export async function deleteCache(): Promise<void> {
core.error(`Cannot delete github cache automatically.`)
throw new Error(`Cannot delete github cache automatically.`)
}
export async function listCache(): Promise<TListFile[]> {
core.error(`Cannot list github cache automatically.`)
throw new Error(`Cannot list github cache automatically.`)
}
export const getGithubProvider = (): TProvider => {
return {
save: saveCache,
get: getCache,
delete: deleteCache,
list: listCache
}
}

@@ -1,12 +1,11 @@
import { Readable } from 'node:stream'
-import { env } from '../env'
+import { env } from '../../env'
import * as core from '@actions/core'
import streamToPromise from 'stream-to-promise'
import { createWriteStream } from 'node:fs'
import { unlink } from 'node:fs/promises'
-import { getTempCachePath } from '../constants'
+import { getTempCachePath } from '../../constants'
import { restoreCache, saveCache } from '@actions/cache'

class HandledError extends Error {
  status: number
  statusText: string

@@ -0,0 +1,228 @@
import { TProvider } from 'src/lib/providers'
import * as core from '@actions/core'
import { Readable } from 'stream'
import { RequestContext } from '../../server'
import { TListFile } from '../../server/cleanup'
import {
S3Client,
GetObjectCommand,
DeleteObjectCommand,
ListObjectsV2Command
} from '@aws-sdk/client-s3'
import { Upload } from '@aws-sdk/lib-storage'
import { getCacheKey } from 'src/lib/constants'
export const getS3Provider = (): TProvider => {
const s3AccessKeyId =
core.getInput('s3-access-key-id') || process.env.S3_ACCESS_KEY_ID
const s3SecretAccessKey =
core.getInput('s3-secret-access-key') || process.env.S3_SECRET_ACCESS_KEY
const s3Bucket = core.getInput('s3-bucket') || process.env.S3_BUCKET
const s3Region = core.getInput('s3-region') || process.env.S3_REGION
const s3Endpoint = core.getInput('s3-endpoint') || process.env.S3_ENDPOINT
const s3Prefix =
core.getInput('s3-prefix') || process.env.S3_PREFIX || 'turbogha/'
if (
!s3AccessKeyId ||
!s3SecretAccessKey ||
!s3Bucket ||
!s3Region ||
!s3Endpoint
) {
throw new Error(
'S3 provider requires s3-access-key-id, s3-secret-access-key, s3-bucket, s3-region, and s3-endpoint inputs'
)
}
const s3Client = new S3Client({
region: s3Region,
endpoint: s3Endpoint,
credentials: {
accessKeyId: s3AccessKeyId,
secretAccessKey: s3SecretAccessKey
}
})
const getS3Key = (hash: string, tag?: string) => {
const key = getCacheKey(hash, tag)
if (s3Prefix) {
return `${s3Prefix}${key}`
}
return key
}
const save = async (
ctx: RequestContext,
hash: string,
tag: string,
stream: Readable
): Promise<void> => {
const objectKey = getS3Key(hash, tag)
console.log({ objectKey, s3Prefix })
try {
// Use the S3 Upload utility which handles multipart uploads for large files
const upload = new Upload({
client: s3Client,
params: {
Bucket: s3Bucket,
Key: objectKey,
Body: stream,
ContentType: 'application/octet-stream'
}
})
await upload.done()
ctx.log.info(`Saved artifact to S3: ${objectKey}`)
} catch (error) {
ctx.log.info(`Error saving artifact to S3: ${error}`)
throw error
}
}
const get = async (
ctx: RequestContext,
hash: string
): Promise<
[number | undefined, Readable | ReadableStream, string | undefined] | null
> => {
// First try to get with just the hash
const objectKey = getS3Key(hash)
try {
// Try to find the object
const listCommand = new ListObjectsV2Command({
Bucket: s3Bucket,
Prefix: objectKey,
MaxKeys: 10
})
const listResponse = await s3Client.send(listCommand)
if (!listResponse.Contents || listResponse.Contents.length === 0) {
ctx.log.info(`No cached artifact found for ${hash}`)
return null
}
// Find the most recent object that matches the hash prefix
const matchingObjects = listResponse.Contents.filter(
obj => obj.Key && obj.Key.startsWith(objectKey)
)
if (matchingObjects.length === 0) {
return null
}
// Sort by last modified date, newest first
matchingObjects.sort((a, b) => {
const dateA = a.LastModified?.getTime() || 0
const dateB = b.LastModified?.getTime() || 0
return dateB - dateA
})
const latestObject = matchingObjects[0]
const key = latestObject.Key as string
// Get the object
const getCommand = new GetObjectCommand({
Bucket: s3Bucket,
Key: key
})
const response = await s3Client.send(getCommand)
if (!response.Body) {
ctx.log.info(`Failed to get artifact body from S3`)
return null
}
const size = response.ContentLength
const stream = response.Body as Readable
// Extract the tag if it exists
let artifactTag: string | undefined
if (key.includes('#')) {
const parts = key.split('#')
artifactTag = parts[parts.length - 1]
}
ctx.log.info(`Retrieved artifact from S3: ${key}`)
return [size, stream, artifactTag]
} catch (error) {
ctx.log.info(`Error getting artifact from S3: ${error}`)
return null
}
}
const deleteObj = async (key: string): Promise<void> => {
try {
const deleteCommand = new DeleteObjectCommand({
Bucket: s3Bucket,
Key: key
})
await s3Client.send(deleteCommand)
} catch (error) {
core.error(`Error deleting artifact from S3: ${error}`)
throw error
}
}
const list = async (): Promise<TListFile[]> => {
try {
const files: TListFile[] = []
let continuationToken: string | undefined
do {
// Create a new command for each request with the current continuation token
const listCommand = new ListObjectsV2Command({
Bucket: s3Bucket,
Prefix: s3Prefix,
MaxKeys: 1000,
ContinuationToken: continuationToken
})
core.debug(
`Listing S3 objects with prefix ${s3Prefix}${continuationToken ? ' and continuation token' : ''}`
)
const response = await s3Client.send(listCommand)
if (response.Contents && response.Contents.length > 0) {
core.debug(`Found ${response.Contents.length} objects`)
const objects = response.Contents.filter(obj => obj.Key).map(
(obj): TListFile => {
return {
path: obj.Key as string,
createdAt: (obj.LastModified || new Date()).toISOString(),
size: obj.Size || 0
}
}
)
files.push(...objects)
}
continuationToken = response.NextContinuationToken
if (continuationToken) {
core.debug(`NextContinuationToken: ${continuationToken}`)
}
} while (continuationToken)
core.debug(`Total files listed: ${files.length}`)
return files
} catch (error) {
core.error(`Error listing artifacts from S3: ${error}`)
throw error
}
}
return {
save,
get,
delete: deleteObj,
list
}
}
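As a rough illustration of the resulting object layout (hypothetical hash, tag, and bucket): getS3Key simply places the regular cache key behind the configured s3-prefix, which is also the prefix the list/cleanup path filters on, and get() later recovers the tag from the part after '#'.

```typescript
// Sketch of the S3 object-key layout, assuming the defaults
// s3-prefix = 'turbogha/' and cache-prefix = 'turbogha_'.
const s3Prefix = 'turbogha/'
const cachePrefix = 'turbogha_'

const getCacheKey = (hash: string, tag?: string): string =>
  `${cachePrefix}${hash}${tag ? `#${tag}` : ''}`
const getS3Key = (hash: string, tag?: string): string =>
  `${s3Prefix}${getCacheKey(hash, tag)}`

console.log(getS3Key('1f2a3b', 'build'))
// -> turbogha/turbogha_1f2a3b#build
//    (stored as s3://<your-bucket>/turbogha/turbogha_1f2a3b#build)
```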

src/lib/server/cleanup.ts (new file, 111 lines)

@@ -0,0 +1,111 @@
import * as core from '@actions/core'
import { RequestContext } from '.'
import parse from 'parse-duration'
import { getProvider } from '../providers'
import { parseFileSize } from './utils'
export type TListFile = {
path: string
createdAt: string
size: number
}
export async function cleanup(ctx: RequestContext) {
const maxAge = core.getInput('max-age') || process.env.MAX_AGE
const maxFiles = core.getInput('max-files') || process.env.MAX_FILES
const maxSize = core.getInput('max-size') || process.env.MAX_SIZE
if (!maxAge && !maxFiles && !maxSize) {
ctx.log.info('No cleanup options provided, skipping cleanup')
return
}
const { maxAgeParsed, maxFilesParsed, maxSizeParsed } = {
maxAgeParsed: maxAge ? parse(maxAge) : undefined,
maxFilesParsed: maxFiles ? parseInt(maxFiles) : undefined,
maxSizeParsed: maxSize ? parseFileSize(maxSize) : undefined
}
if (maxAge && !maxAgeParsed) {
core.error('Invalid max-age provided')
throw new Error('Invalid max-age provided')
}
if (maxFiles && !maxFilesParsed) {
core.error('Invalid max-files provided')
throw new Error('Invalid max-files provided')
}
if (maxSize && !maxSizeParsed) {
core.error('Invalid max-size provided')
throw new Error('Invalid max-size provided')
}
const provider = getProvider()
const files = await provider.list()
const fileToDelete: (TListFile & {
reason: 'max-age' | 'max-files' | 'max-size'
})[] = []
if (maxAgeParsed) {
const now = new Date()
const age = new Date(now.getTime() - maxAgeParsed)
fileToDelete.push(
...files
.filter(file => new Date(file.createdAt) < age)
.map(file => ({ ...file, reason: 'max-age' as const }))
)
}
if (maxFilesParsed && files.length > maxFilesParsed) {
const sortedByDate = [...files].sort(
(a, b) =>
new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime()
)
const excessFiles = sortedByDate.slice(0, files.length - maxFilesParsed)
excessFiles.forEach(file => {
if (!fileToDelete.some(f => f.path === file.path)) {
fileToDelete.push({ ...file, reason: 'max-files' })
}
})
}
if (maxSizeParsed) {
let totalSize = files.reduce((sum, file) => sum + file.size, 0)
if (totalSize > maxSizeParsed) {
const sortedByDate = [...files].sort(
(a, b) =>
new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime()
)
for (const file of sortedByDate) {
if (totalSize <= maxSizeParsed) break
if (!fileToDelete.some(f => f.path === file.path)) {
fileToDelete.push({ ...file, reason: 'max-size' })
totalSize -= file.size
}
}
}
}
if (fileToDelete.length > 0) {
ctx.log.info(
`Cleaning up ${fileToDelete.length} files (${fileToDelete.map(
f => `${f.path} (${f.reason})`
)})`
)
for (const file of fileToDelete) {
try {
await provider.delete(file.path)
ctx.log.info(`Deleted ${file.path}`)
} catch (error) {
core.error(`Failed to delete ${file.path}: ${error}`)
}
}
} else {
ctx.log.info('No files to clean up')
}
}
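A small sketch of how the three limits become numbers before the filtering above runs (values are illustrative and mirror the README's cleanup example; parse-duration returns milliseconds and parseFileSize, added in src/lib/server/utils.ts below, returns bytes):

```typescript
import parse from 'parse-duration'
import { parseFileSize } from './lib/server/utils' // path depends on where this runs

// Illustrative thresholds.
const maxAgeMs = parse('2w') // 1209600000 (two weeks in milliseconds)
const maxFiles = parseInt('300', 10) // 300
const maxSizeBytes = parseFileSize('5gb') // 5 * 1024 ** 3 = 5368709120

console.log({ maxAgeMs, maxFiles, maxSizeBytes })
```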

@@ -1,6 +1,13 @@
import Fastify from 'fastify'
import { serverPort } from '../constants'
-import { getCache, saveCache } from '../cache'
+import { cleanup } from './cleanup'
import { getProvider } from '../providers'

export type RequestContext = {
  log: {
    info: (message: string) => void
  }
}

export async function server(): Promise<void> {
  //* Create the server
@@ -14,12 +21,16 @@ export async function server(): Promise<void> {
  })

  //? Shut down the server
-  const shutdown = () => {
+  const shutdown = async (ctx: RequestContext) => {
    //* Handle cleanup
    await cleanup(ctx)
    // Exit the server after responding (100ms)
    setTimeout(() => process.exit(0), 100)
    return { ok: true }
  }

-  fastify.delete('/shutdown', async () => {
-    return shutdown()
+  fastify.delete('/shutdown', async request => {
+    return shutdown(request)
  })

  //? Handle streaming requets body
@@ -35,7 +46,8 @@ export async function server(): Promise<void> {
  fastify.put('/v8/artifacts/:hash', async request => {
    const hash = (request.params as { hash: string }).hash
    request.log.info(`Received artifact for ${hash}`)
-    await saveCache(
+    const provider = getProvider()
+    await provider.save(
      request,
      hash,
      String(request.headers['x-artifact-tag'] || ''),
@@ -49,7 +61,8 @@ export async function server(): Promise<void> {
  fastify.get('/v8/artifacts/:hash', async (request, reply) => {
    const hash = (request.params as { hash: string }).hash
    request.log.info(`Requested artifact for ${hash}`)
-    const result = await getCache(request, hash)
+    const provider = getProvider()
+    const result = await provider.get(request, hash)
    if (result === null) {
      request.log.info(`Artifact for ${hash} not found`)
      reply.code(404)
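For orientation, a rough sketch of the traffic this server now handles (port and routes taken from this diff; the hash and tag are hypothetical): turbo uploads and downloads artifacts through the local remote-cache API, and the post step's DELETE /shutdown now runs the provider cleanup before the process exits.

```typescript
// Sketch of the local remote-cache traffic, assuming the server from this
// diff is listening on serverPort (41230). 'somehash'/'sometag' are hypothetical.
const base = 'http://localhost:41230'

async function demo(): Promise<void> {
  // Upload an artifact (what turbo does on a cache miss).
  await fetch(`${base}/v8/artifacts/somehash`, {
    method: 'PUT',
    headers: { 'x-artifact-tag': 'sometag' },
    body: 'artifact-bytes'
  })

  // Download it again (cache-hit path); a 404 means it was not found.
  const res = await fetch(`${base}/v8/artifacts/somehash`)
  console.log(res.status)

  // Post step: triggers cleanup(), then the server exits shortly afterwards.
  await fetch(`${base}/shutdown`, { method: 'DELETE' })
}

void demo()
```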

@@ -59,3 +59,27 @@ export async function killServer() {
    method: 'DELETE'
  })
}
export const parseFileSize = (size: string): number => {
const units: { [key: string]: number } = {
b: 1,
kb: 1024,
mb: 1024 * 1024,
gb: 1024 * 1024 * 1024,
tb: 1024 * 1024 * 1024 * 1024
}
const match = size.toLowerCase().match(/^(\d+)\s*([a-z]+)$/)
if (!match) {
throw new Error(`Invalid file size format: ${size}`)
}
const [, value, unit] = match
const multiplier = units[unit]
if (!multiplier) {
throw new Error(`Invalid file size unit: ${unit}`)
}
return parseInt(value) * multiplier
}
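A quick sketch of what the new parseFileSize helper accepts and returns (values follow from the binary multipliers above; the last call shows the error path):

```typescript
console.log(parseFileSize('100mb')) // 104857600 (100 * 1024 * 1024)
console.log(parseFileSize('5gb')) // 5368709120 (5 * 1024 ** 3)
console.log(parseFileSize('512 kb')) // 524288 (whitespace between value and unit is allowed)

try {
  parseFileSize('ten-megabytes') // no leading integer, so the regex does not match
} catch (e) {
  console.error((e as Error).message) // Invalid file size format: ten-megabytes
}
```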

@@ -2,9 +2,9 @@
  "$schema": "https://json.schemastore.org/tsconfig",
  "compilerOptions": {
    "target": "ES2022",
-   "module": "NodeNext",
+   "module": "ESNext",
    "rootDir": "./src",
-   "moduleResolution": "NodeNext",
+   "moduleResolution": "bundler",
    "baseUrl": "./",
    "sourceMap": true,
    "outDir": "./dist",