
fix: cleanups

rharkor 2025-04-10 16:11:07 +02:00
parent 46a1ec72af
commit f8d842ad5a
13 changed files with 257 additions and 121 deletions

.env.example Normal file

@@ -0,0 +1,10 @@
+S3_ACCESS_KEY_ID=secret
+S3_SECRET_ACCESS_KEY=secret
+S3_BUCKET=my-bucket
+S3_REGION=us-east-1
+S3_ENDPOINT=https://s3.amazonaws.com
+S3_PREFIX=turbogha/
+PROVIDER=s3
+MAX_AGE=1d
+MAX_FILES=100
+MAX_SIZE=100mb

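These keys mirror the action's inputs one-to-one: throughout this commit, every `core.getInput(...)` read gains a `process.env` fallback so the same code paths run locally (the dev entry points load this file via `dotenv`; see `src/dev-run.ts` and `src/dev/cleanup.ts` below). A minimal sketch of the pattern — the `inputOrEnv` helper is illustrative, not part of the repo:

```ts
import * as core from '@actions/core'
import { config } from 'dotenv'

// Local dev runs load .env first; inside GitHub Actions this is a no-op.
config()

// Illustrative helper: prefer the action input, fall back to the environment.
// core.getInput returns '' when an input is unset, so || acts as a fallback.
const inputOrEnv = (input: string, envVar: string): string | undefined =>
  core.getInput(input) || process.env[envVar]

const s3Bucket = inputOrEnv('s3-bucket', 'S3_BUCKET')
console.log(`Resolved bucket: ${s3Bucket}`)
```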
README.md

@@ -179,9 +179,15 @@ Example with cleanup configuration:
 2. In a separate terminal, execute the tests:
 ```bash
-npm test
+npm test -- --cache=remote:rw --no-daemon
 ```
+#### Testing the cleanup script
+```bash
+npm run cleanup
+```
 ## Licensing
 Licensed under the MIT License. For more details, see the [LICENSE](LICENSE)


@@ -1,7 +1,7 @@
 #!/bin/bash
 # Run the test and capture the output
-npm run test -- --cache=remote:rw | tee test-output.log
+npm run test -- --cache=remote:rw --no-daemon | tee test-output.log
 cat test-output.log

dist/setup/index.js generated vendored

@@ -196620,12 +196620,12 @@ var lib_storage_dist_cjs = __nccwpck_require__(22358);
 const getS3Provider = () => {
-const s3AccessKeyId = core.getInput('s3-access-key-id');
-const s3SecretAccessKey = core.getInput('s3-secret-access-key');
-const s3Bucket = core.getInput('s3-bucket');
-const s3Region = core.getInput('s3-region');
-const s3Endpoint = core.getInput('s3-endpoint');
-const s3Prefix = core.getInput('s3-prefix') || 'turbogha/';
+const s3AccessKeyId = core.getInput('s3-access-key-id') || process.env.S3_ACCESS_KEY_ID;
+const s3SecretAccessKey = core.getInput('s3-secret-access-key') || process.env.S3_SECRET_ACCESS_KEY;
+const s3Bucket = core.getInput('s3-bucket') || process.env.S3_BUCKET;
+const s3Region = core.getInput('s3-region') || process.env.S3_REGION;
+const s3Endpoint = core.getInput('s3-endpoint') || process.env.S3_ENDPOINT;
+const s3Prefix = core.getInput('s3-prefix') || process.env.S3_PREFIX || 'turbogha/';
 if (!s3AccessKeyId ||
 !s3SecretAccessKey ||
 !s3Bucket ||
@@ -196641,8 +196641,16 @@ const getS3Provider = () => {
 secretAccessKey: s3SecretAccessKey
 }
 });
+const getS3Key = (hash, tag) => {
+const key = getCacheKey(hash, tag);
+if (s3Prefix) {
+return `${s3Prefix}${key}`;
+}
+return key;
+};
 const save = async (ctx, hash, tag, stream) => {
-const objectKey = getCacheKey(hash, tag);
+const objectKey = getS3Key(hash, tag);
+console.log({ objectKey, s3Prefix });
 try {
 // Use the S3 Upload utility which handles multipart uploads for large files
 const upload = new lib_storage_dist_cjs.Upload({
@@ -196664,7 +196672,7 @@ const getS3Provider = () => {
 };
 const get = async (ctx, hash) => {
 // First try to get with just the hash
-const objectKey = getCacheKey(hash);
+const objectKey = getS3Key(hash);
 try {
 // Try to find the object
 const listCommand = new dist_cjs.ListObjectsV2Command({
@@ -196716,11 +196724,11 @@ const getS3Provider = () => {
 return null;
 }
 };
-const deleteObj = async (hash) => {
+const deleteObj = async (key) => {
 try {
 const deleteCommand = new dist_cjs.DeleteObjectCommand({
 Bucket: s3Bucket,
-Key: getCacheKey(hash)
+Key: key
 });
 await s3Client.send(deleteCommand);
 }
@@ -196780,7 +196788,10 @@ const getS3Provider = () => {
 const getProvider = () => {
-const provider = core.getInput('provider');
+const provider = core.getInput('provider') || process.env.PROVIDER;
+if (!provider) {
+throw new Error('Provider is required');
+}
 if (provider === 'github') {
 return getGithubProvider();
 }
@@ -196790,22 +196801,107 @@ const getProvider = () => {
 throw new Error(`Provider ${provider} not supported`);
 };
+// EXTERNAL MODULE: ./node_modules/wait-on/lib/wait-on.js
+var wait_on = __nccwpck_require__(1503);
+var wait_on_default = /*#__PURE__*/__nccwpck_require__.n(wait_on);
+// EXTERNAL MODULE: external "fs"
+var external_fs_ = __nccwpck_require__(79896);
+// EXTERNAL MODULE: external "child_process"
+var external_child_process_ = __nccwpck_require__(35317);
+;// CONCATENATED MODULE: ./src/lib/server/utils.ts
+const waitForServer = async () => {
+await wait_on_default()({
+resources: [`http-get://localhost:${constants_serverPort}`],
+timeout: 5000
+});
+};
+const exportVariable = (name, value) => {
+core.exportVariable(name, value);
+core.info(` ${name}=${value}`);
+};
+async function launchServer(devRun) {
+if (!devRun) {
+//* Launch a detached child process to run the server
+// See: https://nodejs.org/docs/latest-v16.x/api/child_process.html#optionsdetached
+const out = (0,external_fs_.openSync)(serverLogFile, 'a');
+const err = (0,external_fs_.openSync)(serverLogFile, 'a');
+const child = (0,external_child_process_.spawn)(process.argv[0], [process.argv[1], '--server'], {
+detached: true,
+stdio: ['ignore', out, err]
+});
+child.unref();
+core.info(`Cache version: ${cachePath}`);
+core.info(`Cache prefix: ${cachePrefix}`);
+core.info(`Launched child process: ${child.pid}`);
+core.info(`Server log file: ${serverLogFile}`);
+}
+//* Wait for server
+await waitForServer();
+core.info(`Server is now up and running.`);
+//* Export the environment variables for Turbo
+if (devRun) {
+console.log('Execute:');
+console.log(`export TURBOGHA_PORT=${constants_serverPort}`);
+console.log(`export TURBO_API=http://localhost:${constants_serverPort}`);
+console.log(`export TURBO_TOKEN=turbogha`);
+console.log(`export TURBO_TEAM=turbogha`);
+}
+else {
+core.info('The following environment variables are exported:');
+exportVariable('TURBOGHA_PORT', `${constants_serverPort}`);
+exportVariable('TURBO_API', `http://localhost:${constants_serverPort}`);
+exportVariable('TURBO_TOKEN', 'turbogha');
+exportVariable('TURBO_TEAM', 'turbogha');
+}
+}
+async function killServer() {
+//* Kill the server
+await fetch(`http://localhost:${serverPort}/shutdown`, {
+method: 'DELETE'
+});
+}
+const parseFileSize = (size) => {
+const units = {
+b: 1,
+kb: 1024,
+mb: 1024 * 1024,
+gb: 1024 * 1024 * 1024,
+tb: 1024 * 1024 * 1024 * 1024
+};
+const match = size.toLowerCase().match(/^(\d+)\s*([a-z]+)$/);
+if (!match) {
+throw new Error(`Invalid file size format: ${size}`);
+}
+const [, value, unit] = match;
+const multiplier = units[unit];
+if (!multiplier) {
+throw new Error(`Invalid file size unit: ${unit}`);
+}
+return parseInt(value) * multiplier;
+};
 ;// CONCATENATED MODULE: ./src/lib/server/cleanup.ts
 async function cleanup(ctx) {
-const maxAge = core.getInput('max-age');
-const maxFiles = core.getInput('max-files');
-const maxSize = core.getInput('max-size');
+const maxAge = core.getInput('max-age') || process.env.MAX_AGE;
+const maxFiles = core.getInput('max-files') || process.env.MAX_FILES;
+const maxSize = core.getInput('max-size') || process.env.MAX_SIZE;
 if (!maxAge && !maxFiles && !maxSize) {
 ctx.log.info('No cleanup options provided, skipping cleanup');
 return;
 }
 const { maxAgeParsed, maxFilesParsed, maxSizeParsed } = {
-maxAgeParsed: parse(maxAge),
-maxFilesParsed: parseInt(maxFiles),
-maxSizeParsed: parseInt(maxSize)
+maxAgeParsed: maxAge ? parse(maxAge) : undefined,
+maxFilesParsed: maxFiles ? parseInt(maxFiles) : undefined,
+maxSizeParsed: maxSize ? parseFileSize(maxSize) : undefined
 };
 if (maxAge && !maxAgeParsed) {
 core.error('Invalid max-age provided');
@@ -196821,20 +196917,20 @@ async function cleanup(ctx) {
 }
 const provider = getProvider();
 const files = await provider.list();
 ctx.log.info(`Found ${files.length} files in cache`);
+core.info(JSON.stringify(files, null, 2));
 const fileToDelete = [];
 if (maxAgeParsed) {
 const now = new Date();
 const age = new Date(now.getTime() - maxAgeParsed);
-fileToDelete.push(...files.filter(file => new Date(file.createdAt) < age));
+fileToDelete.push(...files
+.filter(file => new Date(file.createdAt) < age)
+.map(file => ({ ...file, reason: 'max-age' })));
 }
 if (maxFilesParsed && files.length > maxFilesParsed) {
 const sortedByDate = [...files].sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());
 const excessFiles = sortedByDate.slice(0, files.length - maxFilesParsed);
 excessFiles.forEach(file => {
 if (!fileToDelete.some(f => f.path === file.path)) {
-fileToDelete.push(file);
+fileToDelete.push({ ...file, reason: 'max-files' });
 }
 });
 }
@@ -196846,14 +196942,14 @@ async function cleanup(ctx) {
 if (totalSize <= maxSizeParsed)
 break;
 if (!fileToDelete.some(f => f.path === file.path)) {
-fileToDelete.push(file);
+fileToDelete.push({ ...file, reason: 'max-size' });
 totalSize -= file.size;
 }
 }
 }
 }
 if (fileToDelete.length > 0) {
-ctx.log.info(`Cleaning up ${fileToDelete.length} files`);
+ctx.log.info(`Cleaning up ${fileToDelete.length} files (${fileToDelete.map(f => `${f.path} (${f.reason})`)})`);
 for (const file of fileToDelete) {
 try {
 await provider.delete(file.path);
@@ -196934,71 +197030,6 @@ async function server() {
 await fastify.listen({ port: constants_serverPort });
 }
-// EXTERNAL MODULE: ./node_modules/wait-on/lib/wait-on.js
-var wait_on = __nccwpck_require__(1503);
-var wait_on_default = /*#__PURE__*/__nccwpck_require__.n(wait_on);
-// EXTERNAL MODULE: external "fs"
-var external_fs_ = __nccwpck_require__(79896);
-// EXTERNAL MODULE: external "child_process"
-var external_child_process_ = __nccwpck_require__(35317);
-;// CONCATENATED MODULE: ./src/lib/server/utils.ts
-const waitForServer = async () => {
-await wait_on_default()({
-resources: [`http-get://localhost:${constants_serverPort}`],
-timeout: 5000
-});
-};
-const exportVariable = (name, value) => {
-core.exportVariable(name, value);
-core.info(` ${name}=${value}`);
-};
-async function launchServer(devRun) {
-if (!devRun) {
-//* Launch a detached child process to run the server
-// See: https://nodejs.org/docs/latest-v16.x/api/child_process.html#optionsdetached
-const out = (0,external_fs_.openSync)(serverLogFile, 'a');
-const err = (0,external_fs_.openSync)(serverLogFile, 'a');
-const child = (0,external_child_process_.spawn)(process.argv[0], [process.argv[1], '--server'], {
-detached: true,
-stdio: ['ignore', out, err]
-});
-child.unref();
-core.info(`Cache version: ${cachePath}`);
-core.info(`Cache prefix: ${cachePrefix}`);
-core.info(`Launched child process: ${child.pid}`);
-core.info(`Server log file: ${serverLogFile}`);
-}
-//* Wait for server
-await waitForServer();
-core.info(`Server is now up and running.`);
-//* Export the environment variables for Turbo
-if (devRun) {
-console.log('Execute:');
-console.log(`export TURBOGHA_PORT=${constants_serverPort}`);
-console.log(`export TURBO_API=http://localhost:${constants_serverPort}`);
-console.log(`export TURBO_TOKEN=turbogha`);
-console.log(`export TURBO_TEAM=turbogha`);
-}
-else {
-core.info('The following environment variables are exported:');
-exportVariable('TURBOGHA_PORT', `${constants_serverPort}`);
-exportVariable('TURBO_API', `http://localhost:${constants_serverPort}`);
-exportVariable('TURBO_TOKEN', 'turbogha');
-exportVariable('TURBO_TEAM', 'turbogha');
-}
-}
-async function killServer() {
-//* Kill the server
-await fetch(`http://localhost:${serverPort}/shutdown`, {
-method: 'DELETE'
-});
-}
 ;// CONCATENATED MODULE: ./src/main.ts

dist/setup/index.js.map generated vendored

File diff suppressed because one or more lines are too long

package-lock.json generated

@@ -11,9 +11,10 @@
 "dependencies": {
 "@actions/cache": "^4.0.0",
 "@actions/core": "^1.10.1",
-"@aws-sdk/client-s3": "^3.782.0",
-"@aws-sdk/lib-storage": "^3.782.0",
+"@aws-sdk/client-s3": "^3.0.0",
+"@aws-sdk/lib-storage": "^3.0.0",
 "fastify": "^5.0.0",
+"filesize-parser": "^1.5.1",
 "parse-duration": "^2.1.4",
 "stream-to-promise": "^3.0.0",
 "wait-on": "^8.0.0"
@@ -27,6 +28,7 @@
 "@typescript-eslint/eslint-plugin": "^8.29.1",
 "@typescript-eslint/parser": "^8.29.1",
 "@vercel/ncc": "^0.38.1",
+"dotenv": "^16.4.7",
 "eslint": "^9.24.0",
 "eslint-plugin-github": "^6.0.0",
 "eslint-plugin-jest": "^28.6.0",
@@ -5710,6 +5712,19 @@
 "node": ">=6.0.0"
 }
 },
+"node_modules/dotenv": {
+"version": "16.4.7",
+"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz",
+"integrity": "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==",
+"dev": true,
+"license": "BSD-2-Clause",
+"engines": {
+"node": ">=12"
+},
+"funding": {
+"url": "https://dotenvx.com"
+}
+},
 "node_modules/dunder-proto": {
 "version": "1.0.1",
 "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
@@ -7030,6 +7045,12 @@
 "node": ">=10"
 }
 },
+"node_modules/filesize-parser": {
+"version": "1.5.1",
+"resolved": "https://registry.npmjs.org/filesize-parser/-/filesize-parser-1.5.1.tgz",
+"integrity": "sha512-wRjdlQ5JM3WHZp6xpakIHQbkcGig8ANglYQDPcQSgZUN5kcDGOgmAwB0396BxzHxcl+kr+GLuusxBnsjdO6x9A==",
+"license": "MIT"
+},
 "node_modules/fill-range": {
 "version": "7.1.1",
 "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",

package.json

@@ -32,7 +32,8 @@
 "all": "npm run format:write && npm run lint && npm run coverage && npm run package",
 "test-build": "npm run -s package",
 "test": "turbo run test-build",
-"dev-run": "tsx ./src/dev-run.ts"
+"dev-run": "tsx ./src/dev-run.ts",
+"cleanup": "tsx ./src/dev/cleanup.ts"
 },
 "license": "MIT",
 "dependencies": {
@@ -41,6 +42,7 @@
 "@aws-sdk/client-s3": "^3.0.0",
 "@aws-sdk/lib-storage": "^3.0.0",
 "fastify": "^5.0.0",
+"filesize-parser": "^1.5.1",
 "parse-duration": "^2.1.4",
 "stream-to-promise": "^3.0.0",
 "wait-on": "^8.0.0"
@@ -54,6 +56,7 @@
 "@typescript-eslint/eslint-plugin": "^8.29.1",
 "@typescript-eslint/parser": "^8.29.1",
 "@vercel/ncc": "^0.38.1",
+"dotenv": "^16.4.7",
 "eslint": "^9.24.0",
 "eslint-plugin-github": "^6.0.0",
 "eslint-plugin-jest": "^28.6.0",

src/dev-run.ts

@@ -1,5 +1,8 @@
 // Run the server in foreground and kill it after the test
+import { config } from 'dotenv'
+config()
 import { server } from './lib/server'
 import { launchServer } from './lib/server/utils'

src/dev/cleanup.ts Normal file

@@ -0,0 +1,14 @@
+import { config } from 'dotenv'
+config()
+import { cleanup } from 'src/lib/server/cleanup'
+const main = async () => {
+await cleanup({
+log: {
+info: console.log
+}
+})
+}
+main()

src/lib/providers/index.ts

@@ -17,12 +17,17 @@ export type TProvider = {
 ) => Promise<
 [number | undefined, Readable | ReadableStream, string | undefined] | null
 >
-delete: (hash: string) => Promise<void>
+delete: (key: string) => Promise<void>
 list: () => Promise<TListFile[]>
 }
 export const getProvider = (): TProvider => {
-const provider = core.getInput('provider')
+const provider = core.getInput('provider') || process.env.PROVIDER
+if (!provider) {
+throw new Error('Provider is required')
+}
 if (provider === 'github') {
 return getGithubProvider()
 }

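A side effect of the new guard worth noting: when neither the `provider` input nor `PROVIDER` is set, `getProvider` now fails fast instead of falling through to the generic `Provider ${provider} not supported` error. A hedged sketch of that behaviour (import path follows the repo's `src/...` style seen in `src/dev/cleanup.ts`):

```ts
import { getProvider } from 'src/lib/providers'

// Outside GitHub Actions, core.getInput('provider') returns '' when the
// corresponding INPUT_PROVIDER variable is absent; clear the env fallback too.
delete process.env.PROVIDER

try {
  getProvider()
} catch (e) {
  console.log((e as Error).message) // "Provider is required"
}
```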
src/lib/providers/s3.ts

@@ -13,12 +13,15 @@ import { Upload } from '@aws-sdk/lib-storage'
 import { getCacheKey } from 'src/lib/constants'
 export const getS3Provider = (): TProvider => {
-const s3AccessKeyId = core.getInput('s3-access-key-id')
-const s3SecretAccessKey = core.getInput('s3-secret-access-key')
-const s3Bucket = core.getInput('s3-bucket')
-const s3Region = core.getInput('s3-region')
-const s3Endpoint = core.getInput('s3-endpoint')
-const s3Prefix = core.getInput('s3-prefix') || 'turbogha/'
+const s3AccessKeyId =
+core.getInput('s3-access-key-id') || process.env.S3_ACCESS_KEY_ID
+const s3SecretAccessKey =
+core.getInput('s3-secret-access-key') || process.env.S3_SECRET_ACCESS_KEY
+const s3Bucket = core.getInput('s3-bucket') || process.env.S3_BUCKET
+const s3Region = core.getInput('s3-region') || process.env.S3_REGION
+const s3Endpoint = core.getInput('s3-endpoint') || process.env.S3_ENDPOINT
+const s3Prefix =
+core.getInput('s3-prefix') || process.env.S3_PREFIX || 'turbogha/'
 if (
 !s3AccessKeyId ||
@@ -41,13 +44,22 @@ export const getS3Provider = (): TProvider => {
 }
 })
+const getS3Key = (hash: string, tag?: string) => {
+const key = getCacheKey(hash, tag)
+if (s3Prefix) {
+return `${s3Prefix}${key}`
+}
+return key
+}
 const save = async (
 ctx: RequestContext,
 hash: string,
 tag: string,
 stream: Readable
 ): Promise<void> => {
-const objectKey = getCacheKey(hash, tag)
+const objectKey = getS3Key(hash, tag)
+console.log({ objectKey, s3Prefix })
 try {
 // Use the S3 Upload utility which handles multipart uploads for large files
@@ -76,7 +88,7 @@ export const getS3Provider = (): TProvider => {
 [number | undefined, Readable | ReadableStream, string | undefined] | null
 > => {
 // First try to get with just the hash
-const objectKey = getCacheKey(hash)
+const objectKey = getS3Key(hash)
 try {
 // Try to find the object
@@ -143,11 +155,11 @@ export const getS3Provider = (): TProvider => {
 }
 }
-const deleteObj = async (hash: string): Promise<void> => {
+const deleteObj = async (key: string): Promise<void> => {
 try {
 const deleteCommand = new DeleteObjectCommand({
 Bucket: s3Bucket,
-Key: getCacheKey(hash)
+Key: key
 })
 await s3Client.send(deleteCommand)

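The net effect of `getS3Key` is plain prefixing, easiest to see with concrete values. In the sketch below the `getCacheKey` stand-in is hypothetical (the real key shape lives in `src/lib/constants` and is not shown in this diff); only the prefixing logic matches the hunk above. This also appears to be why `deleteObj` now takes the full `key`: cleanup passes the listed object keys (already prefixed) straight to `provider.delete`, so re-deriving a key from a hash would miss the prefix.

```ts
// Hypothetical stand-in for the real getCacheKey from src/lib/constants.
const getCacheKey = (hash: string, tag?: string): string =>
  tag ? `${hash}#${tag}` : hash

const s3Prefix = 'turbogha/' // default when no input or S3_PREFIX is set

// Same shape as the getS3Key added in this commit.
const getS3Key = (hash: string, tag?: string): string => {
  const key = getCacheKey(hash, tag)
  return s3Prefix ? `${s3Prefix}${key}` : key
}

console.log(getS3Key('abc123')) // "turbogha/abc123"
console.log(getS3Key('abc123', 'linux')) // "turbogha/abc123#linux" (tag shape illustrative)
```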
src/lib/server/cleanup.ts

@@ -2,6 +2,7 @@ import * as core from '@actions/core'
 import { RequestContext } from '.'
 import parse from 'parse-duration'
 import { getProvider } from '../providers'
+import { parseFileSize } from './utils'
 export type TListFile = {
 path: string
@@ -10,9 +11,9 @@ export type TListFile = {
 }
 export async function cleanup(ctx: RequestContext) {
-const maxAge = core.getInput('max-age')
-const maxFiles = core.getInput('max-files')
-const maxSize = core.getInput('max-size')
+const maxAge = core.getInput('max-age') || process.env.MAX_AGE
+const maxFiles = core.getInput('max-files') || process.env.MAX_FILES
+const maxSize = core.getInput('max-size') || process.env.MAX_SIZE
 if (!maxAge && !maxFiles && !maxSize) {
 ctx.log.info('No cleanup options provided, skipping cleanup')
@@ -20,9 +21,9 @@ export async function cleanup(ctx: RequestContext) {
 }
 const { maxAgeParsed, maxFilesParsed, maxSizeParsed } = {
-maxAgeParsed: parse(maxAge),
-maxFilesParsed: parseInt(maxFiles),
-maxSizeParsed: parseInt(maxSize)
+maxAgeParsed: maxAge ? parse(maxAge) : undefined,
+maxFilesParsed: maxFiles ? parseInt(maxFiles) : undefined,
+maxSizeParsed: maxSize ? parseFileSize(maxSize) : undefined
 }
 if (maxAge && !maxAgeParsed) {
@@ -44,14 +45,17 @@ export async function cleanup(ctx: RequestContext) {
 const files = await provider.list()
 ctx.log.info(`Found ${files.length} files in cache`)
+core.info(JSON.stringify(files, null, 2))
-const fileToDelete: TListFile[] = []
+const fileToDelete: (TListFile & {
+reason: 'max-age' | 'max-files' | 'max-size'
+})[] = []
 if (maxAgeParsed) {
 const now = new Date()
 const age = new Date(now.getTime() - maxAgeParsed)
-fileToDelete.push(...files.filter(file => new Date(file.createdAt) < age))
+fileToDelete.push(
+...files
+.filter(file => new Date(file.createdAt) < age)
+.map(file => ({ ...file, reason: 'max-age' as const }))
+)
 }
 if (maxFilesParsed && files.length > maxFilesParsed) {
@@ -62,7 +66,7 @@ export async function cleanup(ctx: RequestContext) {
 const excessFiles = sortedByDate.slice(0, files.length - maxFilesParsed)
 excessFiles.forEach(file => {
 if (!fileToDelete.some(f => f.path === file.path)) {
-fileToDelete.push(file)
+fileToDelete.push({ ...file, reason: 'max-files' })
 }
 })
 }
@@ -80,7 +84,7 @@ export async function cleanup(ctx: RequestContext) {
 if (totalSize <= maxSizeParsed) break
 if (!fileToDelete.some(f => f.path === file.path)) {
-fileToDelete.push(file)
+fileToDelete.push({ ...file, reason: 'max-size' })
 totalSize -= file.size
 }
 }
@@ -88,8 +92,11 @@ export async function cleanup(ctx: RequestContext) {
 }
 if (fileToDelete.length > 0) {
-ctx.log.info(`Cleaning up ${fileToDelete.length} files`)
+ctx.log.info(
+`Cleaning up ${fileToDelete.length} files (${fileToDelete.map(
+f => `${f.path} (${f.reason})`
+)})`
+)
 for (const file of fileToDelete) {
 try {
 await provider.delete(file.path)

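Read together, the three passes form a single policy: each file is deleted at most once and tagged with the first rule that selected it (`max-age`, then `max-files`, then `max-size`), which is what the new log line surfaces. A worked example with hypothetical data, assuming the limits from `.env.example` (`MAX_AGE=1d`, `MAX_FILES=2`, `MAX_SIZE=100mb`), a "now" of 2025-04-10T16:00Z, and that `totalSize` starts as the sum of all listed files (that computation sits in context lines not shown in this hunk):

```ts
// Hypothetical listing, oldest first:
//   old.bin  created 2025-04-08T16:00Z  10 MB  -> older than 1d: tagged max-age
//   mid.bin  created 2025-04-10T06:00Z  60 MB
//   new.bin  created 2025-04-10T15:00Z  60 MB
//
// max-files: 3 files > 2, so the single oldest is excess -- but old.bin is
// already selected, so the dedup check skips it and this pass tags nothing.
// max-size: 130 MB total > 100 MB, so the walk tags the oldest unselected
// file, mid.bin, as max-size, bringing the running total down to 70 MB.
const expected = [
  { path: 'old.bin', reason: 'max-age' },
  { path: 'mid.bin', reason: 'max-size' }
]
console.log(expected)
```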
src/lib/server/utils.ts

@@ -59,3 +59,27 @@ export async function killServer() {
 method: 'DELETE'
 })
 }
+export const parseFileSize = (size: string): number => {
+const units: { [key: string]: number } = {
+b: 1,
+kb: 1024,
+mb: 1024 * 1024,
+gb: 1024 * 1024 * 1024,
+tb: 1024 * 1024 * 1024 * 1024
+}
+const match = size.toLowerCase().match(/^(\d+)\s*([a-z]+)$/)
+if (!match) {
+throw new Error(`Invalid file size format: ${size}`)
+}
+const [, value, unit] = match
+const multiplier = units[unit]
+if (!multiplier) {
+throw new Error(`Invalid file size unit: ${unit}`)
+}
+return parseInt(value) * multiplier
+}