hydra/scripts/upload-build.cjs


const fs = require("node:fs");
const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");
const path = require("node:path");
const packageJson = require("../package.json");
// Skip the upload entirely when no webhook URL is configured.
if (!process.env.BUILD_WEBHOOK_URL) {
  console.log("No BUILD_WEBHOOK_URL provided, skipping upload");
  process.exit(0);
}
// S3-compatible client configured from environment variables.
const s3 = new S3Client({
  region: "auto",
  endpoint: process.env.S3_ENDPOINT,
  forcePathStyle: true,
  credentials: {
    accessKeyId: process.env.S3_ACCESS_KEY_ID,
    secretAccessKey: process.env.S3_SECRET_ACCESS_KEY,
  },
});
const dist = path.resolve(__dirname, "..", "resources");
const extensionsToUpload = [".deb", ".exe", ".png"];
// Upload every matching file from the resources directory, then notify the webhook.
fs.readdir(dist, async (err, files) => {
  if (err) throw err;
  const uploads = await Promise.all(
    files
      .filter((file) => extensionsToUpload.includes(path.extname(file)))
      .map(async (file) => {
        // Prefix the object key with a timestamp so keys from different builds never collide.
        const fileName = `${new Date().getTime()}-${file}`;
        const command = new PutObjectCommand({
          Bucket: process.env.S3_BUILDS_BUCKET_NAME,
          Key: fileName,
          Body: fs.createReadStream(path.resolve(dist, file)),
        });
        await s3.send(command);

        return {
          url: `${process.env.S3_ENDPOINT}/${process.env.S3_BUILDS_BUCKET_NAME}/${fileName}`,
          name: fileName,
        };
      })
  );
  // Report the uploaded artifact URLs and build metadata to the webhook.
  if (uploads.length > 0) {
    await fetch(process.env.BUILD_WEBHOOK_URL, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
      },
      body: JSON.stringify({
        uploads,
        branchName: process.env.BRANCH_NAME,
        version: packageJson.version,
        githubActor: process.env.GITHUB_ACTOR,
      }),
    });
  }
});