hydra/scripts/upload-build.cjs

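// Uploads packaged build artifacts to an S3-compatible bucket and reports the
// resulting download URLs to a build webhook.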
const fs = require("node:fs");
const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");
const path = require("node:path");
const packageJson = require("../package.json");
if (!process.env.BUILD_WEBHOOK_URL) {
  console.log("No BUILD_WEBHOOK_URL provided, skipping upload");
  process.exit(0);
}
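// S3-compatible storage client; endpoint and credentials come from the environment.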
const s3 = new S3Client({
  region: "auto",
  endpoint: process.env.S3_ENDPOINT,
  forcePathStyle: true,
  credentials: {
    accessKeyId: process.env.S3_ACCESS_KEY_ID,
    secretAccessKey: process.env.S3_SECRET_ACCESS_KEY,
  },
});
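// Directory containing the packaged build output to upload.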
const dist = path.resolve(__dirname, "..", "resources");
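// Only artifacts with these extensions are uploaded.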
const extensionsToUpload = [".deb", ".exe", ".png"];
fs.readdir(dist, async (err, files) => {
  if (err) throw err;

  // Upload each matching artifact under a timestamped key so repeated builds
  // never overwrite each other.
  const uploads = await Promise.all(
    files
      .filter((file) => extensionsToUpload.includes(path.extname(file)))
      .map(async (file) => {
        const fileName = `${Date.now()}-${file}`;

        const command = new PutObjectCommand({
          Bucket: process.env.S3_BUILDS_BUCKET_NAME,
          Key: fileName,
          Body: fs.createReadStream(path.resolve(dist, file)),
        });

        await s3.send(command);

        return {
          url: `${process.env.S3_ENDPOINT}/${process.env.S3_BUILDS_BUCKET_NAME}/${fileName}`,
          name: fileName,
        };
      })
  );

  // Report the uploaded files, branch, version, and actor to the build webhook.
  await fetch(process.env.BUILD_WEBHOOK_URL, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      uploads,
      branchName: process.env.BRANCH_NAME,
      version: packageJson.version,
      githubActor: process.env.GITHUB_ACTOR,
    }),
  });
});