Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,10 @@ jobs:
- name: Test
run: npm run test

- name: Test (Publish)
if: matrix.node-version != '20'
run: npx vitest run --project publish

- name: Test (Integration)
if: matrix.operating-system == 'ubuntu-latest'
run: npm run test:integration
Expand Down
9 changes: 0 additions & 9 deletions Gruntfile.js

This file was deleted.

392 changes: 197 additions & 195 deletions package-lock.json

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
"test:browser": "vitest run --project browser",
"test:unit": "vitest run --project node",
"test:tasks": "vitest run --project tasks",
"test:publish": "npm run build && vitest run --project publish",
"test:browser-smoke": "playwright test --config tests/browser/playwright.config.js",
"test:serve": "npx serve -l 9999 .",
"test:integration": "npm run build && ./tests/integration/run-integration-tests.sh",
Expand Down
59 changes: 38 additions & 21 deletions tasks/publish-to-aws.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,13 @@ const { S3, PutObjectCommand } = require('@aws-sdk/client-s3');
const git = require('./util/git');
const semver = require('semver');

const PUBLISHED_FILES = [
'handlebars.js',
'handlebars.min.js',
'handlebars.runtime.js',
'handlebars.runtime.min.js',
];

let s3Client;

async function main() {
Expand All @@ -13,24 +20,28 @@ async function main() {
const commitInfo = await git.commitInfo();
console.log('tag: ', commitInfo.tagName);

const suffixes = buildSuffixes(commitInfo);

if (suffixes.length > 0) {
validateS3Env();
console.log('publishing file-suffixes: ' + JSON.stringify(suffixes));
await publish(suffixes);
}
}

function buildSuffixes(commitInfo) {
const suffixes = [];

// Publish the master as "latest" and with the commit-id
if (commitInfo.isMaster) {
suffixes.push('-latest');
suffixes.push('-' + commitInfo.headSha);
}

// Publish tags by their tag-name
if (commitInfo.tagName != null && semver.valid(commitInfo.tagName)) {
suffixes.push('-' + commitInfo.tagName);
}

if (suffixes.length > 0) {
validateS3Env();
console.log('publishing file-suffixes: ' + JSON.stringify(suffixes));
await publish(suffixes);
}
return suffixes;
}

function validateS3Env() {
Expand All @@ -44,33 +55,30 @@ function validateS3Env() {
}
}

async function publish(suffixes) {
const publishPromises = suffixes.map((suffix) => publishSuffix(suffix));
async function publish(suffixes, overrides) {
const publishPromises = suffixes.map((suffix) =>
publishSuffix(suffix, overrides)
);
return Promise.all(publishPromises);
}

async function publishSuffix(suffix) {
const filenames = [
'handlebars.js',
'handlebars.min.js',
'handlebars.runtime.js',
'handlebars.runtime.min.js',
];
const publishPromises = filenames.map(async (filename) => {
async function publishSuffix(suffix, overrides) {
const publishPromises = PUBLISHED_FILES.map(async (filename) => {
const nameInBucket = getNameInBucket(filename, suffix);
const localFile = getLocalFile(filename);
await uploadToBucket(localFile, nameInBucket);
await uploadToBucket(localFile, nameInBucket, overrides);
console.log(`Published ${localFile} to build server (${nameInBucket})`);
});
return Promise.all(publishPromises);
}

async function uploadToBucket(localFile, nameInBucket) {
const s3 = getS3Client();
async function uploadToBucket(localFile, nameInBucket, overrides) {
const s3 = overrides?.s3Client ?? getS3Client();
const bucket = overrides?.bucket ?? process.env.S3_BUCKET_NAME;

return s3.send(
new PutObjectCommand({
Bucket: process.env.S3_BUCKET_NAME,
Bucket: bucket,
Key: nameInBucket,
Body: fs.readFileSync(localFile, 'utf8'),
})
Expand Down Expand Up @@ -98,6 +106,15 @@ function getLocalFile(filename) {
return 'dist/' + filename;
}

module.exports = {
PUBLISHED_FILES,
buildSuffixes,
validateS3Env,
publish,
getNameInBucket,
getLocalFile,
};

if (require.main === module) {
main().catch((err) => {
console.error(err);
Expand Down
81 changes: 81 additions & 0 deletions tasks/tests/fake-s3.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
const http = require('http');

/**
* Minimal in-memory S3-compatible server for testing.
* Supports PutObject and GetObject via path-style requests.
*/
/**
 * Minimal in-memory S3-compatible server for testing.
 * Supports PutObject and GetObject via path-style requests.
 *
 * Returned handle exposes `start()`, which resolves with the base address
 * plus `createBucket`, `reset`, and `stop` helpers.
 */
function createFakeS3() {
  // bucket name -> (object key -> Buffer)
  const buckets = new Map();

  const handleRequest = (req, res) => {
    const { bucket, key } = parsePath(req.url);

    // Any request against an unknown bucket gets an S3-style XML error.
    if (bucket === null || !buckets.has(bucket)) {
      res.writeHead(404, { 'Content-Type': 'application/xml' });
      res.end('<Error><Code>NoSuchBucket</Code></Error>');
      return;
    }

    const objects = buckets.get(bucket);

    if (req.method === 'PUT' && key) {
      // PutObject: buffer the request body, then store it under the key.
      const chunks = [];
      req.on('data', (chunk) => chunks.push(chunk));
      req.on('end', () => {
        objects.set(key, Buffer.concat(chunks));
        res.writeHead(200);
        res.end();
      });
      return;
    }

    if ((req.method === 'GET' || req.method === 'HEAD') && key) {
      if (!objects.has(key)) {
        res.writeHead(404, { 'Content-Type': 'application/xml' });
        res.end('<Error><Code>NoSuchKey</Code></Error>');
        return;
      }
      const body = objects.get(key);
      res.writeHead(200, {
        'Content-Length': body.length,
        'Content-Type': 'application/octet-stream',
      });
      // HEAD responses carry headers only, no body.
      res.end(req.method === 'HEAD' ? undefined : body);
      return;
    }

    // Anything else (unsupported verb, or no key) is rejected.
    res.writeHead(405);
    res.end();
  };

  const server = http.createServer(handleRequest);

  return {
    /** Listen on an ephemeral localhost port; resolves with server controls. */
    start() {
      return new Promise((resolve) => {
        server.listen(0, '127.0.0.1', () => {
          const { port } = server.address();
          resolve({
            address: `http://127.0.0.1:${port}`,
            /** Create an empty bucket; no-op when it already exists. */
            createBucket(name) {
              if (!buckets.has(name)) buckets.set(name, new Map());
            },
            /** Delete every object but keep all buckets. */
            reset() {
              for (const objects of buckets.values()) objects.clear();
            },
            /** Close the server; resolves once it has shut down. */
            stop() {
              return new Promise((done) => server.close(done));
            },
          });
        });
      });
    },
  };
}

/**
 * Split a path-style S3 request URL (`/<bucket>/<key...>`) into its parts.
 * Missing segments come back as null.
 */
function parsePath(url) {
  const { pathname } = new URL(url, 'http://localhost');
  const [bucket, ...keySegments] = pathname.replace(/^\//, '').split('/');
  return {
    bucket: bucket || null,
    key: keySegments.join('/') || null,
  };
}

module.exports = { createFakeS3 };
Loading