Mirror of https://github.com/serverless/serverless.git (synced 2026-01-18 14:58:43 +00:00)
Update awsDeploy plugin to use new .zip package artifact
commit 6b32fdc291 (parent b74fa46563)
@@ -77,7 +77,7 @@ Here are the steps the AWS plugins take to compile and deploy the service on the
   resources and stored into memory (`deploy:compileFunctions`)
5. Each functions events are compiled into CloudFormation resources and stored into memory (`deploy:compileEvents`)
6. Old functions (if available) are removed from the S3 bucket (`deploy:deploy`)
7. The function code gets zipped up and is uploaded to S3 (`deploy:deploy`)
7. The service gets zipped up and is uploaded to S3 (`deploy:createDeploymentPackage` and `deploy:deploy`)
8. The compiled function and event resources are attached to the core CloudFormation template and the updated
   CloudFormation template gets redeployed (`deploy:deploy`)
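For orientation, the step 7 wording above corresponds to the reworked `deploy:deploy` lifecycle wired up in the AwsDeploy index.js hunk further down. A condensed sketch of that chain (the `runDeploy` wrapper is hypothetical; only the `.then(...)` order comes from this commit's diff):

'use strict';

const BbPromise = require('bluebird');

// Hypothetical wrapper for illustration; `plugin` stands in for the AwsDeploy instance.
function runDeploy(plugin) {
  return BbPromise.bind(plugin)
    .then(plugin.uploadDeploymentPackage) // upload the single service .zip artifact
    .then(plugin.deployFunctions)         // wire up the compiled function resources
    .then(plugin.updateStack)             // redeploy the updated CloudFormation template
    .then(() => plugin.serverless.cli.log('Deployment successful!'));
}

module.exports = runDeploy;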
@@ -1,6 +1,7 @@
'use strict';

const _ = require('lodash');
const path = require('path');

class AwsCompileFunctions {
  constructor(serverless, options) {
@@ -45,7 +46,7 @@ class AwsCompileFunctions {
        .S3Bucket =
          `${this.serverless.service.service}-${this.options.stage}-${this.options.region}`;
      newFunction.Properties.Code
        .S3Key = ''; // will be replaced in a further step
        .S3Key = this.serverless.service.package.artifact.split(path.sep).pop();

      if (!functionObject.handler) {
        const errorMessage = [
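As a quick illustration of the S3Key change above (a hedged sketch, not taken from the repository): splitting the package artifact path on `path.sep` and keeping the last segment yields the bare file name that is used as the S3 object key. The artifact path below is made up for the example.

'use strict';

const path = require('path');

// Hypothetical artifact path; in the plugin it comes from serverless.service.package.artifact.
const artifact = ['', 'tmp', 'new-service', 'artifact.zip'].join(path.sep);

// Same derivation as newFunction.Properties.Code.S3Key in the hunk above.
const s3Key = artifact.split(path.sep).pop();

console.log(s3Key); // 'artifact.zip'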
@@ -40,7 +40,7 @@ describe('AwsCompileFunctions', () => {
        Properties: {
          Code: {
            S3Bucket: 'new-service-dev-us-east-1',
            S3Key: '',
            S3Key: 'artifact.zip',
          },
          FunctionName: 'new-service-dev-first',
          Handler: 'first.function.handler',
@@ -55,7 +55,7 @@ describe('AwsCompileFunctions', () => {
        Properties: {
          Code: {
            S3Bucket: 'new-service-dev-us-east-1',
            S3Key: '',
            S3Key: 'artifact.zip',
          },
          FunctionName: 'new-service-dev-second',
          Handler: 'second.function.handler',
@@ -78,6 +78,7 @@ describe('AwsCompileFunctions', () => {
    serverless.service.resources = { Resources: {} };
    awsCompileFunctions.serverless.service.functions = functionsObjectMock;
    awsCompileFunctions.serverless.service.service = 'new-service';
    awsCompileFunctions.serverless.service.package.artifact = 'artifact.zip';
  });

  describe('#constructor()', () => {
@@ -4,6 +4,7 @@ const BbPromise = require('bluebird');
const validate = require('./lib/validate');
const initializeResources = require('./lib/initializeResources');
const createStack = require('./lib/createStack');
const uploadDeploymentPackage = require('./lib/uploadDeploymentPackage');
const deployFunctions = require('./lib/deployFunctions');
const updateStack = require('./lib/updateStack');

@@ -21,6 +22,7 @@ class AwsDeploy {
      validate,
      initializeResources,
      createStack,
      uploadDeploymentPackage,
      deployFunctions,
      updateStack
    );
@@ -34,6 +36,7 @@ class AwsDeploy {
      'deploy:createProviderStacks': () => BbPromise.bind(this).then(this.createStack),

      'deploy:deploy': () => BbPromise.bind(this)
        .then(this.uploadDeploymentPackage)
        .then(this.deployFunctions)
        .then(this.updateStack)
        .then(() => this.serverless.cli.log('Deployment successful!')),
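A note on the second hunk above (an assumption, since the enclosing call sits outside the diff context): the required lib modules appear to be merged onto the plugin instance in the constructor, the usual Serverless v1 mixin pattern, which is what makes `this.uploadDeploymentPackage` resolvable inside the `deploy:deploy` chain. A minimal sketch of that pattern, with a stand-in module:

'use strict';

// Stand-in module; the real lib/uploadDeploymentPackage.js appears later in this commit.
const uploadDeploymentPackage = {
  uploadDeploymentPackage() {
    return Promise.resolve('uploaded');
  },
};

class AwsDeploy {
  constructor() {
    // Merging the module's methods onto the instance exposes them as this.<method>.
    Object.assign(this, uploadDeploymentPackage);
  }
}

new AwsDeploy().uploadDeploymentPackage().then(console.log); // logs 'uploaded'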
@@ -1,12 +1,7 @@
'use strict';

const fs = require('fs');
const path = require('path');
const BbPromise = require('bluebird');
const forEach = require('lodash').forEach;
const last = require('lodash').last;
const union = require('lodash').union;
const Zip = require('node-zip');

module.exports = {
  extractFunctionHandlers() {
@@ -16,8 +11,6 @@ module.exports = {
        this.deployedFunctions.push({
          name: key,
          handler: value.handler,
          exclude: value.exclude,
          include: value.include,
        });
      }
    });
@@ -25,154 +18,8 @@ module.exports = {
    return BbPromise.resolve();
  },

  zipFunctions() {
    this.serverless.cli.log('Zipping functions...');
    this.deployedFunctions.forEach((func, index) => {
      // create a new zip instance so that old functions won't slip into the new zip archive
      const zip = new Zip();

      const servicePath = this.serverless.config.servicePath;

      let exclude = func.exclude || [];

      // add defaults for exclude
      exclude = union(exclude, [
        '.git',
        '.gitignore',
        '.DS_Store',
        'serverless.yaml',
        'serverless.env.yaml',
      ]);

      const include = func.include || [];

      const handler = (last(func.handler.split('/'))).replace(/\\g/, '/');
      const handlerFullPath = path.join(servicePath, handler);
      const zipFileName = `${func.name}-${(new Date).getTime().toString()}.zip`;

      if (!handlerFullPath.endsWith(func.handler)) {
        const errorMessage = [
          `The handler ${func.handler} was not found.`,
          ' Please make sure you have this handler in your service at the referenced location.',
          ' Please check the docs for more info',
        ].join('');
        throw new this.serverless.classes.Error(errorMessage);
      }

      const packageRoot = handlerFullPath.replace(func.handler, '');

      this.serverless.utils.walkDirSync(packageRoot).forEach((filePath) => {
        const relativeFilePath = path.relative(packageRoot, filePath);

        const shouldBeExcluded = exclude.some(sRegex => {
          const regex = new RegExp(sRegex);
          const matches = regex.exec(relativeFilePath);
          return matches && matches.length > 0;
        });

        const shouldBeIncluded = include.some(sRegex => {
          const regex = new RegExp(sRegex);
          const matches = regex.exec(relativeFilePath);
          return matches && matches.length > 0;
        });

        if (!shouldBeExcluded || shouldBeIncluded) {
          zip.file(relativeFilePath, fs.readFileSync(filePath));
        }
      });

      const data = zip.generate({
        type: 'nodebuffer',
        compression: 'DEFLATE',
        platform: process.platform,
      });

      this.deployedFunctions[index].zipFileData = data;
      this.deployedFunctions[index].zipFileKey = zipFileName;
    });

    return BbPromise.resolve();
  },

  getFunctionObjectsFromS3Bucket() {
    const bucketName =
      `${this.serverless.service.service}-${this.options.stage}-${this.options.region}`;

    return this.sdk.request('S3',
      'listObjectsV2',
      { Bucket: bucketName },
      this.options.stage,
      this.options.region)
      .then((result) => {
        if (result.Contents.length) {
          const fileNames = result.Contents.map((object) => object.Key);

          const objectsToRemove = [];
          fileNames.forEach((fileName) => {
            const regex = new RegExp(/^(.+)-.+\.zip$/);

            if (fileName.match(regex)) {
              objectsToRemove.push({ Key: fileName });
            }
          });

          return BbPromise.resolve(objectsToRemove);
        }
        return BbPromise.resolve();
      });
  },

  cleanupS3Bucket(objectsToRemove) {
    if (objectsToRemove && objectsToRemove.length) {
      this.serverless.cli.log('Removing old function versions...');

      const bucketName =
        `${this.serverless.service.service}-${this.options.stage}-${this.options.region}`;

      return this.sdk.request('S3',
        'deleteObjects',
        {
          Bucket: bucketName,
          Delete: { Objects: objectsToRemove },
        },
        this.options.stage,
        this.options.region);
    }

    return BbPromise.resolve();
  },

  uploadZipFilesToS3Bucket() {
    this.serverless.cli.log('Uploading zip files to S3...');
    const bucketName =
      `${this.serverless.service.service}-${this.options.stage}-${this.options.region}`;
    const uploadPromises = [];

    this.deployedFunctions.forEach(func => {
      const params = {
        Bucket: bucketName,
        Key: func.zipFileKey,
        Body: func.zipFileData,
      };

      const putObjectPromise = this.sdk.request('S3',
        'putObject',
        params,
        this.options.stage,
        this.options.region);

      uploadPromises.push(putObjectPromise);
    });

    return BbPromise.all(uploadPromises);
  },

  deployFunctions() {
    return BbPromise.bind(this)
      .then(this.extractFunctionHandlers)
      .then(this.zipFunctions)
      .then(this.getFunctionObjectsFromS3Bucket)
      .then(this.cleanupS3Bucket)
      .then(this.uploadZipFilesToS3Bucket);
      .then(this.extractFunctionHandlers);
  },
};
@@ -2,25 +2,9 @@

const BbPromise = require('bluebird');
const async = require('async');
const _ = require('lodash');

module.exports = {
  update() {
    this.serverless.cli.log('Adding function resources to CF...');
    const serviceResources = this.serverless.service.resources;

    _.forEach(serviceResources.Resources, (value, key) => {
      if (_.find(this.deployedFunctions, { name: key })) {
        const newValue = value;
        newValue.Properties.Code.S3Key =
          (_.find(this.deployedFunctions, { name: key }).zipFileKey);

        _.merge(serviceResources.Resources, { [key]: newValue });
      }
    });

    this.serverless.service.resources = serviceResources;

    this.serverless.cli.log('Updating Stack...');
    const stackName = `${this.serverless.service.service}-${this.options.stage}`;
    const params = {
lib/plugins/aws/deploy/lib/uploadDeploymentPackage.js (new file, 83 lines)
@@ -0,0 +1,83 @@
'use strict';

const fs = require('fs');
const path = require('path');
const BbPromise = require('bluebird');

module.exports = {
  getServiceObjectsFromS3Bucket() {
    const bucketName =
      `${this.serverless.service.service}-${this.options.stage}-${this.options.region}`;

    return this.sdk.request('S3',
      'listObjectsV2',
      { Bucket: bucketName },
      this.options.stage,
      this.options.region)
      .then((result) => {
        if (result.Contents.length) {
          const fileNames = result.Contents.map((object) => object.Key);

          const objectsToRemove = [];
          fileNames.forEach((fileName) => {
            const regex = new RegExp(/^(.+)-.+\.zip$/);

            if (fileName.match(regex)) {
              objectsToRemove.push({ Key: fileName });
            }
          });

          return BbPromise.resolve(objectsToRemove);
        }
        return BbPromise.resolve();
      });
  },

  cleanupS3Bucket(objectsToRemove) {
    if (objectsToRemove && objectsToRemove.length) {
      this.serverless.cli.log('Removing old service versions...');

      const bucketName =
        `${this.serverless.service.service}-${this.options.stage}-${this.options.region}`;

      return this.sdk.request('S3',
        'deleteObjects',
        {
          Bucket: bucketName,
          Delete: { Objects: objectsToRemove },
        },
        this.options.stage,
        this.options.region);
    }

    return BbPromise.resolve();
  },

  uploadZipFileToS3Bucket() {
    this.serverless.cli.log('Uploading .zip file to S3...');

    const bucketName =
      `${this.serverless.service.service}-${this.options.stage}-${this.options.region}`;

    const body = fs.readFileSync(this.serverless.service.package.artifact);

    const params = {
      Bucket: bucketName,
      Key: this.serverless.service.package.artifact.split(path.sep).pop(),
      Body: body,
    };

    return this.sdk.request('S3',
      'putObject',
      params,
      this.options.stage,
      this.options.region);
  },

  uploadDeploymentPackage() {
    return BbPromise.bind(this)
      .then(this.getServiceObjectsFromS3Bucket)
      .then(this.cleanupS3Bucket)
      .then(this.uploadZipFileToS3Bucket);
  },
};
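One detail worth noting in getServiceObjectsFromS3Bucket above: the `/^(.+)-.+\.zip$/` regex appears intended to catch the timestamped `<name>-<timestamp>.zip` keys that earlier deployments uploaded, and only those keys get queued for deletion. A small hedged illustration (the key names are made up):

'use strict';

// Illustration only: which bucket keys the cleanup regex would select.
const regex = /^(.+)-.+\.zip$/;

const keys = ['first-1466170000000.zip', 'artifact.zip', 'readme.txt'];
const objectsToRemove = keys
  .filter(key => regex.test(key))
  .map(key => ({ Key: key }));

console.log(objectsToRemove); // [ { Key: 'first-1466170000000.zip' } ]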
@@ -3,6 +3,7 @@
require('./validate');
require('./initializeResources');
require('./createStack');
require('./uploadDeploymentPackage');
require('./deployFunctions');
require('./updateStack');
require('./index');
@@ -1,99 +1,26 @@
'use strict';

const expect = require('chai').expect;
const path = require('path');
const os = require('os');
const AwsDeploy = require('../index');
const Serverless = require('../../../../Serverless');
const Zip = require('node-zip');
const sinon = require('sinon');
const BbPromise = require('bluebird');

describe('deployFunctions', () => {
  let serverless;
  let awsDeploy;
  let zip;

  const functionsObjectMock = {
    name_template: 'name-template-name',
    first: {
      handler: 'first.function.handler',
      exclude: [
        'foo',
        'bar.js',
      ],
      include: [
        'bar.js', // should be included even if it's excluded
        'includeme',
      ],
    },
    second: {
      handler: 'second.function.handler',
      exclude: [
        'baz',
        'qux.js',
      ],
      include: [
        'qux.js', // should be included even if it's excluded
        'includeme',
      ],
    },
  };

  const simpleDeployedFunctionsArrayMock = [
    {
      name: 'function',
      handler: 'function.handler',
      exclude: [
        'foo',
        'bar.js',
      ],
    },
  ];

  const nestedDeployedFunctionsArrayMock = [
    {
      name: 'function',
      handler: 'nested/function.handler',
      exclude: [
        'foo',
        'bar.js',
      ],
    },
  ];

  const includeExcludedFileDeployedFunctionsArrayMock = [
    {
      name: 'function',
      handler: 'function.handler',
      exclude: [
        'bar.js',
      ],
      include: [
        'bar.js',
      ],
    },
  ];

  const functionCodeMock = `
    'use strict';

    module.exports.handler = function(event, context, cb) {
      return cb(null, {
        message: 'First function'
      });
    };
  `;

  beforeEach(() => {
    serverless = new Serverless();
    zip = new Zip();
    const options = {
      stage: 'dev',
      region: 'us-east-1',
    };
    awsDeploy = new AwsDeploy(serverless, options);
    awsDeploy.serverless.cli = new serverless.classes.CLI();
    awsDeploy = new AwsDeploy(serverless);
  });

  describe('#extractFunctionHandlers()', () => {
@@ -111,300 +38,5 @@ describe('deployFunctions', () => {
        ).to.equal(functionsObjectMock.second.handler);
      })
    );

    it('should extract the exclude array in the function definitions', () => awsDeploy
      .extractFunctionHandlers().then(() => {
        expect(
          awsDeploy.deployedFunctions[0].exclude
        ).to.include('foo');
        expect(
          awsDeploy.deployedFunctions[0].exclude
        ).to.include('bar.js');

        expect(
          awsDeploy.deployedFunctions[1].exclude
        ).to.include('baz');
        expect(
          awsDeploy.deployedFunctions[1].exclude
        ).to.include('qux.js');
      })
    );

    it('should extract the include array in the functions definitions', () => awsDeploy
      .extractFunctionHandlers().then(() => {
        expect(
          awsDeploy.deployedFunctions[0].include
        ).to.include('bar.js');
        expect(
          awsDeploy.deployedFunctions[0].include
        ).to.include('includeme');

        expect(
          awsDeploy.deployedFunctions[1].include
        ).to.include('qux.js');
        expect(
          awsDeploy.deployedFunctions[1].include
        ).to.include('includeme');
      })
    );
  });

  describe('#zipFunctions()', () => {
    it('should zip a simple function', () => {
      awsDeploy.deployedFunctions = simpleDeployedFunctionsArrayMock;

      const functionFileNameBase = 'function';

      // create a function in a temporary directory
      const tmpDirPath = path.join(os.tmpdir(), (new Date).getTime().toString());
      const tmpFilePath = path.join(tmpDirPath, `${functionFileNameBase}.js`);
      serverless.utils.writeFileSync(tmpFilePath, functionCodeMock);

      // set the servicePath
      serverless.config.servicePath = tmpDirPath;

      return awsDeploy.zipFunctions().then(() => {
        expect(typeof awsDeploy.deployedFunctions[0].zipFileData).to.not.equal('undefined');

        // look into the zippedFileData
        const unzippedFileData = zip.load(awsDeploy.deployedFunctions[0].zipFileData);

        expect(unzippedFileData.files[`${functionFileNameBase}.js`].name)
          .to.equal(`${functionFileNameBase}.js`);
        expect(unzippedFileData.files[`${functionFileNameBase}.js`].dir).to.equal(false);
      });
    });

    it('should zip nested code', () => {
      // set the deployedFunctions array
      awsDeploy.deployedFunctions = nestedDeployedFunctionsArrayMock;

      const functionFileNameBase = 'function';

      // create a function in a temporary directory --> nested/function.js
      const tmpDirPath = path.join(os.tmpdir(), (new Date).getTime().toString(), 'nested');
      const tmpFilePath = path.join(tmpDirPath, `${functionFileNameBase}.js`);

      serverless.utils.writeFileSync(tmpFilePath, functionCodeMock);

      // add a lib directory on the same level where the "nested" directory lives --> lib/some-file
      const libDirectory = path.join(tmpDirPath, '..', 'lib');
      serverless.utils.writeFileSync(path.join(libDirectory, 'some-file'), 'content');

      // set the servicePath
      serverless.config.servicePath = tmpDirPath;

      return awsDeploy.zipFunctions().then(() => {
        expect(typeof awsDeploy.deployedFunctions[0].zipFileData).to.not.equal('undefined');

        // look into the zippedFileData
        const unzippedFileData = zip.load(awsDeploy.deployedFunctions[0].zipFileData);

        expect(unzippedFileData.files[`nested/${functionFileNameBase}.js`].name)
          .to.equal(`nested/${functionFileNameBase}.js`);
        expect(unzippedFileData.files[`nested/${functionFileNameBase}.js`].dir)
          .to.equal(false);

        expect(unzippedFileData.files['lib/some-file'].name)
          .to.equal('lib/some-file');
        expect(unzippedFileData.files['lib/some-file'].dir)
          .to.equal(false);
      });
    });

    it('should exclude defined files and folders', () => {
      awsDeploy.deployedFunctions = simpleDeployedFunctionsArrayMock;

      const functionFileNameBase = 'function';

      // create a function in a temporary directory
      const tmpDirPath = path.join(os.tmpdir(), (new Date).getTime().toString());
      const tmpFilePath = path.join(tmpDirPath, `${functionFileNameBase}.js`);
      serverless.utils.writeFileSync(tmpFilePath, functionCodeMock);

      // create a folder with the name "foo" which also includes a file --> foo/baz.txt
      serverless.utils.writeFileSync(path.join(tmpDirPath, 'foo', 'baz.txt'), 'content');

      // create a file with the name "bar.js" --> bar.js
      serverless.utils.writeFileSync(path.join(tmpDirPath, 'bar.js'), 'content');

      // set the servicePath
      serverless.config.servicePath = tmpDirPath;

      return awsDeploy.zipFunctions().then(() => {
        // look into the zippedFileData
        const unzippedFileData = zip.load(awsDeploy.deployedFunctions[0].zipFileData);

        expect(unzippedFileData.files[`${functionFileNameBase}.js`].name)
          .to.equal(`${functionFileNameBase}.js`);
        expect(unzippedFileData.files[`${functionFileNameBase}.js`].dir).to.equal(false);

        expect(unzippedFileData.files['foo/baz.txt']).to.equal(undefined);
        expect(unzippedFileData.files['bar.js']).to.equal(undefined);
      });
    });

    it('should exclude predefined files and folders (e.g. like .git)', () => {
      awsDeploy.deployedFunctions = simpleDeployedFunctionsArrayMock;

      const functionFileNameBase = 'function';

      // create a function in a temporary directory
      const tmpDirPath = path.join(os.tmpdir(), (new Date).getTime().toString());
      const tmpFilePath = path.join(tmpDirPath, `${functionFileNameBase}.js`);
      serverless.utils.writeFileSync(tmpFilePath, functionCodeMock);

      // create the files and folder which should be ignored
      // .gitignore
      const gitignoreFilePath = path.join(tmpDirPath, '.gitignore');
      serverless.utils.writeFileSync(gitignoreFilePath, 'content');

      // .DS_Store
      const dsStoreFilePath = path.join(tmpDirPath, '.DS_Store');
      serverless.utils.writeFileSync(dsStoreFilePath, 'content');

      // serverless.yaml
      const serverlessYamlFilePath = path.join(tmpDirPath, 'serverless.yaml');
      serverless.utils.writeFileSync(serverlessYamlFilePath, 'content');

      // serverless.env.yaml
      const serverlessEnvYamlFilePath = path.join(tmpDirPath, 'serverless.env.yaml');
      serverless.utils.writeFileSync(serverlessEnvYamlFilePath, 'content');

      const gitFilePath = path.join(path.join(tmpDirPath, '.git'), 'some-random-git-file');
      serverless.utils.writeFileSync(gitFilePath, 'content');

      // set the servicePath
      serverless.config.servicePath = tmpDirPath;

      return awsDeploy.zipFunctions().then(() => {
        // look into the zippedFileData
        const unzippedFileData = zip.load(awsDeploy.deployedFunctions[0].zipFileData);

        expect(unzippedFileData.files[`${functionFileNameBase}.js`].name)
          .to.equal(`${functionFileNameBase}.js`);
        expect(unzippedFileData.files[`${functionFileNameBase}.js`].dir).to.equal(false);

        expect(unzippedFileData.files['.gitignore']).to.equal(undefined);
        expect(unzippedFileData.files['.DS_Store']).to.equal(undefined);
        expect(unzippedFileData.files['.serverless.yaml']).to.equal(undefined);
        expect(unzippedFileData.files['.serverless.env.yaml']).to.equal(undefined);
        expect(unzippedFileData.files['.git']).to.equal(undefined);
      });
    });

    it('should include a previously excluded file', () => {
      awsDeploy.deployedFunctions = includeExcludedFileDeployedFunctionsArrayMock;

      const functionFileNameBase = 'function';

      // create a function in a temporary directory
      const tmpDirPath = path.join(os.tmpdir(), (new Date).getTime().toString());
      const tmpFilePath = path.join(tmpDirPath, `${functionFileNameBase}.js`);
      serverless.utils.writeFileSync(tmpFilePath, functionCodeMock);

      // create a file with the name "bar.js" --> bar.js
      serverless.utils.writeFileSync(path.join(tmpDirPath, 'bar.js'), 'content');

      // set the servicePath
      serverless.config.servicePath = tmpDirPath;

      return awsDeploy.zipFunctions().then(() => {
        // look into the zippedFileData
        const unzippedFileData = zip.load(awsDeploy.deployedFunctions[0].zipFileData);

        expect(unzippedFileData.files[`${functionFileNameBase}.js`].name)
          .to.equal(`${functionFileNameBase}.js`);
        expect(unzippedFileData.files[`${functionFileNameBase}.js`].dir).to.equal(false);

        expect(unzippedFileData.files['bar.js'].name).to.equal('bar.js');
        expect(unzippedFileData.files['bar.js'].dir).to.equal(false);
      });
    });
  });

  describe('#getFunctionObjectsFromS3Bucket()', () => {
    it('should resolve if no function objects are found', () => {
      const functionObjects = {
        Contents: [],
      };

      const listObjectsStub = sinon
        .stub(awsDeploy.sdk, 'request').returns(BbPromise.resolve(functionObjects));

      return awsDeploy.getFunctionObjectsFromS3Bucket().then(() => {
        expect(listObjectsStub.calledOnce).to.be.equal(true);
        expect(listObjectsStub.calledWith(awsDeploy.options.stage, awsDeploy.options.region));
        awsDeploy.sdk.request.restore();
      });
    });

    it('should return all to be removed function objects from the S3 bucket', () => {
      const functionObjects = {
        Contents: [
          {
            Key: 'first-function',
          },
          {
            Key: 'second-function',
          },
        ],
      };

      const listObjectsStub = sinon
        .stub(awsDeploy.sdk, 'request').returns(BbPromise.resolve(functionObjects));

      return awsDeploy.getFunctionObjectsFromS3Bucket().then(() => {
        expect(listObjectsStub.calledOnce).to.be.equal(true);
        expect(listObjectsStub.calledWith(awsDeploy.options.stage, awsDeploy.options.region));
        awsDeploy.sdk.request.restore();
      });
    });
  });

  describe('#cleanupS3Bucket()', () => {
    let deleteObjectsStub;

    beforeEach(() => {
      deleteObjectsStub = sinon
        .stub(awsDeploy.sdk, 'request').returns(BbPromise.resolve());
    });

    it('should resolve if no function objects are found in the S3 bucket', () => awsDeploy
      .cleanupS3Bucket().then(() => {
        expect(deleteObjectsStub.calledOnce).to.be.equal(false);
        awsDeploy.sdk.request.restore();
      })
    );

    it('should remove all old function files from the S3 bucket if available', () => {
      const functionObjects = [{ Key: 'first-function' }, { Key: 'second-function' }];

      return awsDeploy.cleanupS3Bucket(functionObjects).then(() => {
        expect(deleteObjectsStub.calledOnce).to.be.equal(true);
        expect(deleteObjectsStub.calledWith(awsDeploy.options.stage, awsDeploy.options.region));
        awsDeploy.sdk.request.restore();
      });
    });
  });

  describe('#uploadZipFilesToS3Bucket()', () => {
    it('should upload the zip files to the S3 bucket', () => {
      awsDeploy.deployedFunctions = [
        {
          zipFileKey: true,
          zipFileData: true,
        },
      ];

      const putObjectStub = sinon
        .stub(awsDeploy.sdk, 'request').returns(BbPromise.resolve());

      return awsDeploy.uploadZipFilesToS3Bucket().then(() => {
        expect(putObjectStub.calledOnce).to.be.equal(true);
        expect(putObjectStub.calledWith(awsDeploy.options.stage, awsDeploy.options.region));
        awsDeploy.sdk.request.restore();
      });
    });
  });
});
@@ -52,15 +52,19 @@ describe('AwsDeploy', () => {
  });

  it('should run "deploy:deploy" promise chain in order', () => {
    const uploadDeploymentPackage = sinon
      .stub(awsDeploy, 'uploadDeploymentPackage').returns(BbPromise.resolve());
    const deployFunctionsStub = sinon
      .stub(awsDeploy, 'deployFunctions').returns(BbPromise.resolve());
    const updateStackStub = sinon
      .stub(awsDeploy, 'updateStack').returns(BbPromise.resolve());

    return awsDeploy.hooks['deploy:deploy']().then(() => {
      expect(deployFunctionsStub.calledOnce).to.be.equal(true);
      expect(uploadDeploymentPackage.calledOnce).to.be.equal(true);
      expect(deployFunctionsStub.calledAfter(uploadDeploymentPackage)).to.be.equal(true);
      expect(updateStackStub.calledAfter(deployFunctionsStub)).to.be.equal(true);

      awsDeploy.uploadDeploymentPackage.restore();
      awsDeploy.deployFunctions.restore();
      awsDeploy.updateStack.restore();
    });
@@ -35,7 +35,7 @@ describe('updateStack', () => {
        Properties: {
          Code: {
            S3Bucket: 'new-service-dev-us-east-1',
            S3Key: '',
            S3Key: 'zip-file.zip',
          },
          FunctionName: 'new-service-first',
          Handler: 'first.function.handler',
@@ -75,17 +75,6 @@ describe('updateStack', () => {
      .stub(awsDeploy.sdk, 'request').returns(BbPromise.resolve());
  });

  it('should add the S3Key to the compiled function resources before updating the stack',
    () => awsDeploy.update().then(() => {
      expect(updateStackStub.calledOnce).to.be.equal(true);
      expect(updateStackStub.calledWith(awsDeploy.options.stage, awsDeploy.options.region));
      expect(awsDeploy.serverless.service.resources.Resources
        .first.Properties.Code.S3Key).to.equal('zipFileOfFirstFunction');

      awsDeploy.sdk.request.restore();
    })
  );

  it('should update the stack', () => awsDeploy.update()
    .then(() => {
      expect(updateStackStub.calledOnce).to.be.equal(true);
lib/plugins/aws/deploy/tests/uploadDeploymentPackage.js (new file, 108 lines)
@@ -0,0 +1,108 @@
'use strict';

const sinon = require('sinon');
const os = require('os');
const path = require('path');
const BbPromise = require('bluebird');
const expect = require('chai').expect;
const AwsDeploy = require('../index');
const Serverless = require('../../../../Serverless');

describe('uploadDeploymentPackage', () => {
  let serverless;
  let awsDeploy;

  beforeEach(() => {
    serverless = new Serverless();
    const options = {
      stage: 'dev',
      region: 'us-east-1',
    };
    awsDeploy = new AwsDeploy(serverless, options);
    awsDeploy.serverless.cli = new serverless.classes.CLI();
  });

  describe('#getServiceObjectsFromS3Bucket()', () => {
    it('should resolve if no service objects are found', () => {
      const serviceObjects = {
        Contents: [],
      };

      const listObjectsStub = sinon
        .stub(awsDeploy.sdk, 'request').returns(BbPromise.resolve(serviceObjects));

      return awsDeploy.getServiceObjectsFromS3Bucket().then(() => {
        expect(listObjectsStub.calledOnce).to.be.equal(true);
        expect(listObjectsStub.calledWith(awsDeploy.options.stage, awsDeploy.options.region));
        awsDeploy.sdk.request.restore();
      });
    });

    it('should return all to be removed service objects from the S3 bucket', () => {
      const serviceObjects = {
        Contents: [
          {
            Key: 'first-service',
          },
          {
            Key: 'second-service',
          },
        ],
      };

      const listObjectsStub = sinon
        .stub(awsDeploy.sdk, 'request').returns(BbPromise.resolve(serviceObjects));

      return awsDeploy.getServiceObjectsFromS3Bucket().then(() => {
        expect(listObjectsStub.calledOnce).to.be.equal(true);
        expect(listObjectsStub.calledWith(awsDeploy.options.stage, awsDeploy.options.region));
        awsDeploy.sdk.request.restore();
      });
    });
  });

  describe('#cleanupS3Bucket()', () => {
    let deleteObjectsStub;

    beforeEach(() => {
      deleteObjectsStub = sinon
        .stub(awsDeploy.sdk, 'request').returns(BbPromise.resolve());
    });

    it('should resolve if no service objects are found in the S3 bucket', () => awsDeploy
      .cleanupS3Bucket().then(() => {
        expect(deleteObjectsStub.calledOnce).to.be.equal(false);
        awsDeploy.sdk.request.restore();
      })
    );

    it('should remove all old service files from the S3 bucket if available', () => {
      const serviceObjects = [{ Key: 'first-service' }, { Key: 'second-service' }];

      return awsDeploy.cleanupS3Bucket(serviceObjects).then(() => {
        expect(deleteObjectsStub.calledOnce).to.be.equal(true);
        expect(deleteObjectsStub.calledWith(awsDeploy.options.stage, awsDeploy.options.region));
        awsDeploy.sdk.request.restore();
      });
    });
  });

  describe('#uploadZipFileToS3Bucket()', () => {
    it('should upload the zip file to the S3 bucket', () => {
      const tmpDirPath = path.join(os.tmpdir(), (new Date).getTime().toString());
      const artifactFilePath = path.join(tmpDirPath, 'artifact.zip');
      serverless.utils.writeFileSync(artifactFilePath, 'artifact.zip file content');

      awsDeploy.serverless.service.package.artifact = artifactFilePath;

      const putObjectStub = sinon
        .stub(awsDeploy.sdk, 'request').returns(BbPromise.resolve());

      return awsDeploy.uploadZipFileToS3Bucket().then(() => {
        expect(putObjectStub.calledOnce).to.be.equal(true);
        expect(putObjectStub.calledWith(awsDeploy.options.stage, awsDeploy.options.region));
        awsDeploy.sdk.request.restore();
      });
    });
  });
});