Add core S3 bucket cleanup upon deployment

This commit is contained in:
Philipp Muens 2016-07-04 14:35:38 +01:00
parent c7335c3ee4
commit 7f5aec098b
4 changed files with 124 additions and 6 deletions

View File

@ -66,11 +66,13 @@ Here are the steps the AWS plugins take to compile and deploy the service on the
1. The [`serverless.yaml`](../understanding-serverless/serverless-yaml.md) and
[`serverless.env.yaml`](../understanding-serverless/serverless-env-yaml.md) files are loaded into memory
2. A default AWS CloudFormation template is loaded (`deploy:initializeResources`)
3. The CloudFormation template is deployed to AWS (`deploy:createProviderStacks`)
3. The CloudFormation template is deployed to AWS (an S3 bucket for the service gets created) (`deploy:createProviderStacks`)
4. The functions of the [`serverless.yaml`](../understanding-serverless/serverless-yaml.md) file are compiled to lambda
resources and stored into memory (`deploy:compileFunctions`)
5. Each function's events are compiled into CloudFormation resources and stored into memory (`deploy:compileEvents`)
6. The compiled function and event resources are attached to the core CloudFormation template and the updated
6. Old functions (if available) are removed from the S3 bucket (`deploy:deploy`)
7. The function code gets zipped up and is uploaded to S3 (`deploy:deploy`)
8. The compiled function and event resources are attached to the core CloudFormation template and the updated
CloudFormation template gets redeployed (`deploy:deploy`)
#### The code

View File

@ -23,9 +23,10 @@ merging them in.
Next up it hooks into the [`deploy:createProviderStacks`](/lib/plugins/deploy) lifecycle and deploys the
previously created CloudFormation template to AWS.
At the end it hooks into [`deploy:deploy`](/lib/plugins/deploy) lifecycle to zip the lambda function and
uploads it to the core S3 bucket (which is defined in the core CloudFormation template). Furthermore it updates the
stack with all the Resources which are defined in `serverless.service.resources.Resources`.
In the end it hooks into [`deploy:deploy`](/lib/plugins/deploy) lifecycle. At first it removes the old function .zip files
in the service's S3 bucket. After that it zips the lambda functions and uploads them to the S3 bucket (which is defined in
the core CloudFormation template). Furthermore it updates the stack with all the Resources which are defined in
`serverless.service.resources.Resources`.
The stack status is checked every 5 seconds with the help of the CloudFormation API. It will return a success message if
the stack status is `CREATE_COMPLETE` or `UPDATE_COMPLETE` (depends if you deploy your service for the first time or

View File

@ -94,6 +94,54 @@ module.exports = {
return BbPromise.resolve();
},
getFunctionObjectsFromS3Bucket() {
const bucketName =
`${this.serverless.service.service}-${this.options.stage}-${this.options.region}`;
return this.sdk.request('S3',
'listObjectsV2',
{ Bucket: bucketName },
this.options.stage,
this.options.region)
.then((result) => {
if (result.Contents.length) {
const fileNames = result.Contents.map((object) => object.Key);
const objectsToRemove = [];
fileNames.forEach((fileName) => {
const regex = new RegExp(/^(.+)-.+\.zip$/);
if (fileName.match(regex)) {
objectsToRemove.push({ Key: fileName });
}
});
return BbPromise.resolve(objectsToRemove);
}
return BbPromise.resolve();
});
},
cleanupS3Bucket(objectsToRemove) {
if (objectsToRemove && objectsToRemove.length) {
this.serverless.cli.log('Removing old function versions...');
const bucketName =
`${this.serverless.service.service}-${this.options.stage}-${this.options.region}`;
return this.sdk.request('S3',
'deleteObjects',
{
Bucket: bucketName,
Delete: { Objects: objectsToRemove },
},
this.options.stage,
this.options.region);
}
return BbPromise.resolve();
},
uploadZipFilesToS3Bucket() {
this.serverless.cli.log('Uploading zip files to S3...');
const bucketName =
@ -123,6 +171,8 @@ module.exports = {
return BbPromise.bind(this)
.then(this.extractFunctionHandlers)
.then(this.zipFunctions)
.then(this.getFunctionObjectsFromS3Bucket)
.then(this.cleanupS3Bucket)
.then(this.uploadZipFilesToS3Bucket);
},
};

View File

@ -323,6 +323,71 @@ describe('deployFunctions', () => {
});
});
describe('#getFunctionObjectsFromS3Bucket()', () => {
  afterEach(() => {
    // Restore in afterEach so a failing expectation cannot leak the stub
    // into the next test (the original restored inside .then(), which is
    // skipped when an assertion throws).
    awsDeploy.sdk.request.restore();
  });

  it('should resolve if no function objects are found', () => {
    const listObjectsStub = sinon
      .stub(awsDeploy.sdk, 'request').returns(BbPromise.resolve({ Contents: [] }));
    return awsDeploy.getFunctionObjectsFromS3Bucket().then((objectsToRemove) => {
      expect(listObjectsStub.calledOnce).to.be.equal(true);
      // An empty bucket resolves without a removal list.
      expect(objectsToRemove).to.be.equal(undefined);
    });
  });

  it('should return all to be removed function objects from the S3 bucket', () => {
    // Keys must match the "<name>-<suffix>.zip" artifact pattern; the
    // original fixture used bare names that the regex filters out, so the
    // test never exercised the matching path. Also note the original
    // `expect(stub.calledWith(...))` had no matcher chained and asserted
    // nothing — assert on the resolved value instead.
    const functionObjects = {
      Contents: [
        { Key: 'first-function-1234.zip' },
        { Key: 'second-function-5678.zip' },
      ],
    };
    const listObjectsStub = sinon
      .stub(awsDeploy.sdk, 'request').returns(BbPromise.resolve(functionObjects));
    return awsDeploy.getFunctionObjectsFromS3Bucket().then((objectsToRemove) => {
      expect(listObjectsStub.calledOnce).to.be.equal(true);
      expect(objectsToRemove).to.deep.equal([
        { Key: 'first-function-1234.zip' },
        { Key: 'second-function-5678.zip' },
      ]);
    });
  });
});
describe('#cleanupS3Bucket()', () => {
  let deleteObjectsStub;

  beforeEach(() => {
    deleteObjectsStub = sinon
      .stub(awsDeploy.sdk, 'request').returns(BbPromise.resolve());
  });

  afterEach(() => {
    // Restore here so a failing expectation cannot leak the stub into the
    // next test (the original restored inside the test bodies).
    awsDeploy.sdk.request.restore();
  });

  it('should resolve if no function objects are found in the S3 bucket', () =>
    awsDeploy.cleanupS3Bucket().then(() => {
      expect(deleteObjectsStub.calledOnce).to.be.equal(false);
    })
  );

  it('should remove all old function files from the S3 bucket if available', () => {
    const functionObjects = [{ Key: 'first-function' }, { Key: 'second-function' }];
    const bucketName = `${awsDeploy.serverless.service.service}-` +
      `${awsDeploy.options.stage}-${awsDeploy.options.region}`;
    return awsDeploy.cleanupS3Bucket(functionObjects).then(() => {
      expect(deleteObjectsStub.calledOnce).to.be.equal(true);
      // Bug fix: the original `expect(stub.calledWith(...))` chained no
      // matcher, so it asserted nothing. Verify the full request payload.
      expect(deleteObjectsStub.calledWithExactly(
        'S3',
        'deleteObjects',
        {
          Bucket: bucketName,
          Delete: { Objects: functionObjects },
        },
        awsDeploy.options.stage,
        awsDeploy.options.region
      )).to.be.equal(true);
    });
  });
});
describe('#uploadZipFilesToS3Bucket()', () => {
it('should upload the zip files to the S3 bucket', () => {
awsDeploy.deployedFunctions = [
@ -335,7 +400,7 @@ describe('deployFunctions', () => {
const putObjectStub = sinon
.stub(awsDeploy.sdk, 'request').returns(BbPromise.resolve());
return awsDeploy.create().then(() => {
return awsDeploy.uploadZipFilesToS3Bucket().then(() => {
expect(putObjectStub.calledOnce).to.be.equal(true);
expect(putObjectStub.calledWith(awsDeploy.options.stage, awsDeploy.options.region));
awsDeploy.sdk.request.restore();