Script file to build, package, and deploy AWS SAM applications.
#!/usr/bin/env bash
: '
Script file to build, package, and deploy AWS SAM applications.
Ensure the AWS CLI and SAM CLI are installed and configured,
define the script variables and configuration file, then execute the script.
Accepts -v (verbose output) and -q (quiet output) arguments.

Prerequisites:
  - aws-cli 1.29.62 -> https://pypi.org/project/awscli/
  - aws-sam-cli 1.98.0 -> https://pypi.org/project/aws-sam-cli/

Configuration File:
  See https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html
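  An illustrative minimal samconfig.toml (the stack name, region, and other
  values below are placeholders; the keys you need depend on your template):
    version = 0.1
    [default.deploy.parameters]
    stack_name = "my-sam-app"
    region = "us-west-2"
    capabilities = "CAPABILITY_IAM"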
Script Variables:
  - CONDA_ENV: The conda environment that has the required packages installed.
      Optional, if the current environment has them installed.
  - CONFIG_FILE: Path and name of the AWS SAM configuration file.
      Optional, defaults to "samconfig.toml".
  - CONFIG_ENV: Environment to use in the AWS SAM configuration file.
      Optional, defaults to "default".
  - ARCHIVE_S3_BUCKET: S3 bucket to which archive files will be uploaded.
      The "check" step verifies that it exists and offers to create it if necessary.
      Required, no default.
  - ARCHIVE_S3_BUCKET_REGION: AWS region of "ARCHIVE_S3_BUCKET", if created.
      Optional, defaults to "us-west-2".
  - ARCHIVE_S3_BUCKET_TAGS: List of tags to add to "ARCHIVE_S3_BUCKET", if created
      (see the example below this list).
      Optional, defaults to an empty list.
  - PERFORM_CHECK: Whether to perform the "check" step.
      The "check" function verifies that required pre-existing resources exist.
      Currently, only "ARCHIVE_S3_BUCKET".
      Optional, defaults to "true".
  - PERFORM_BUILD: Whether to perform the "build" step.
      The "build" function creates a .aws-sam directory that structures your application
      in the format and location that sam local and the "deploy" step require.
      Should only be set to "false" if "PERFORM_DEPLOY" is also set to "false"
      or if the .aws-sam files for the "deploy" step already exist.
      Optional, defaults to "true".
  - PERFORM_PACKAGE: Whether to perform the "package" step.
      The "package" function uploads the local artifacts that the AWS SAM template references
      to S3 and creates a clean output copy of the template for the "deploy" step.
      Should only be set to "false" if "PERFORM_DEPLOY" is also set to "false"
      or if a clean template for the "deploy" step already exists.
      Optional, defaults to "true".
  - PERFORM_DEPLOY: Whether to perform the "deploy" step.
      The "deploy" function deploys the specified template by creating, then executing, a change set.
      Optional, defaults to "true".
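  Tag entries use key:value form; for example (illustrative values only):
    ARCHIVE_S3_BUCKET_TAGS=("project:demo" "environment:dev")
  is applied to the bucket as the tag set
    [{Key=project,Value=demo},{Key=environment,Value=dev}]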
Usage Example:
  CONDA_ENV=xxx
  ARCHIVE_S3_BUCKET=xxx
  wget -cO - "<this_file_url>" > "temp.sh"
  chmod +x "temp.sh"
  source "temp.sh" "$@"
  rm -rf "temp.sh"
'
#--------------------------------------------
#---------------- CONSTANTS -----------------
#--------------------------------------------
CONDA_ENV="${CONDA_ENV}"
CONFIG_FILE="${CONFIG_FILE:-samconfig.toml}"
CONFIG_ENV="${CONFIG_ENV:-default}"
ARCHIVE_S3_BUCKET="${ARCHIVE_S3_BUCKET}"
ARCHIVE_S3_BUCKET_REGION="${ARCHIVE_S3_BUCKET_REGION:-us-west-2}"
ARCHIVE_S3_BUCKET_TAGS=("${ARCHIVE_S3_BUCKET_TAGS[@]}")  # keep as an array; empty if unset
PERFORM_CHECK=${PERFORM_CHECK:-true}
PERFORM_BUILD=${PERFORM_BUILD:-true}
PERFORM_PACKAGE=${PERFORM_PACKAGE:-true}
PERFORM_DEPLOY=${PERFORM_DEPLOY:-true}
#------- DO NOT EDIT BELOW THIS LINE --------
FORMAT_SCRIPT_URL="https://gist.githubusercontent.com/anniethiessen/efb6bc0e52ccfc8b330aa41364b53e97/raw/0012edc1f009a36d196f03f09fda68e70691860b/shell_script_essentials.sh"
FORMAT_SCRIPT_NAME="shell_script_essentials.sh"
#--------------------------------------------
#---------------- FUNCTIONS -----------------
#--------------------------------------------
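# Note: shell_script_essentials.sh (downloaded below) is assumed to define the
# helpers used throughout this script: the output_*_message functions,
# exit_script, and the OUTPUT / VERBOSE_OUTPUT / PROMPT_VERBOSE variables that
# control verbosity. This description is inferred from how they are used here.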
function run_format_script {
    wget -cO - "${FORMAT_SCRIPT_URL}" > "${FORMAT_SCRIPT_NAME}"
    chmod +x "${FORMAT_SCRIPT_NAME}"
    source "${FORMAT_SCRIPT_NAME}" "$@"
    rm -rf "${FORMAT_SCRIPT_NAME}"
}
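# Converts each ARCHIVE_S3_BUCKET_TAGS entry of the form "key:value" into the
# TagSet string expected by "aws s3api put-bucket-tagging". For example
# (illustrative values), ("project:demo" "env:dev") becomes
# [{Key=project,Value=demo},{Key=env,Value=dev}].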
function generate_s3_bucket_tags {
    local tag_set=""
    for tag in "${ARCHIVE_S3_BUCKET_TAGS[@]}" ; do
        local tag_key="${tag%%:*}"
        local tag_value="${tag##*:}"
        local tag_dict="{Key=${tag_key},Value=${tag_value}}"
        tag_set="${tag_set}${tag_set:+,}${tag_dict}"
    done
    echo "[${tag_set}]"
}
function check_s3_bucket {
    aws s3api head-bucket \
        --bucket "${ARCHIVE_S3_BUCKET}" \
        &> "${OUTPUT}"
    local retval=$?
    if [[ ${retval} -eq 0 ]]; then
        output_success_message "S3 bucket ${ARCHIVE_S3_BUCKET} exists."
    else
        output_error_message "S3 bucket ${ARCHIVE_S3_BUCKET} does not exist."
        output_warning_message "Do you want to create S3 bucket ${ARCHIVE_S3_BUCKET}?"
        select response in "Yes" "No"; do
            case ${response} in
                Yes ) create_s3_bucket; break;;
                No ) exit_script;;
            esac
        done
    fi
}
function create_s3_bucket {
    aws s3 mb s3://"${ARCHIVE_S3_BUCKET}" \
        --region "${ARCHIVE_S3_BUCKET_REGION}" \
        &> "${OUTPUT}"
    local retval=$?
    if [[ ${retval} -eq 0 ]]; then
        output_success_message "S3 bucket ${ARCHIVE_S3_BUCKET} created."
    else
        output_error_message "S3 bucket ${ARCHIVE_S3_BUCKET} creation error." ${PROMPT_VERBOSE}
        exit_script
    fi
    # Skip tagging when no tags were provided.
    if [[ ${#ARCHIVE_S3_BUCKET_TAGS[@]} -eq 0 ]]; then
        return 0
    fi
    local tag_set
    tag_set=$(generate_s3_bucket_tags)
    aws s3api put-bucket-tagging \
        --bucket "${ARCHIVE_S3_BUCKET}" \
        --tagging "TagSet=${tag_set}" \
        &> "${OUTPUT}"
    retval=$?
    if [[ ${retval} -eq 0 ]]; then
        output_success_message "S3 bucket tag set ${tag_set} added."
    else
        output_error_message "S3 bucket tag set ${tag_set} add error." ${PROMPT_VERBOSE}
        exit_script
    fi
}
function check {
    check_s3_bucket
}
function build {
    sam build \
        --config-file "${CONFIG_FILE}" \
        --config-env "${CONFIG_ENV}" \
        &> "${OUTPUT}"
    local retval=$?
    if [[ ${retval} -eq 0 ]]; then
        output_success_message "Artifacts built."
    else
        output_error_message "Artifact build error." ${PROMPT_VERBOSE}
        exit_script
    fi
}
function package {
    sam package \
        --config-file "${CONFIG_FILE}" \
        --config-env "${CONFIG_ENV}" \
        &> "${OUTPUT}"
    local retval=$?
    if [[ ${retval} -eq 0 ]]; then
        output_success_message "Application packaged."
    else
        output_error_message "Application packaging error." ${PROMPT_VERBOSE}
        exit_script
    fi
}
function deploy {
    sam deploy \
        --config-file "${CONFIG_FILE}" \
        --config-env "${CONFIG_ENV}" \
        &> "${OUTPUT}"
    local retval=$?
    if [[ ${retval} -eq 0 ]]; then
        output_success_message "Stack deployed."
    else
        output_error_message "Stack deployment error." ${PROMPT_VERBOSE}
        exit_script
    fi
}
#--------------------------------------------
#------------------ MAIN --------------------
#--------------------------------------------
run_format_script "$@"
if [ -n "${CONDA_ENV}" ]; then
    eval "$(conda shell.bash hook)"
    conda activate "${CONDA_ENV}"
fi
if [[ "${OUTPUT}" == "${VERBOSE_OUTPUT}" ]]; then
    PYTHON_VERSION=$( python --version 2>&1 )
    AWSCLI_VERSION=$( aws --version 2>&1 )
    SAMCLI_VERSION=$( sam --version 2>&1 )
    output_warning_message "Using ${PYTHON_VERSION}, ${AWSCLI_VERSION}, and ${SAMCLI_VERSION}"
fi
output_header_message "----------------------------------------"
output_header_message "[1/4] CHECK"
output_header_message "checking required resources exist ..."
output_header_message "----------------------------------------"
if [ "${PERFORM_CHECK}" == true ] ; then
    check
else output_warning_message "Checks skipped"; fi
output_header_message "----------------------------------------"
output_header_message "[2/4] BUILD"
output_header_message "building deployment artifacts ..."
output_header_message "----------------------------------------"
if [ "${PERFORM_BUILD}" == true ] ; then
    build
else output_warning_message "Build skipped"; fi
output_header_message "----------------------------------------"
output_header_message "[3/4] PACKAGE"
output_header_message "packaging and uploading archive to S3 ..."
output_header_message "----------------------------------------"
if [ "${PERFORM_PACKAGE}" == true ] ; then
    package
else output_warning_message "Package skipped"; fi
output_header_message "----------------------------------------"
output_header_message "[4/4] DEPLOY"
output_header_message "deploying application to CloudFormation ..."
output_header_message "----------------------------------------"
if [ "${PERFORM_DEPLOY}" == true ] ; then
    deploy
else output_warning_message "Deploy skipped"; fi
Change Log:
v1: Initial
v2:
- Added CONDA_ENV variable: the conda environment is activated if defined
TODO:
- Add a sam validate step
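A validate step could mirror the existing build/package/deploy functions. A minimal sketch, assuming sam validate is driven by the same --config-file/--config-env options used elsewhere in the script:

    function validate {
        sam validate \
            --config-file "${CONFIG_FILE}" \
            --config-env "${CONFIG_ENV}" \
            &> "${OUTPUT}"
        local retval=$?
        if [[ ${retval} -eq 0 ]]; then
            output_success_message "Template validated."
        else
            output_error_message "Template validation error." ${PROMPT_VERBOSE}
            exit_script
        fi
    }

It would likely sit before the build step, guarded by a PERFORM_VALIDATE variable following the same pattern as the other PERFORM_* flags.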