diff --git a/ibm/mas_devops/roles/mongodb/defaults/main.yml b/ibm/mas_devops/roles/mongodb/defaults/main.yml
index 100beb632e..f9bc210831 100644
--- a/ibm/mas_devops/roles/mongodb/defaults/main.yml
+++ b/ibm/mas_devops/roles/mongodb/defaults/main.yml
@@ -108,6 +108,15 @@ ibm_mongo_name_default: "mongo-{{ mas_instance_id }}"
 ibm_mongo_name: "{{ lookup('env', 'IBM_MONGO_NAME') | default(ibm_mongo_name_default, True) }}"
 ibm_mongo_admin_password: "{{ lookup('env', 'IBM_MONGO_ADMIN_PASSWORD') }}"
 
+# aws documentdb backup vars
+# -----------------------------------------------------------------------------
+# S3 bucket name where DocumentDB backups will be stored
+docdb_backup_s3_bucket: "{{ lookup('env', 'DOCDB_BACKUP_S3_BUCKET') }}"
+
+# S3 key prefix (folder path) under which backups are organized
+# Default structure: <s3_prefix>/<cluster_name>/<job_name>/database/
+docdb_backup_s3_prefix: "{{ lookup('env', 'DOCDB_BACKUP_S3_PREFIX') | default('docdb-backups', True) }}"
+
 # aws docdb_secret_rotate vars
 # -----------------------------------------------------------------------------
 docdb_mongo_instance_name: "{{ lookup('env', 'DOCDB_MONGO_INSTANCE_NAME') }}"
diff --git a/ibm/mas_devops/roles/mongodb/tasks/providers/aws/backup-restore/after-backup-restore.yml b/ibm/mas_devops/roles/mongodb/tasks/providers/aws/backup-restore/after-backup-restore.yml
new file mode 100644
index 0000000000..b78101f7b1
--- /dev/null
+++ b/ibm/mas_devops/roles/mongodb/tasks/providers/aws/backup-restore/after-backup-restore.yml
@@ -0,0 +1,27 @@
+---
+# Clean up local temporary backup files
+# -----------------------------------------------------------------------------
+- name: "Delete local backup working directory"
+  when: docdb_local_backup_dir is defined and docdb_local_backup_dir | length > 0
+  file:
+    path: "{{ docdb_local_backup_dir }}"
+    state: absent
+  changed_when: true
+  ignore_errors: true
+
+- name: "Delete temporary DocumentDB CA certificate"
+  when:
+    - docdb_ca_file is defined
+    - docdb_ca_file | length > 0
+    - docdb_ca_file.startswith('/tmp/')
+  file:
+    path: "{{ docdb_ca_file }}"
+    state: absent
+  changed_when: true
+  ignore_errors: true
+
+- name: "Debug: cleanup completed"
+  debug:
+    msg:
+      - "Cleaned up local backup directory .......... {{ docdb_local_backup_dir | default('N/A') }}"
+      - "Cleaned up CA certificate .................. {{ docdb_ca_file | default('N/A') }}"
diff --git a/ibm/mas_devops/roles/mongodb/tasks/providers/aws/backup-restore/backup-database.yml b/ibm/mas_devops/roles/mongodb/tasks/providers/aws/backup-restore/backup-database.yml
new file mode 100644
index 0000000000..87cfe0d016
--- /dev/null
+++ b/ibm/mas_devops/roles/mongodb/tasks/providers/aws/backup-restore/backup-database.yml
@@ -0,0 +1,338 @@
+---
+- name: "Backup AWS DocumentDB databases"
+  block:
+    # Set backup folder paths
+    # -------------------------------------------------------------------------
+    - name: "Set fact: backup folder paths"
+      set_fact:
+        docdb_backup_data_dir: "{{ docdb_local_backup_dir }}/{{ masbr_job_data_type }}"
+        docdb_backup_dump_dir: "{{ docdb_local_backup_dir }}/{{ masbr_job_data_type }}/mongodump"
+        docdb_backup_index_dir: "{{ docdb_local_backup_dir }}/{{ masbr_job_data_type }}/indexes"
+        docdb_backup_log_file: "{{ docdb_local_backup_dir }}/{{ masbr_job_data_type }}/{{ masbr_job_name }}-backup.log"
+
+    - name: "Create backup subdirectories"
+      file:
+        path: "{{ item }}"
+        state: directory
+        mode: "0755"
+      loop:
+        - "{{ docdb_backup_dump_dir }}"
+        - "{{ docdb_backup_index_dir }}"
+      changed_when: true
+
+    # Set database name filter
+    # -------------------------------------------------------------------------
+    - name: "Set fact: default database name filter (all MAS databases)"
+      when: mas_app_id is not defined or mas_app_id | length == 0
+      set_fact:
+        # Backup all databases belonging to the specified MAS instance
+        docdb_db_filter: "^(mas|iot)(_|-){{ mas_instance_id }}(_|-)"
+
+    - name: "Set fact: database name filter for {{ mas_app_id }}"
+      when: mas_app_id is defined and mas_app_id | length > 0
+      block:
+        - name: "Set fact: database name filters for each MAS app"
+          set_fact:
+            docdb_db_all_filters:
+              iot: "iot_{{ mas_instance_id }}_"
+              visualinspection: "mas-{{ mas_instance_id }}-(visualinspection|edgeman)"
+              optimizer: "mas_{{ mas_instance_id }}_optimizer"
+
+        - name: "Set fact: always backup core databases"
+          set_fact:
+            docdb_db_app_filters:
+              ["mas_{{ mas_instance_id }}_(core|catalog|adoptionusage)"]
+
+        - name: "Set fact: append database name filter for {{ mas_app_id }}"
+          set_fact:
+            # ">-" (not ">") so the value is exactly one Jinja expression and
+            # set_fact keeps the native list type; a trailing newline would
+            # stringify the list and break select()/join() below
+            docdb_db_app_filters: >-
+              {{ docdb_db_app_filters + [docdb_db_all_filters[mas_app_id] | default('')] }}
+
+    - name: "Set fact: combined database name filter for {{ mas_app_id }}"
+      when: docdb_db_app_filters is defined and docdb_db_app_filters | length > 0
+      set_fact:
+        docdb_db_filter: "{{ docdb_db_app_filters | select() | join('|') }}"
+
+    - name: "Debug: database name filter"
+      debug:
+        msg: "Database filter ............................ {{ docdb_db_filter }}"
+
+    # Get list of databases to back up
+    # -------------------------------------------------------------------------
+    - name: "Get list of databases matching filter"
+      shell: >
+        mongosh --tls
+        --host '{{ docdb_primary_host }}'
+        --tlsCAFile '{{ docdb_ca_file }}'
+        --username '{{ docdb_master_username }}'
+        --password '{{ docdb_master_password }}'
+        --quiet
+        --eval "JSON.stringify(db.adminCommand({ listDatabases: 1, nameOnly: true, filter: { name: /{{ docdb_db_filter }}/ } }))"
+        2>> {{ docdb_backup_log_file }}
+      register: _docdb_db_list_output
+      no_log: true
+      changed_when: false
+
+    - name: "Set fact: list of databases to back up"
+      set_fact:
+        docdb_db_names: "{{ _docdb_db_list_output.stdout | from_json | json_query('databases') }}"
+
+    - name: "Debug: databases to back up"
+      debug:
+        msg: "Databases to back up ....................... {{ docdb_db_names | map(attribute='name') | list }}"
+
+    - name: "Fail if no databases found matching filter"
+      assert:
+        that: docdb_db_names | length > 0
+        fail_msg: "No databases found matching filter '{{ docdb_db_filter }}' in DocumentDB cluster '{{ docdb_cluster_name }}'"
+
+    # Run mongodump for each database
+    # -------------------------------------------------------------------------
+    - name: "Run mongodump for database {{ item.name }}"
+      shell: >
+        mongodump
+        --host='{{ docdb_primary_host }}'
+        --ssl
+        --sslCAFile='{{ docdb_ca_file }}'
+        --username='{{ docdb_master_username }}'
+        --password='{{ docdb_master_password }}'
+        --authenticationDatabase=admin
+        --db='{{ item.name }}'
+        --out='{{ docdb_backup_dump_dir }}'
+        2>&1 | tee -a {{ docdb_backup_log_file }}
+      loop: "{{ docdb_db_names }}"
+      register: _mongodump_output
+      no_log: true
+      changed_when: true
+
+    - name: "Debug: mongodump output summary"
+      debug:
+        msg: "{{ _mongodump_output | json_query('results[*].stdout_lines[-1]') }}"
+
+    # Extract indexes as JSON for each database
+    # -------------------------------------------------------------------------
+    - name: "Extract indexes for database {{ item.name }}"
+      shell: >
+        mongosh --tls
+        --host '{{ docdb_primary_host }}'
+        --tlsCAFile '{{ docdb_ca_file }}'
+        --username '{{ docdb_master_username }}'
+        --password '{{ docdb_master_password }}'
+        --quiet
+        --eval "
+          var result = {};
+          var db = db.getSiblingDB('{{ item.name }}');
+          db.getCollectionNames().forEach(function(col) {
+            result[col] = db.getCollection(col).getIndexes();
+          });
+          print(JSON.stringify(result, null, 2));
+        "
+        2>> {{ docdb_backup_log_file }}
+        > '{{ docdb_backup_index_dir }}/{{ item.name }}-indexes.json'
+      loop: "{{ docdb_db_names }}"
+      register: _index_extract_output
+      no_log: true
+      changed_when: true
+
+    - name: "Debug: index extraction completed"
+      debug:
+        msg: "Extracted indexes for databases ............ {{ docdb_db_names | map(attribute='name') | list }}"
+
+    # Verify index files were created
+    # -------------------------------------------------------------------------
+    - name: "Verify index files were created"
+      stat:
+        path: "{{ docdb_backup_index_dir }}/{{ item.name }}-indexes.json"
+      loop: "{{ docdb_db_names }}"
+      register: _index_files_stat
+
+    - name: "Debug: index file sizes"
+      debug:
+        msg: "Index file {{ item.stat.path | basename }} size: {{ item.stat.size }} bytes"
+      loop: "{{ _index_files_stat.results }}"
+      when: item.stat.exists
+
+    # Create a combined index manifest file
+    # -------------------------------------------------------------------------
+    - name: "Create combined index manifest"
+      shell: >
+        echo '{' > '{{ docdb_backup_index_dir }}/all-indexes-manifest.json';
+        first=true;
+        for f in {{ docdb_backup_index_dir }}/*-indexes.json; do
+          dbname=$(basename "$f" -indexes.json);
+          if [ "$first" = true ]; then
+            first=false;
+          else
+            echo ',' >> '{{ docdb_backup_index_dir }}/all-indexes-manifest.json';
+          fi;
+          echo "\"$dbname\":" >> '{{ docdb_backup_index_dir }}/all-indexes-manifest.json';
+          cat "$f" >> '{{ docdb_backup_index_dir }}/all-indexes-manifest.json';
+        done;
+        echo '}' >> '{{ docdb_backup_index_dir }}/all-indexes-manifest.json'
+      changed_when: true
+
+    # Create tar.gz archives
+    # -------------------------------------------------------------------------
+    - name: "Create tar.gz archive of mongodump data"
+      shell: >
+        tar -czf '{{ docdb_backup_data_dir }}/{{ masbr_job_name }}-dump.tar.gz'
+        -C '{{ docdb_backup_dump_dir }}' .
+        2>&1 | tee -a {{ docdb_backup_log_file }}
+      register: _tar_dump_output
+      changed_when: true
+
+    - name: "Create tar.gz archive of index files"
+      shell: >
+        tar -czf '{{ docdb_backup_data_dir }}/{{ masbr_job_name }}-indexes.tar.gz'
+        -C '{{ docdb_backup_index_dir }}' .
+        2>&1 | tee -a {{ docdb_backup_log_file }}
+      register: _tar_index_output
+      changed_when: true
+
+    - name: "Get sizes of backup archives"
+      shell: >
+        du -sh
+        '{{ docdb_backup_data_dir }}/{{ masbr_job_name }}-dump.tar.gz'
+        '{{ docdb_backup_data_dir }}/{{ masbr_job_name }}-indexes.tar.gz'
+      register: _du_archives_output
+      changed_when: false
+
+    - name: "Debug: backup archive sizes"
+      debug:
+        msg: "{{ _du_archives_output.stdout_lines }}"
+
+    # Upload backup archives to S3
+    # -------------------------------------------------------------------------
+    - name: "Set fact: S3 destination prefix for this backup job"
+      set_fact:
+        docdb_s3_job_prefix: "{{ docdb_backup_s3_prefix | default('docdb-backups') }}/{{ docdb_cluster_name }}/{{ masbr_job_name }}/{{ masbr_job_data_type }}"
+
+    - name: "Upload mongodump archive to S3"
+      shell: >
+        aws s3 cp
+        '{{ docdb_backup_data_dir }}/{{ masbr_job_name }}-dump.tar.gz'
+        's3://{{ docdb_backup_s3_bucket }}/{{ docdb_s3_job_prefix }}/{{ masbr_job_name }}-dump.tar.gz'
+        --region '{{ aws_region }}'
+        2>&1 | tee -a {{ docdb_backup_log_file }}
+      register: _s3_upload_dump_output
+      changed_when: true
+
+    - name: "Debug: S3 upload dump result"
+      debug:
+        msg: "{{ _s3_upload_dump_output.stdout_lines }}"
+
+    - name: "Upload index archive to S3"
+      shell: >
+        aws s3 cp
+        '{{ docdb_backup_data_dir }}/{{ masbr_job_name }}-indexes.tar.gz'
+        's3://{{ docdb_backup_s3_bucket }}/{{ docdb_s3_job_prefix }}/{{ masbr_job_name }}-indexes.tar.gz'
+        --region '{{ aws_region }}'
+        2>&1 | tee -a {{ docdb_backup_log_file }}
+      register: _s3_upload_index_output
+      changed_when: true
+
+    - name: "Debug: S3 upload index result"
+      debug:
+        msg: "{{ _s3_upload_index_output.stdout_lines }}"
+
+    # Upload individual per-database index JSON files to S3 (uncompressed for easy access)
+    # -------------------------------------------------------------------------
+    - name: "Upload per-database index JSON file to S3"
+      shell: >
+        aws s3 cp
+        '{{ docdb_backup_index_dir }}/{{ item.name }}-indexes.json'
+        's3://{{ docdb_backup_s3_bucket }}/{{ docdb_s3_job_prefix }}/indexes/{{ item.name }}-indexes.json'
+        --region '{{ aws_region }}'
+        2>&1 | tee -a {{ docdb_backup_log_file }}
+      loop: "{{ docdb_db_names }}"
+      register: _s3_upload_index_json_output
+      changed_when: true
+
+    - name: "Upload combined index manifest JSON to S3"
+      shell: >
+        aws s3 cp
+        '{{ docdb_backup_index_dir }}/all-indexes-manifest.json'
+        's3://{{ docdb_backup_s3_bucket }}/{{ docdb_s3_job_prefix }}/indexes/all-indexes-manifest.json'
+        --region '{{ aws_region }}'
+        2>&1 | tee -a {{ docdb_backup_log_file }}
+      register: _s3_upload_manifest_output
+      changed_when: true
+
+    # Write backup metadata to S3
+    # -------------------------------------------------------------------------
+    - name: "Get current timestamp"
+      shell: date -u +"%Y-%m-%dT%H:%M:%SZ"
+      register: _backup_timestamp_output
+      changed_when: false
+
+    - name: "Create backup metadata file"
+      copy:
+        dest: "{{ docdb_backup_data_dir }}/backup-metadata.json"
+        content: |
+          {
+            "backup_job_name": "{{ masbr_job_name }}",
+            "backup_timestamp": "{{ _backup_timestamp_output.stdout }}",
+            "docdb_cluster_name": "{{ docdb_cluster_name }}",
+            "docdb_engine_version": "{{ mongodb_version }}",
+            "aws_region": "{{ aws_region }}",
+            "mas_instance_id": "{{ mas_instance_id }}",
+            "mas_app_id": "{{ mas_app_id | default('') }}",
+            "databases_backed_up": {{ docdb_db_names | map(attribute='name') | list | to_json }},
+            "s3_bucket": "{{ docdb_backup_s3_bucket }}",
+            "s3_prefix": "{{ docdb_s3_job_prefix }}",
+            "files": {
+              "dump_archive": "{{ masbr_job_name }}-dump.tar.gz",
+              "index_archive": "{{ masbr_job_name }}-indexes.tar.gz",
+              "index_manifest": "indexes/all-indexes-manifest.json"
+            }
+          }
+        mode: "0644"
+      changed_when: true
+
+    - name: "Upload backup metadata to S3"
+      shell: >
+        aws s3 cp
+        '{{ docdb_backup_data_dir }}/backup-metadata.json'
+        's3://{{ docdb_backup_s3_bucket }}/{{ docdb_s3_job_prefix }}/backup-metadata.json'
+        --region '{{ aws_region }}'
+        2>&1 | tee -a {{ docdb_backup_log_file }}
+      register: _s3_upload_metadata_output
+      changed_when: true
+
+    - name: "Debug: S3 backup location summary"
+      debug:
+        msg:
+          - "Backup job name ............................ {{ masbr_job_name }}"
+          - "S3 bucket .................................. {{ docdb_backup_s3_bucket }}"
+          - "S3 prefix .................................. {{ docdb_s3_job_prefix }}"
+          - "Dump archive ............................... s3://{{ docdb_backup_s3_bucket }}/{{ docdb_s3_job_prefix }}/{{ masbr_job_name }}-dump.tar.gz"
+          - "Index archive .............................. s3://{{ docdb_backup_s3_bucket }}/{{ docdb_s3_job_prefix }}/{{ masbr_job_name }}-indexes.tar.gz"
+          - "Index manifest ............................. s3://{{ docdb_backup_s3_bucket }}/{{ docdb_s3_job_prefix }}/indexes/all-indexes-manifest.json"
+          - "Backup metadata ............................ s3://{{ docdb_backup_s3_bucket }}/{{ docdb_s3_job_prefix }}/backup-metadata.json"
+
+  rescue:
+    # Re-raise instead of debug: a rescue that only debugs marks the block as
+    # rescued (success), so the caller's Failed-status update would never run
+    - name: "Fail: database backup failed"
+      fail:
+        msg: "Database backup failed. Check logs for details."
+
+  always:
+    # Upload backup log to S3
+    # -------------------------------------------------------------------------
+    - name: "Upload backup log to S3"
+      when:
+        - docdb_backup_log_file is defined
+        - docdb_backup_s3_bucket is defined
+        - docdb_s3_job_prefix is defined
+      shell: >
+        aws s3 cp
+        '{{ docdb_backup_log_file }}'
+        's3://{{ docdb_backup_s3_bucket }}/{{ docdb_s3_job_prefix }}/log/{{ masbr_job_name }}-backup.log'
+        --region '{{ aws_region }}'
+      register: _s3_upload_log_output
+      changed_when: true
+      ignore_errors: true
+
+    - name: "Debug: backup log uploaded to S3"
+      when: _s3_upload_log_output is defined and _s3_upload_log_output.rc is defined and _s3_upload_log_output.rc == 0
+      debug:
+        msg: "Backup log uploaded to ..................... s3://{{ docdb_backup_s3_bucket }}/{{ docdb_s3_job_prefix }}/log/{{ masbr_job_name }}-backup.log"
diff --git a/ibm/mas_devops/roles/mongodb/tasks/providers/aws/backup-restore/before-backup-restore.yml b/ibm/mas_devops/roles/mongodb/tasks/providers/aws/backup-restore/before-backup-restore.yml
new file mode 100644
index 0000000000..d6a5378925
--- /dev/null
+++ b/ibm/mas_devops/roles/mongodb/tasks/providers/aws/backup-restore/before-backup-restore.yml
@@ -0,0 +1,154 @@
+---
+# Set AWS DocumentDB backup variables
+# -----------------------------------------------------------------------------
+- name: "Fail if docdb_cluster_name is not provided"
+  assert:
+    that: docdb_cluster_name is defined and docdb_cluster_name != ""
+    fail_msg: "docdb_cluster_name is required"
+
+- name: "Fail if aws_region is not provided"
+  assert:
+    that: aws_region is defined and aws_region != ""
+    fail_msg: "aws_region is required"
+
+- name: "Fail if docdb_backup_s3_bucket is not provided"
+  assert:
+    that: docdb_backup_s3_bucket is defined and docdb_backup_s3_bucket != ""
+    fail_msg: "docdb_backup_s3_bucket is required - specify the S3 bucket name to store backups"
+
+- name: "Fail if docdb_master_username is not provided"
+  assert:
+    that: docdb_master_username is defined and docdb_master_username != ""
+    fail_msg: "docdb_master_username is required"
+
+- name: "Fail if docdb_master_password is not provided"
+  assert:
+    that: docdb_master_password is defined and docdb_master_password != ""
+    fail_msg: "docdb_master_password is required"
+
+# Verify required tools are available
+# -----------------------------------------------------------------------------
+- name: "Test if mongosh is installed"
+  shell: mongosh --version
+  register: _mongosh_version_output
+  ignore_errors: true
+  changed_when: false
+
+- name: "Fail if mongosh is not installed"
+  assert:
+    that: _mongosh_version_output.rc == 0
+    fail_msg: "mongosh must be installed (https://www.mongodb.com/docs/mongodb-shell/install/)"
+
+- name: "Test if mongodump is installed"
+  shell: mongodump --version
+  register: _mongodump_version_output
+  ignore_errors: true
+  changed_when: false
+
+- name: "Fail if mongodump is not installed"
+  assert:
+    that: _mongodump_version_output.rc == 0
+    fail_msg: "mongodump must be installed (https://www.mongodb.com/docs/database-tools/mongodump/)"
+
+- name: "Test if aws CLI is installed"
+  shell: aws --version
+  register: _aws_cli_version_output
+  ignore_errors: true
+  changed_when: false
+
+- name: "Fail if aws CLI is not installed"
+  assert:
+    that: _aws_cli_version_output.rc == 0
+    fail_msg: "aws CLI must be installed (https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html)"
+
+# Set local backup working directory
+# -----------------------------------------------------------------------------
+- name: "Set fact: local backup working directory"
+  set_fact:
+    docdb_local_backup_dir: "{{ masbr_local_temp_folder }}/docdb-backup/{{ masbr_job_name }}"
+
+- name: "Create local backup working directory"
+  file:
+    path: "{{ docdb_local_backup_dir }}/{{ masbr_job_data_type | default('database') }}"
+    state: directory
+    mode: "0755"
+  changed_when: true
+
+# Download AWS DocumentDB CA certificate
+# -----------------------------------------------------------------------------
+- name: "Set fact: DocumentDB CA certificate path"
+  set_fact:
+    docdb_ca_file: "/tmp/rds-ca-{{ aws_region }}.pem"
+
+- name: "Check if region-specific CA certificate already exists"
+  stat:
+    path: "{{ docdb_ca_file }}"
+  register: _docdb_ca_stat
+
+- name: "Copy region-specific DocumentDB CA certificate from role files"
+  copy:
+    src: "providers/aws/ca/root-ca-rsa2048-{{ aws_region }}.pem"
+    dest: "{{ docdb_ca_file }}"
+    mode: "0644"
+  when: not _docdb_ca_stat.stat.exists
+  register: _docdb_ca_copy_result
+  ignore_errors: true
+
+# Fallback: fetch the current global RDS trust bundle (covers rds-ca-rsa2048-g1
+# and newer CAs); the old s3.amazonaws.com/rds-downloads bundle is deprecated
+- name: "Download combined AWS RDS CA bundle if region-specific cert not available"
+  when: not _docdb_ca_stat.stat.exists and _docdb_ca_copy_result is failed
+  shell: >
+    wget -q https://truststore.pki.rds.amazonaws.com/global/global-bundle.pem
+    -O {{ docdb_ca_file }}
+  register: _docdb_ca_download_output
+  changed_when: true
+
+- name: "Check if DocumentDB CA certificate is available"
+  stat:
+    path: "{{ docdb_ca_file }}"
+  register: _docdb_ca_final_stat
+
+- name: "Assert DocumentDB CA certificate exists"
+  assert:
+    that: _docdb_ca_final_stat.stat.exists
+    fail_msg: "DocumentDB CA certificate not found at {{ docdb_ca_file }}"
+
+# Verify connectivity to DocumentDB cluster
+# -----------------------------------------------------------------------------
+- name: "Set fact: DocumentDB connection string (primary endpoint)"
+  set_fact:
+    docdb_primary_host: "{{ docdb_cluster_endpoint }}:{{ docdb_cluster_port }}"
+
+- name: "Test connectivity to DocumentDB cluster"
+  shell: >
+    mongosh --tls
+    --host '{{ docdb_primary_host }}'
+    --tlsCAFile '{{ docdb_ca_file }}'
+    --username '{{ docdb_master_username }}'
+    --password '{{ docdb_master_password }}'
+    --eval 'db.adminCommand({ ping: 1 })'
+    --quiet
+  register: _docdb_connect_test
+  no_log: true
+  retries: 3
+  delay: 10
+  until: _docdb_connect_test.rc == 0
+  changed_when: false
+
+- name: "Fail if cannot connect to DocumentDB cluster"
+  assert:
+    that: _docdb_connect_test.rc == 0
+    fail_msg: "Cannot connect to DocumentDB cluster '{{ docdb_cluster_name }}' at {{ docdb_primary_host }}"
+
+# Output connection information
+# -----------------------------------------------------------------------------
+- name: "Debug: DocumentDB backup connection information"
+  debug:
+    msg:
+      - "DocumentDB Cluster Name .................... {{ docdb_cluster_name }}"
+      - "DocumentDB Primary Host .................... {{ docdb_primary_host }}"
+      - "DocumentDB CA Certificate .................. {{ docdb_ca_file }}"
+      - "DocumentDB Master Username ................. {{ docdb_master_username }}"
+      - "AWS Region ................................. {{ aws_region }}"
+      - "S3 Backup Bucket ........................... {{ docdb_backup_s3_bucket }}"
+      - "S3 Backup Prefix ........................... {{ docdb_backup_s3_prefix | default('docdb-backups') }}"
+      - "Local Backup Directory ..................... {{ docdb_local_backup_dir }}"
diff --git a/ibm/mas_devops/roles/mongodb/tasks/providers/aws/backup-restore/get-docdb-info.yml b/ibm/mas_devops/roles/mongodb/tasks/providers/aws/backup-restore/get-docdb-info.yml
new file mode 100644
index 0000000000..8ce7ca79da
--- /dev/null
+++ b/ibm/mas_devops/roles/mongodb/tasks/providers/aws/backup-restore/get-docdb-info.yml
@@ -0,0 +1,72 @@
+---
+# Get AWS DocumentDB cluster version and status
+# -----------------------------------------------------------------------------
+- name: "Fail if docdb_cluster_name is not provided"
+  assert:
+    that: docdb_cluster_name is defined and docdb_cluster_name != ""
+    fail_msg: "docdb_cluster_name is required"
+
+- name: "Fail if aws_region is not provided"
+  assert:
+    that: aws_region is defined and aws_region != ""
+    fail_msg: "aws_region is required"
+
+# Verify AWS authentication is valid before making any AWS API calls
+# -----------------------------------------------------------------------------
+- name: "Verify AWS authentication (check for expired SSO token)"
+  command: aws sts get-caller-identity --region '{{ aws_region }}'
+  register: _aws_auth_check
+  failed_when: false
+  changed_when: false
+
+- name: "Fail if AWS authentication is invalid or SSO token has expired"
+  assert:
+    that: _aws_auth_check.rc == 0
+    fail_msg: >-
+      AWS authentication failed (rc={{ _aws_auth_check.rc }}).
+      {{ _aws_auth_check.stderr | default('') }}
+      If you are using AWS SSO, your session token may have expired.
+      Please re-authenticate by running: aws sso login --profile <profile-name>
+      Then retry the playbook.
+
+- name: "Get DocumentDB cluster information"
+  command: >
+    aws docdb describe-db-clusters
+    --db-cluster-identifier '{{ docdb_cluster_name }}'
+    --region '{{ aws_region }}'
+  register: _docdb_cluster_info_output
+  failed_when: >
+    _docdb_cluster_info_output.rc not in [0] and
+    ('DBClusterNotFoundFault' not in _docdb_cluster_info_output.stderr)
+
+- name: "Fail if DocumentDB cluster does not exist"
+  assert:
+    that:
+      - _docdb_cluster_info_output.rc == 0
+      - _docdb_cluster_info_output.stdout | length > 0
+    fail_msg: "DocumentDB cluster '{{ docdb_cluster_name }}' does not exist!"
+
+- name: "Set fact: DocumentDB cluster info"
+  set_fact:
+    _docdb_cluster_data: "{{ _docdb_cluster_info_output.stdout | from_json | json_query('DBClusters[0]') }}"
+
+- name: "Set fact: DocumentDB version and endpoint"
+  set_fact:
+    mongodb_version: "{{ _docdb_cluster_data.EngineVersion }}"
+    docdb_cluster_endpoint: "{{ _docdb_cluster_data.Endpoint }}"
+    docdb_cluster_port: "{{ _docdb_cluster_data.Port }}"
+    docdb_cluster_status: "{{ _docdb_cluster_data.Status }}"
+
+- name: "Fail if DocumentDB cluster is not available"
+  assert:
+    that: docdb_cluster_status == "available"
+    fail_msg: "DocumentDB cluster '{{ docdb_cluster_name }}' is not available (status: {{ docdb_cluster_status }})!"
+
+- name: "Debug: DocumentDB cluster information"
+  debug:
+    msg:
+      - "DocumentDB Cluster Name .................... {{ docdb_cluster_name }}"
+      - "DocumentDB Engine Version .................. {{ mongodb_version }}"
+      - "DocumentDB Cluster Endpoint ................ {{ docdb_cluster_endpoint }}"
+      - "DocumentDB Cluster Port .................... {{ docdb_cluster_port }}"
+      - "DocumentDB Cluster Status .................. {{ docdb_cluster_status }}"
diff --git a/ibm/mas_devops/roles/mongodb/tasks/providers/aws/backup.yml b/ibm/mas_devops/roles/mongodb/tasks/providers/aws/backup.yml
new file mode 100644
index 0000000000..cc15880b8f
--- /dev/null
+++ b/ibm/mas_devops/roles/mongodb/tasks/providers/aws/backup.yml
@@ -0,0 +1,100 @@
+---
+# Check required variables for AWS DocumentDB backup
+# -----------------------------------------------------------------------------
+- name: "Fail if mas_instance_id is not provided"
+  assert:
+    that: mas_instance_id is defined and mas_instance_id != ""
+    fail_msg: "mas_instance_id is required"
+
+- name: "Fail if docdb_cluster_name is not provided"
+  assert:
+    that: docdb_cluster_name is defined and docdb_cluster_name != ""
+    fail_msg: "docdb_cluster_name is required"
+
+- name: "Fail if aws_region is not provided"
+  assert:
+    that: aws_region is defined and aws_region != ""
+    fail_msg: "aws_region is required"
+
+- name: "Fail if docdb_backup_s3_bucket is not provided"
+  assert:
+    that: docdb_backup_s3_bucket is defined and docdb_backup_s3_bucket != ""
+    fail_msg: "docdb_backup_s3_bucket is required - specify the S3 bucket name to store backups"
+
+- name: "Fail if docdb_master_username is not provided"
+  assert:
+    that: docdb_master_username is defined and docdb_master_username != ""
+    fail_msg: "docdb_master_username is required"
+
+- name: "Fail if docdb_master_password is not provided"
+  assert:
+    that: docdb_master_password is defined and docdb_master_password != ""
+    fail_msg: "docdb_master_password is required"
+
+# Get DocumentDB cluster information
+# -------------------------------------------------------------------------
+- name: "Get DocumentDB cluster information"
+  include_tasks: "{{ role_path }}/tasks/providers/aws/backup-restore/get-docdb-info.yml"
+
+# Set common backup job variables
+# -----------------------------------------------------------------------------
+- name: "Set fact: common backup job variables"
+  set_fact:
+    masbr_job_component:
+      name: "mongodb"
+      instance: "{{ mas_instance_id }}"
+      app: "{{ mas_app_id | default('') }}"
+      namespace: "{{ mongodb_namespace }}"
+      provider: "{{ mongodb_provider }}"
+      version: "{{ mongodb_version }}"
+    masbr_job_data_list:
+      - seq: "1"
+        type: "database"
+
+# Before run tasks
+# -------------------------------------------------------------------------
+- name: "Before run tasks"
+  include_tasks: "{{ role_path }}/../../common_tasks/backup_restore/before_run_tasks.yml"
+  vars:
+    _ignore_masbr_backup_data: true
+    _job_type: "backup"
+    _component_before_task_path: "{{ role_path }}/tasks/providers/aws/backup-restore/before-backup-restore.yml"
+
+- name: "Perform DocumentDB backup"
+  block:
+    # Update backup job status: New
+    # -------------------------------------------------------------------------
+    - name: "Update backup job status: New"
+      include_tasks: "{{ role_path }}/../../common_tasks/backup_restore/update_job_status.yml"
+      vars:
+        _job_data_list:
+          - seq: "1"
+            phase: "New"
+
+    # Run backup tasks for each data type
+    # -------------------------------------------------------------------------
+    - name: "Run backup tasks for each data type"
+      include_tasks: "{{ role_path }}/tasks/providers/aws/backup-restore/backup-{{ job_data_item.type }}.yml"
+      vars:
+        masbr_job_data_seq: "{{ job_data_item.seq }}"
+        masbr_job_data_type: "{{ job_data_item.type }}"
+      loop: "{{ masbr_job_data_list }}"
+      loop_control:
+        loop_var: job_data_item
+
+  rescue:
+    # Update backup status: Failed
+    # -------------------------------------------------------------------------
+    - name: "Update database backup status: Failed"
+      include_tasks: "{{ role_path }}/../../common_tasks/backup_restore/update_job_status.yml"
+      vars:
+        _job_status:
+          phase: "Failed"
+
+  always:
+    # After run tasks
+    # -------------------------------------------------------------------------
+    - name: "After run tasks"
+      include_tasks: "{{ role_path }}/../../common_tasks/backup_restore/after_run_tasks.yml"
+      vars:
+        _component_after_task_path: "{{ role_path }}/tasks/providers/aws/backup-restore/after-backup-restore.yml"