-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path_database.sh
More file actions
executable file
·148 lines (115 loc) · 5.18 KB
/
_database.sh
File metadata and controls
executable file
·148 lines (115 loc) · 5.18 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
#!/bin/bash
# Export SHELL pointing at bash so that GNU parallel (used below) runs its
# jobs under bash — required for the exported bash functions (see
# `export -f` further down) to be callable from parallel's workers.
export SHELL="$(type -p bash)"
function perform_full_oracle_backup() {
  # Run a full Oracle Data Pump export into DB_STAGING_DIR, then upload the
  # resulting dump files and clear the staging directory.
  #
  # Globals (read): DB_USERNAME, DB_PASSWORD, DB_HOSTNAME,
  #   DB_ORACLE_SERVICE_NAME, DB_STAGING_DIR, PARALLEL_PROCESSES
  # Requires: sqlplus, expdp, GNU parallel, cp_to_cloud (defined elsewhere)

  # Create the appropriate directory in Oracle
  # See https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.Oracle.CommonDBATasks.Misc.html
  # for how to create a directory in RDS Oracle
  sqlplus "${DB_USERNAME}"/"${DB_PASSWORD}"@"${DB_HOSTNAME}/${DB_ORACLE_SERVICE_NAME}" <<EOF
CREATE or REPLACE DIRECTORY cloud_dpump_dir as '${DB_STAGING_DIR}';
GRANT READ, WRITE ON DIRECTORY cloud_dpump_dir TO ${DB_USERNAME};
EOF
  # Export full database into proprietary Oracle format
  # See:
  # https://docs.oracle.com/database/121/SUTIL/GUID-1E134053-692A-4386-BB77-153CB4A6071A.htm#SUTIL887
  # https://stackoverflow.com/questions/16415120/exp-command-accepts-host-and-port-to-export-remote-db-tables
  # https://aws.amazon.com/premiumsupport/knowledge-center/rds-oracle-instant-client-datapump/
  expdp "${DB_USERNAME}"/"${DB_PASSWORD}"@"${DB_HOSTNAME}/${DB_ORACLE_SERVICE_NAME}" FULL=YES \
    DUMPFILE=cloud_dpump_dir:exp_full_%U.dmp FILESIZE=4G PARALLEL="${PARALLEL_PROCESSES}" \
    LOGFILE=cloud_dpump_dir:exp_full.log JOB_NAME=exp_full
  # REUSE_DUMPFILES=YES
  # Upload dmp files in parallel. Glob instead of parsing `ls` output so
  # unusual filenames cannot be word-split or glob-expanded en route.
  # TODO: execute this remotely if on a different host
  local dumps=()
  local dmp
  for dmp in "${DB_STAGING_DIR}"/*.dmp; do
    [[ -e "$dmp" ]] && dumps+=("${dmp##*/}")
  done
  if (( ${#dumps[@]} > 0 )); then
    printf '%s\n' "${dumps[@]}" | parallel -j "${PARALLEL_PROCESSES}" cp_to_cloud "$DB_STAGING_DIR" {}
  fi
  # "${DB_STAGING_DIR:?}" aborts instead of expanding to "/*" if the variable
  # is somehow unset/empty, so we can never rm at the filesystem root.
  if [[ -n "${DB_STAGING_DIR}" ]]; then
    rm -r -- "${DB_STAGING_DIR:?}"/*
  fi
}
function perform_postgres_table_backup() {
  # Back up one postgres database into the staging directory and upload each
  # finished artifact with cp_to_cloud. Intended to be invoked by GNU
  # parallel, hence it is exported below and runs with its own strict mode.
  #
  # Arguments:
  #   $1 - staging directory (expected to end with "/")
  #   $2 - database name
  #   $3 - parallel worker slot number (used in log messages only)
  # Globals (read): DB_ENABLE_PLAIN_BACKUPS, DB_ENABLE_CUSTOM_BACKUPS,
  #   DB_HOSTNAME, DB_USERNAME, RED, NO_COLOR
  set -euo pipefail
  local full_db_staging_dir=$1
  local database=$2
  local process=$3
  if [ "${DB_ENABLE_PLAIN_BACKUPS}" = "yes" ]
  then
    echo "Plain backup of $database (process ${process})"
    # Write to an .in_progress name first so a partially written dump is
    # never mistaken for a complete backup; rename only on success.
    # (pipefail is already enabled by the strict mode above, so the gzip
    # pipeline correctly reports pg_dump failures.)
    if ! pg_dump -Fp -h "$DB_HOSTNAME" -U "$DB_USERNAME" "$database" | gzip > "${full_db_staging_dir}${database}.sql.gz.in_progress"; then
      # ${RED:-}/${NO_COLOR:-}: tolerate unset color vars under `set -u`
      # (they are only set/exported by the parent script).
      echo "${RED:-}[!!ERROR!!] Failed to produce plain backup database ${database}${NO_COLOR:-}" 1>&2
    else
      mv "${full_db_staging_dir}${database}.sql.gz.in_progress" "${full_db_staging_dir}${database}.sql.gz"
      cp_to_cloud "$full_db_staging_dir" "${database}.sql.gz"
    fi
  fi
  if [ "$DB_ENABLE_CUSTOM_BACKUPS" = "yes" ]
  then
    echo "Custom backup of $database (process ${process})"
    if ! pg_dump -Fc -h "$DB_HOSTNAME" -U "$DB_USERNAME" "$database" -f "${full_db_staging_dir}${database}.custom.in_progress"; then
      # Send the diagnostic to stderr, consistent with the plain-backup path.
      echo "${RED:-}[!!ERROR!!] Failed to produce custom backup database ${database}${NO_COLOR:-}" 1>&2
    else
      mv "${full_db_staging_dir}${database}.custom.in_progress" "${full_db_staging_dir}${database}.custom"
      cp_to_cloud "$full_db_staging_dir" "${database}.custom"
    fi
  fi
}
# Make the worker function and every variable it reads visible to the
# subshells spawned by GNU parallel; non-exported variables would be unset
# there and trip the worker's `set -u`.
export -f perform_postgres_table_backup
export DB_ENABLE_PLAIN_BACKUPS
export DB_ENABLE_CUSTOM_BACKUPS
export DB_HOSTNAME
export DB_USERNAME
# Color codes are referenced in the worker's error messages; export them too
# so the workers can colorize output (exporting an unset variable is a no-op
# until it is assigned, so this is safe even if colors are never configured).
export RED
export NO_COLOR
function perform_backups() {
  ###########################
  #### START THE BACKUPS ####
  ###########################
  # Dispatch on DB_TYPE ("oracle" or "postgres"), run the matching backup
  # routine, upload the artifacts, then clear the staging directory.
  #
  # Globals (read): DB_TYPE, DB_STAGING_DIR, DB_TODAY, DB_HOSTNAME,
  #   DB_USERNAME, DB_ENABLE_GLOBALS_BACKUPS, PARALLEL_PROCESSES,
  #   GREEN, BOLD, NORMAL, NO_COLOR, CELEBRATE
  # Exits non-zero if the staging directory cannot be created or the
  # database list cannot be fetched.
  if [ "${DB_TYPE}" = "oracle" ]; then
    echo -e "${GREEN}${BOLD}\n\nPerforming full backups"
    echo -e "--------------------------------------------\n${NORMAL}${NO_COLOR}"
    perform_full_oracle_backup
    # "${DB_STAGING_DIR:?}" aborts instead of expanding to "/*" if the
    # variable is somehow unset/empty — never rm at the filesystem root.
    if [[ -n "${DB_STAGING_DIR}" ]]; then
      rm -r -- "${DB_STAGING_DIR:?}"/*
    fi
  elif [ "${DB_TYPE}" = "postgres" ]; then
    suffix="${DB_TODAY}/"
    full_db_staging_dir="${DB_STAGING_DIR}${suffix}"
    echo "Making backup directory in $full_db_staging_dir"
    if ! mkdir -p "$full_db_staging_dir"; then
      echo "Cannot create backup directory in $full_db_staging_dir. Go and fix it!" 1>&2
      exit 1;
    fi;
    #######################
    ### GLOBALS BACKUPS ###
    #######################
    echo -e "${GREEN}${BOLD}\n\nPerforming globals backup"
    echo -e "--------------------------------------------\n${NORMAL}${NO_COLOR}"
    if [ "$DB_ENABLE_GLOBALS_BACKUPS" = "yes" ]
    then
      echo "Globals backup"
      # pipefail so a pg_dumpall failure is not masked by gzip's success.
      set -o pipefail
      # Dump to an .in_progress name first; rename only once complete.
      if ! pg_dumpall -g -h "$DB_HOSTNAME" -U "$DB_USERNAME" | gzip > "${full_db_staging_dir}globals.sql.gz.in_progress"; then
        echo "[!!ERROR!!] Failed to produce globals backup" 1>&2
      else
        mv "${full_db_staging_dir}globals.sql.gz.in_progress" "${full_db_staging_dir}globals.sql.gz"
        cp_to_cloud "$full_db_staging_dir" "globals.sql.gz"
      fi
      set +o pipefail
    else
      echo "None"
    fi
    ###########################
    ###### FULL BACKUPS #######
    ###########################
    full_backup_query="select datname from pg_database where not datistemplate and datallowconn order by datname;"
    echo -e "${GREEN}${BOLD}\n\nPerforming full backups"
    echo -e "--------------------------------------------\n${NORMAL}${NO_COLOR}"
    # Abort if we cannot even list the databases; otherwise an empty list
    # would be fed to parallel and the worker would run against "".
    if ! databases=$(psql -h "$DB_HOSTNAME" -U "$DB_USERNAME" -At -c "$full_backup_query" postgres); then
      echo "[!!ERROR!!] Failed to list databases on ${DB_HOSTNAME}" 1>&2
      exit 1
    fi
    # Backup using GNU Parallel; {} is the database name, {%} the job slot.
    printf '%s\n' "$databases" | parallel -j "${PARALLEL_PROCESSES}" perform_postgres_table_backup "$full_db_staging_dir" {} {%}
    if [[ -n "${full_db_staging_dir}" ]]; then
      rm -r -- "${full_db_staging_dir:?}"/*
    fi
  fi
  echo -e "${GREEN}${BOLD}\n${CELEBRATE} All database backups complete!${NORMAL}${NO_COLOR}"
}