#!/bin/bash
# Author: yeho <lj2007331 AT gmail.com>
# BLOG: https://linuxeye.com
#
# Notes: OneinStack for CentOS/RedHat 7+, Debian 9+ and Ubuntu 16+
#
# Project home page:
# https://oneinstack.com
# https://github.com/oneinstack/oneinstack
export PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin
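
# Backup settings (backup_destination, backup_content, db_name, website_name,
# backup_dir, expired_days and the cloud bucket names) are read from
# ../options.conf below.
# Typical usage (an assumption, not defined in this file): run daily from cron, e.g.
#   0 1 * * * cd /root/oneinstack && ./backup.sh > /dev/null 2>&1
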
# Check if user is root
[ $(id -u) != "0" ] && { echo "${CFAILURE}Error: You must be root to run this script${CEND}"; exit 1; }

oneinstack_dir=$(dirname "`readlink -f $0`")
pushd ${oneinstack_dir}/tools > /dev/null
. ../options.conf
[ ! -e "${backup_dir}" ] && mkdir -p ${backup_dir}
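
# Local database backups: dump every database listed in db_name into
# ${backup_dir} via db_bk.sh.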
DB_Local_BK() {
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
  done
}
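
# Remote database backups: dump locally, then append push/cleanup entries to
# config_backup.txt for mabs.sh (invoked by the dispatch loop at the bottom).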
DB_Remote_BK() {
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
    DB_GREP="DB_${D}_`date +%Y%m%d`"
    DB_FILE=`ls -lrt ${backup_dir} | grep ${DB_GREP} | tail -1 | awk '{print $NF}'`
    echo "file:::${backup_dir}/${DB_FILE} ${backup_dir} push" >> config_backup.txt
    echo "com:::[ -e "${backup_dir}/${DB_FILE}" ] && rm -rf ${backup_dir}/DB_${D}_$(date +%Y%m%d --date="${expired_days} days ago")_*.tgz" >> config_backup.txt
  done
}
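
# Aliyun OSS: upload the newest dump with ossutil, prune the day that is
# ${expired_days} days old, and keep the local copy only if 'local' is also a
# backup destination.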
DB_OSS_BK() {
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
    DB_GREP="DB_${D}_`date +%Y%m%d`"
    DB_FILE=`ls -lrt ${backup_dir} | grep ${DB_GREP} | tail -1 | awk '{print $NF}'`
    ossutil cp -f ${backup_dir}/${DB_FILE} oss://${oss_bucket}/`date +%F`/${DB_FILE}
    if [ $? -eq 0 ]; then
      ossutil rm -rf oss://${oss_bucket}/`date +%F --date="${expired_days} days ago"`/
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${backup_dir}/${DB_FILE}
    fi
  done
}
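
# Tencent COS: same pattern as OSS, using coscli.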
DB_COS_BK() {
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
    DB_GREP="DB_${D}_`date +%Y%m%d`"
    DB_FILE=`ls -lrt ${backup_dir} | grep ${DB_GREP} | tail -1 | awk '{print $NF}'`
    coscli sync ${backup_dir}/${DB_FILE} cos://${cos_bucket}/`date +%F`/${DB_FILE}
    if [ $? -eq 0 ]; then
      coscli rm -rf cos://${cos_bucket}/`date +%F --date="${expired_days} days ago"` > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${backup_dir}/${DB_FILE}
    fi
  done
}
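
# UPYUN: same pattern, using the upx client.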
DB_UPYUN_BK() {
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
    DB_GREP="DB_${D}_`date +%Y%m%d`"
    DB_FILE=`ls -lrt ${backup_dir} | grep ${DB_GREP} | tail -1 | awk '{print $NF}'`
    upx put ${backup_dir}/${DB_FILE} /`date +%F`/${DB_FILE}
    if [ $? -eq 0 ]; then
      upx rm -a `date +%F --date="${expired_days} days ago"` > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${backup_dir}/${DB_FILE}
    fi
  done
}
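
# Qiniu: upload with qshell, then list the expired day's keys and batch-delete them.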
DB_QINIU_BK() {
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
    DB_GREP="DB_${D}_`date +%Y%m%d`"
    DB_FILE=`ls -lrt ${backup_dir} | grep ${DB_GREP} | tail -1 | awk '{print $NF}'`
    qshell rput ${qiniu_bucket} /`date +%F`/${DB_FILE} ${backup_dir}/${DB_FILE}
    if [ $? -eq 0 ]; then
      qshell listbucket ${qiniu_bucket} /`date +%F --date="${expired_days} days ago"` /tmp/qiniu.txt > /dev/null 2>&1
      qshell batchdelete -force ${qiniu_bucket} /tmp/qiniu.txt > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${backup_dir}/${DB_FILE}
      rm -f /tmp/qiniu.txt
    fi
  done
}
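
# Amazon S3: upload the dump with `aws s3 cp` (sync only handles directories)
# and prune the expired day's prefix.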
DB_S3_BK() {
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
    DB_GREP="DB_${D}_`date +%Y%m%d`"
    DB_FILE=`ls -lrt ${backup_dir} | grep ${DB_GREP} | tail -1 | awk '{print $NF}'`
    aws s3 cp ${backup_dir}/${DB_FILE} s3://${s3_bucket}/`date +%F`/${DB_FILE}
    if [ $? -eq 0 ]; then
      aws s3 rm --recursive s3://${s3_bucket}/`date +%F --date="${expired_days} days ago"` > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${backup_dir}/${DB_FILE}
    fi
  done
}
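
# Dropbox: upload with dbxcli and remove the expired day's folder.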
DB_DROPBOX_BK() {
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
    DB_GREP="DB_${D}_`date +%Y%m%d`"
    DB_FILE=`ls -lrt ${backup_dir} | grep ${DB_GREP} | tail -1 | awk '{print $NF}'`
    dbxcli put ${backup_dir}/${DB_FILE} `date +%F`/${DB_FILE}
    if [ $? -eq 0 ]; then
      dbxcli rm -f `date +%F --date="${expired_days} days ago"` > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${backup_dir}/${DB_FILE}
    fi
  done
}
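
# Local website backups: archive every site listed in website_name via website_bk.sh.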
WEB_LOCAL_BK() {
  for W in `echo ${website_name} | tr ',' ' '`
  do
    ./website_bk.sh $W
  done
}
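
# Remote website backups: sites under 2 GB are archived first and the tarball
# is pushed; larger sites are pushed as a directory tree.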
WEB_Remote_BK() {
  for W in `echo ${website_name} | tr ',' ' '`
  do
    if [ `du -sm "${wwwroot_dir}/${W}" | awk '{print $1}'` -lt 2048 ]; then
      ./website_bk.sh $W
      Web_GREP="Web_${W}_`date +%Y%m%d`"
      Web_FILE=`ls -lrt ${backup_dir} | grep ${Web_GREP} | tail -1 | awk '{print $NF}'`
      echo "file:::${backup_dir}/${Web_FILE} ${backup_dir} push" >> config_backup.txt
      echo "com:::[ -e "${backup_dir}/${Web_FILE}" ] && rm -rf ${backup_dir}/Web_${W}_$(date +%Y%m%d --date="${expired_days} days ago")_*.tgz" >> config_backup.txt
    else
      echo "file:::${wwwroot_dir}/$W ${backup_dir} push" >> config_backup.txt
    fi
  done
}
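
# Website backups to Aliyun OSS: tar the docroot (reusing an archive made
# within the same hour), upload with ossutil, then prune the expired day's prefix.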
WEB_OSS_BK() {
  for W in `echo ${website_name} | tr ',' ' '`
  do
    [ ! -e "${wwwroot_dir}/${W}" ] && { echo "[${wwwroot_dir}/${W}] not exist"; break; }
    PUSH_FILE="${backup_dir}/Web_${W}_$(date +%Y%m%d_%H).tgz"
    if [ ! -e "${PUSH_FILE}" ]; then
      pushd ${wwwroot_dir} > /dev/null
      tar czf ${PUSH_FILE} ./$W
      popd > /dev/null
    fi
    ossutil cp -f ${PUSH_FILE} oss://${oss_bucket}/`date +%F`/${PUSH_FILE##*/}
    if [ $? -eq 0 ]; then
      ossutil rm -rf oss://${oss_bucket}/`date +%F --date="${expired_days} days ago"`/
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${PUSH_FILE}
    fi
  done
}
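
# Website backups to Tencent COS: same pattern, using coscli.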
WEB_COS_BK() {
  for W in `echo ${website_name} | tr ',' ' '`
  do
    [ ! -e "${wwwroot_dir}/${W}" ] && { echo "[${wwwroot_dir}/${W}] not exist"; break; }
    PUSH_FILE="${backup_dir}/Web_${W}_$(date +%Y%m%d_%H).tgz"
    if [ ! -e "${PUSH_FILE}" ]; then
      pushd ${wwwroot_dir} > /dev/null
      tar czf ${PUSH_FILE} ./$W
      popd > /dev/null
    fi
    coscli sync ${PUSH_FILE} cos://${cos_bucket}/`date +%F`/${PUSH_FILE##*/}
    if [ $? -eq 0 ]; then
      coscli rm -rf cos://${cos_bucket}/`date +%F --date="${expired_days} days ago"` > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${PUSH_FILE}
    fi
  done
}
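
# Website backups to UPYUN via upx.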
WEB_UPYUN_BK() {
  for W in `echo ${website_name} | tr ',' ' '`
  do
    [ ! -e "${wwwroot_dir}/${W}" ] && { echo "[${wwwroot_dir}/${W}] not exist"; break; }
    [ ! -e "${backup_dir}" ] && mkdir -p ${backup_dir}
    PUSH_FILE="${backup_dir}/Web_${W}_$(date +%Y%m%d_%H).tgz"
    if [ ! -e "${PUSH_FILE}" ]; then
      pushd ${wwwroot_dir} > /dev/null
      tar czf ${PUSH_FILE} ./$W
      popd > /dev/null
    fi
    upx put ${PUSH_FILE} /`date +%F`/${PUSH_FILE##*/}
    if [ $? -eq 0 ]; then
      upx rm -a `date +%F --date="${expired_days} days ago"` > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${PUSH_FILE}
    fi
  done
}
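
# Website backups to Qiniu via qshell.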
WEB_QINIU_BK() {
  for W in `echo ${website_name} | tr ',' ' '`
  do
    [ ! -e "${wwwroot_dir}/${W}" ] && { echo "[${wwwroot_dir}/${W}] not exist"; break; }
    [ ! -e "${backup_dir}" ] && mkdir -p ${backup_dir}
    PUSH_FILE="${backup_dir}/Web_${W}_$(date +%Y%m%d_%H).tgz"
    if [ ! -e "${PUSH_FILE}" ]; then
      pushd ${wwwroot_dir} > /dev/null
      tar czf ${PUSH_FILE} ./$W
      popd > /dev/null
    fi
    qshell rput ${qiniu_bucket} /`date +%F`/${PUSH_FILE##*/} ${PUSH_FILE}
    if [ $? -eq 0 ]; then
      qshell listbucket ${qiniu_bucket} /`date +%F --date="${expired_days} days ago"` /tmp/qiniu.txt > /dev/null 2>&1
      qshell batchdelete -force ${qiniu_bucket} /tmp/qiniu.txt > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${PUSH_FILE}
      rm -f /tmp/qiniu.txt
    fi
  done
}
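
# Website backups to Amazon S3 via `aws s3 cp`.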
WEB_S3_BK() {
  for W in `echo ${website_name} | tr ',' ' '`
  do
    [ ! -e "${wwwroot_dir}/${W}" ] && { echo "[${wwwroot_dir}/${W}] not exist"; break; }
    [ ! -e "${backup_dir}" ] && mkdir -p ${backup_dir}
    PUSH_FILE="${backup_dir}/Web_${W}_$(date +%Y%m%d_%H).tgz"
    if [ ! -e "${PUSH_FILE}" ]; then
      pushd ${wwwroot_dir} > /dev/null
      tar czf ${PUSH_FILE} ./$W
      popd > /dev/null
    fi
    aws s3 cp ${PUSH_FILE} s3://${s3_bucket}/`date +%F`/${PUSH_FILE##*/}
    if [ $? -eq 0 ]; then
      aws s3 rm --recursive s3://${s3_bucket}/`date +%F --date="${expired_days} days ago"` > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${PUSH_FILE}
    fi
  done
}
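
# Website backups to Dropbox via dbxcli.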
WEB_DROPBOX_BK() {
  for W in `echo ${website_name} | tr ',' ' '`
  do
    [ ! -e "${wwwroot_dir}/${W}" ] && { echo "[${wwwroot_dir}/${W}] not exist"; break; }
    [ ! -e "${backup_dir}" ] && mkdir -p ${backup_dir}
    PUSH_FILE="${backup_dir}/Web_${W}_$(date +%Y%m%d_%H).tgz"
    if [ ! -e "${PUSH_FILE}" ]; then
      pushd ${wwwroot_dir} > /dev/null
      tar czf ${PUSH_FILE} ./$W
      popd > /dev/null
    fi
    dbxcli put ${PUSH_FILE} `date +%F`/${PUSH_FILE##*/}
    if [ $? -eq 0 ]; then
      dbxcli rm -f `date +%F --date="${expired_days} days ago"` > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${PUSH_FILE}
    fi
  done
}
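
# Dispatch: for every destination in backup_destination, run the matching DB
# and/or website backup functions according to backup_content. The 'remote'
# destination builds config_backup.txt and hands it to mabs.sh.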
for DEST in `echo ${backup_destination} | tr ',' ' '`
do
  if [ "${DEST}" == 'local' ]; then
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_Local_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_LOCAL_BK
  fi
  if [ "${DEST}" == 'remote' ]; then
    echo "com:::[ ! -e "${backup_dir}" ] && mkdir -p ${backup_dir}" > config_backup.txt
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_Remote_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_Remote_BK
    ./mabs.sh -c config_backup.txt -T -1 | tee -a mabs.log
  fi
  if [ "${DEST}" == 'oss' ]; then
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_OSS_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_OSS_BK
  fi
  if [ "${DEST}" == 'cos' ]; then
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_COS_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_COS_BK
  fi
  if [ "${DEST}" == 'upyun' ]; then
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_UPYUN_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_UPYUN_BK
  fi
  if [ "${DEST}" == 'qiniu' ]; then
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_QINIU_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_QINIU_BK
  fi
  if [ "${DEST}" == 's3' ]; then
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_S3_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_S3_BK
  fi
  if [ "${DEST}" == 'dropbox' ]; then
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_DROPBOX_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_DROPBOX_BK
  fi
done