backup.sh

#!/bin/bash
# Author: yeho <lj2007331 AT gmail.com>
# BLOG: https://linuxeye.com
#
# Notes: OneinStack for CentOS/RedHat 6+ Debian 8+ and Ubuntu 14+
#
# Project home page:
# https://oneinstack.com
# https://github.com/oneinstack/oneinstack
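#
# Usage note (assumption): this script is meant to run non-interactively, e.g.
# from a cron entry created during OneinStack setup. It does its work through
# the helper scripts in tools/ (db_bk.sh, website_bk.sh, mabs.sh), driven by the
# settings read from ../options.conf below.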
# Check if user is root
[ $(id -u) != "0" ] && { echo "${CFAILURE}Error: You must be root to run this script${CEND}"; exit 1; }
oneinstack_dir=$(dirname "$(readlink -f "$0")")
pushd ${oneinstack_dir}/tools > /dev/null
. ../options.conf
[ ! -e "${backup_dir}" ] && mkdir -p ${backup_dir}
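# Variables read from ../options.conf (values below are illustrative only; the
# real values are whatever your OneinStack configuration sets):
#   backup_dir=/data/backup          # where local archives are written
#   backup_destination=local,oss     # any of: local,remote,oss,cos,upyun,qiniu,s3,gdrive,dropbox
#   backup_content=db,web            # back up databases, websites, or both
#   db_name=db1,db2                  # comma-separated database names
#   website_name=www.example.com     # comma-separated site directory names
#   wwwroot_dir=/data/wwwroot        # parent directory of the site directories
#   expired_days=10                  # archives this many days old are pruned
#   oss_bucket=my-bucket             # plus s3_bucket / qiniu_bucket for those destinations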
DB_Local_BK() {
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
  done
}
DB_Remote_BK() {
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
    DB_GREP="DB_${D}_`date +%Y%m%d`"
    DB_FILE=`ls -lrt ${backup_dir} | grep ${DB_GREP} | tail -1 | awk '{print $NF}'`
    echo "file:::${backup_dir}/${DB_FILE} ${backup_dir} push" >> config_backup.txt
    echo "com:::[ -e "${backup_dir}/${DB_FILE}" ] && rm -rf ${backup_dir}/DB_${D}_$(date +%Y%m%d --date="${expired_days} days ago")_*.tgz" >> config_backup.txt
  done
}
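# config_backup.txt is consumed by mabs.sh in the dispatch loop at the bottom of
# this script. As generated here, "file:::<path> <dir> push" lines appear to name
# a file to transfer and "com:::<command>" lines a cleanup command that removes
# archives from ${expired_days} days ago; this reading is inferred from how the
# file is written -- see mabs.sh for the authoritative format.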
DB_OSS_BK() {
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
    DB_GREP="DB_${D}_`date +%Y%m%d`"
    DB_FILE=`ls -lrt ${backup_dir} | grep ${DB_GREP} | tail -1 | awk '{print $NF}'`
    /usr/local/bin/ossutil cp -f ${backup_dir}/${DB_FILE} oss://${oss_bucket}/`date +%F`/${DB_FILE}
    if [ $? -eq 0 ]; then
      /usr/local/bin/ossutil rm -rf oss://${oss_bucket}/`date +%F --date="${expired_days} days ago"`/
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${backup_dir}/${DB_FILE}
    fi
  done
}
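# The remaining DB_*_BK functions follow the same pattern as DB_OSS_BK: dump each
# database with db_bk.sh, pick the newest matching archive in ${backup_dir},
# upload it into a folder named after today's date, prune the folder dated
# ${expired_days} days ago, and delete the local copy unless 'local' is also
# listed in backup_destination. Only the provider CLI differs.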
DB_COS_BK() {
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
    DB_GREP="DB_${D}_`date +%Y%m%d`"
    DB_FILE=`ls -lrt ${backup_dir} | grep ${DB_GREP} | tail -1 | awk '{print $NF}'`
    ${python_install_dir}/bin/coscmd upload ${backup_dir}/${DB_FILE} /`date +%F`/${DB_FILE}
    if [ $? -eq 0 ]; then
      ${python_install_dir}/bin/coscmd delete -r -f `date +%F --date="${expired_days} days ago"` > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${backup_dir}/${DB_FILE}
    fi
  done
}
DB_UPYUN_BK() {
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
    DB_GREP="DB_${D}_`date +%Y%m%d`"
    DB_FILE=`ls -lrt ${backup_dir} | grep ${DB_GREP} | tail -1 | awk '{print $NF}'`
    /usr/local/bin/upx put ${backup_dir}/${DB_FILE} /`date +%F`/${DB_FILE}
    if [ $? -eq 0 ]; then
      /usr/local/bin/upx rm -a `date +%F --date="${expired_days} days ago"` > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${backup_dir}/${DB_FILE}
    fi
  done
}
DB_QINIU_BK() {
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
    DB_GREP="DB_${D}_`date +%Y%m%d`"
    DB_FILE=`ls -lrt ${backup_dir} | grep ${DB_GREP} | tail -1 | awk '{print $NF}'`
    /usr/local/bin/qshell rput ${qiniu_bucket} /`date +%F`/${DB_FILE} ${backup_dir}/${DB_FILE}
    if [ $? -eq 0 ]; then
      /usr/local/bin/qshell listbucket ${qiniu_bucket} /`date +%F --date="${expired_days} days ago"` /tmp/qiniu.txt > /dev/null 2>&1
      /usr/local/bin/qshell batchdelete -force ${qiniu_bucket} /tmp/qiniu.txt > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${backup_dir}/${DB_FILE}
      rm -f /tmp/qiniu.txt
    fi
  done
}
DB_S3_BK() {
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
    DB_GREP="DB_${D}_`date +%Y%m%d`"
    DB_FILE=`ls -lrt ${backup_dir} | grep ${DB_GREP} | tail -1 | awk '{print $NF}'`
    ${python_install_dir}/bin/s3cmd put ${backup_dir}/${DB_FILE} s3://${s3_bucket}/`date +%F`/${DB_FILE}
    if [ $? -eq 0 ]; then
      ${python_install_dir}/bin/s3cmd rm -r s3://${s3_bucket}/`date +%F --date="${expired_days} days ago"` > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${backup_dir}/${DB_FILE}
    fi
  done
}
DB_GDRIVE_BK() {
  # get the IP information
  IPADDR=$(../include/get_ipaddr.py)
  IPADDR=${IPADDR:-127.0.0.1}
  Parent_root_id=$(/usr/local/bin/gdrive list --no-header -q "trashed = false and name = '${IPADDR}'" | awk '{print $1}' | head -1)
  [ -z "${Parent_root_id}" ] && sleep 60 && Parent_root_id=$(/usr/local/bin/gdrive mkdir ${IPADDR} | awk '{print $2}')
  sleep 60
  Parent_sub_id=$(/usr/local/bin/gdrive list --no-header -q "'${Parent_root_id}' in parents and trashed = false and name = '`date +%F`'" | awk '{print $1}' | head -1)
  [ -z "${Parent_sub_id}" ] && sleep 60 && Parent_sub_id=$(/usr/local/bin/gdrive mkdir -p ${Parent_root_id} `date +%F` | awk '{print $2}')
  sleep 60
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
    DB_GREP="DB_${D}_`date +%Y%m%d`"
    DB_FILE=`ls -lrt ${backup_dir} | grep ${DB_GREP} | tail -1 | awk '{print $NF}'`
    /usr/local/bin/gdrive upload -p ${Parent_sub_id} ${backup_dir}/${DB_FILE}
    # Capture the upload's exit status before sleeping; otherwise $? would test the sleep
    UPLOAD_STATUS=$?
    sleep 120
    if [ ${UPLOAD_STATUS} -eq 0 ]; then
      Parent_expired_id=$(/usr/local/bin/gdrive list --no-header -q "'${Parent_root_id}' in parents and trashed = false and name = '`date +%F --date="${expired_days} days ago"`'" | awk '{print $1}' | head -1)
      [ -n "${Parent_expired_id}" ] && sleep 60 && /usr/local/bin/gdrive delete -r ${Parent_expired_id} > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${backup_dir}/${DB_FILE}
      sleep 60
    fi
  done
}
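# Drive layout used above: a top-level folder named after this server's IP
# address, with one subfolder per day (date +%F). The fixed sleeps between
# gdrive calls are presumably there to give Drive time to register new folders
# and to stay under API rate limits (an assumption; tune them for your account).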
DB_DROPBOX_BK() {
  for D in `echo ${db_name} | tr ',' ' '`
  do
    ./db_bk.sh ${D}
    DB_GREP="DB_${D}_`date +%Y%m%d`"
    DB_FILE=`ls -lrt ${backup_dir} | grep ${DB_GREP} | tail -1 | awk '{print $NF}'`
    /usr/local/bin/dbxcli put ${backup_dir}/${DB_FILE} `date +%F`/${DB_FILE}
    if [ $? -eq 0 ]; then
      /usr/local/bin/dbxcli rm -f `date +%F --date="${expired_days} days ago"` > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${backup_dir}/${DB_FILE}
    fi
  done
}
WEB_LOCAL_BK() {
  for W in `echo ${website_name} | tr ',' ' '`
  do
    ./website_bk.sh $W
  done
}
WEB_Remote_BK() {
  for W in `echo ${website_name} | tr ',' ' '`
  do
    if [ `du -sm "${wwwroot_dir}/${W}" | awk '{print $1}'` -lt 2048 ]; then
      ./website_bk.sh $W
      Web_GREP="Web_${W}_`date +%Y%m%d`"
      Web_FILE=`ls -lrt ${backup_dir} | grep ${Web_GREP} | tail -1 | awk '{print $NF}'`
      echo "file:::${backup_dir}/${Web_FILE} ${backup_dir} push" >> config_backup.txt
      echo "com:::[ -e "${backup_dir}/${Web_FILE}" ] && rm -rf ${backup_dir}/Web_${W}_$(date +%Y%m%d --date="${expired_days} days ago")_*.tgz" >> config_backup.txt
    else
      echo "file:::${wwwroot_dir}/$W ${backup_dir} push" >> config_backup.txt
    fi
  done
}
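# Sites smaller than 2048 MB are archived locally via website_bk.sh and the
# resulting tarball is handed to mabs.sh; larger sites are listed as a plain
# directory so mabs.sh pushes the tree directly instead of tarring it first.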
WEB_OSS_BK() {
  for W in `echo ${website_name} | tr ',' ' '`
  do
    [ ! -e "${wwwroot_dir}/${W}" ] && { echo "[${wwwroot_dir}/${W}] does not exist"; break; }
    PUSH_FILE="${backup_dir}/Web_${W}_$(date +%Y%m%d_%H).tgz"
    if [ ! -e "${PUSH_FILE}" ]; then
      pushd ${wwwroot_dir} > /dev/null
      tar czf ${PUSH_FILE} ./$W
      popd > /dev/null
    fi
    /usr/local/bin/ossutil cp -f ${PUSH_FILE} oss://${oss_bucket}/`date +%F`/${PUSH_FILE##*/}
    if [ $? -eq 0 ]; then
      /usr/local/bin/ossutil rm -rf oss://${oss_bucket}/`date +%F --date="${expired_days} days ago"`/
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${PUSH_FILE}
    fi
  done
}
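# PUSH_FILE is named Web_<site>_YYYYMMDD_HH.tgz, so a re-run within the same
# hour reuses the existing archive instead of re-tarring the site. The remaining
# WEB_*_BK functions repeat this archive-then-upload pattern with their own CLI.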
WEB_COS_BK() {
  for W in `echo ${website_name} | tr ',' ' '`
  do
    [ ! -e "${wwwroot_dir}/${W}" ] && { echo "[${wwwroot_dir}/${W}] does not exist"; break; }
    PUSH_FILE="${backup_dir}/Web_${W}_$(date +%Y%m%d_%H).tgz"
    if [ ! -e "${PUSH_FILE}" ]; then
      pushd ${wwwroot_dir} > /dev/null
      tar czf ${PUSH_FILE} ./$W
      popd > /dev/null
    fi
    ${python_install_dir}/bin/coscmd upload ${PUSH_FILE} /`date +%F`/${PUSH_FILE##*/}
    if [ $? -eq 0 ]; then
      ${python_install_dir}/bin/coscmd delete -r -f `date +%F --date="${expired_days} days ago"` > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${PUSH_FILE}
    fi
  done
}
WEB_UPYUN_BK() {
  for W in `echo ${website_name} | tr ',' ' '`
  do
    [ ! -e "${wwwroot_dir}/${W}" ] && { echo "[${wwwroot_dir}/${W}] does not exist"; break; }
    [ ! -e "${backup_dir}" ] && mkdir -p ${backup_dir}
    PUSH_FILE="${backup_dir}/Web_${W}_$(date +%Y%m%d_%H).tgz"
    if [ ! -e "${PUSH_FILE}" ]; then
      pushd ${wwwroot_dir} > /dev/null
      tar czf ${PUSH_FILE} ./$W
      popd > /dev/null
    fi
    /usr/local/bin/upx put ${PUSH_FILE} /`date +%F`/${PUSH_FILE##*/}
    if [ $? -eq 0 ]; then
      /usr/local/bin/upx rm -a `date +%F --date="${expired_days} days ago"` > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${PUSH_FILE}
    fi
  done
}
WEB_QINIU_BK() {
  for W in `echo ${website_name} | tr ',' ' '`
  do
    [ ! -e "${wwwroot_dir}/${W}" ] && { echo "[${wwwroot_dir}/${W}] does not exist"; break; }
    [ ! -e "${backup_dir}" ] && mkdir -p ${backup_dir}
    PUSH_FILE="${backup_dir}/Web_${W}_$(date +%Y%m%d_%H).tgz"
    if [ ! -e "${PUSH_FILE}" ]; then
      pushd ${wwwroot_dir} > /dev/null
      tar czf ${PUSH_FILE} ./$W
      popd > /dev/null
    fi
    /usr/local/bin/qshell rput ${qiniu_bucket} /`date +%F`/${PUSH_FILE##*/} ${PUSH_FILE}
    if [ $? -eq 0 ]; then
      /usr/local/bin/qshell listbucket ${qiniu_bucket} /`date +%F --date="${expired_days} days ago"` /tmp/qiniu.txt > /dev/null 2>&1
      /usr/local/bin/qshell batchdelete -force ${qiniu_bucket} /tmp/qiniu.txt > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${PUSH_FILE}
      rm -f /tmp/qiniu.txt
    fi
  done
}
WEB_S3_BK() {
  for W in `echo ${website_name} | tr ',' ' '`
  do
    [ ! -e "${wwwroot_dir}/${W}" ] && { echo "[${wwwroot_dir}/${W}] does not exist"; break; }
    [ ! -e "${backup_dir}" ] && mkdir -p ${backup_dir}
    PUSH_FILE="${backup_dir}/Web_${W}_$(date +%Y%m%d_%H).tgz"
    if [ ! -e "${PUSH_FILE}" ]; then
      pushd ${wwwroot_dir} > /dev/null
      tar czf ${PUSH_FILE} ./$W
      popd > /dev/null
    fi
    ${python_install_dir}/bin/s3cmd put ${PUSH_FILE} s3://${s3_bucket}/`date +%F`/${PUSH_FILE##*/}
    if [ $? -eq 0 ]; then
      ${python_install_dir}/bin/s3cmd rm -r s3://${s3_bucket}/`date +%F --date="${expired_days} days ago"` > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${PUSH_FILE}
    fi
  done
}
WEB_GDRIVE_BK() {
  # get the IP information
  IPADDR=$(../include/get_ipaddr.py)
  IPADDR=${IPADDR:-127.0.0.1}
  Parent_root_id=$(/usr/local/bin/gdrive list --no-header -q "trashed = false and name = '${IPADDR}'" | awk '{print $1}' | head -1)
  [ -z "${Parent_root_id}" ] && sleep 60 && Parent_root_id=$(/usr/local/bin/gdrive mkdir ${IPADDR} | awk '{print $2}')
  sleep 60
  Parent_sub_id=$(/usr/local/bin/gdrive list --no-header -q "'${Parent_root_id}' in parents and trashed = false and name = '`date +%F`'" | awk '{print $1}' | head -1)
  [ -z "${Parent_sub_id}" ] && sleep 60 && Parent_sub_id=$(/usr/local/bin/gdrive mkdir -p ${Parent_root_id} `date +%F` | awk '{print $2}')
  sleep 60
  for W in `echo ${website_name} | tr ',' ' '`
  do
    [ ! -e "${wwwroot_dir}/${W}" ] && { echo "[${wwwroot_dir}/${W}] does not exist"; break; }
    [ ! -e "${backup_dir}" ] && mkdir -p ${backup_dir}
    PUSH_FILE="${backup_dir}/Web_${W}_$(date +%Y%m%d_%H).tgz"
    if [ ! -e "${PUSH_FILE}" ]; then
      pushd ${wwwroot_dir} > /dev/null
      tar czf ${PUSH_FILE} ./$W
      popd > /dev/null
    fi
    /usr/local/bin/gdrive upload -p ${Parent_sub_id} ${PUSH_FILE}
    # Capture the upload's exit status before sleeping; otherwise $? would test the sleep
    UPLOAD_STATUS=$?
    sleep 120
    if [ ${UPLOAD_STATUS} -eq 0 ]; then
      Parent_expired_id=$(/usr/local/bin/gdrive list --no-header -q "'${Parent_root_id}' in parents and trashed = false and name = '`date +%F --date="${expired_days} days ago"`'" | awk '{print $1}' | head -1)
      [ -n "${Parent_expired_id}" ] && sleep 60 && /usr/local/bin/gdrive delete -r ${Parent_expired_id} > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${PUSH_FILE}
      sleep 60
    fi
  done
}
WEB_DROPBOX_BK() {
  for W in `echo ${website_name} | tr ',' ' '`
  do
    [ ! -e "${wwwroot_dir}/${W}" ] && { echo "[${wwwroot_dir}/${W}] does not exist"; break; }
    [ ! -e "${backup_dir}" ] && mkdir -p ${backup_dir}
    PUSH_FILE="${backup_dir}/Web_${W}_$(date +%Y%m%d_%H).tgz"
    if [ ! -e "${PUSH_FILE}" ]; then
      pushd ${wwwroot_dir} > /dev/null
      tar czf ${PUSH_FILE} ./$W
      popd > /dev/null
    fi
    /usr/local/bin/dbxcli put ${PUSH_FILE} `date +%F`/${PUSH_FILE##*/}
    if [ $? -eq 0 ]; then
      /usr/local/bin/dbxcli rm -f `date +%F --date="${expired_days} days ago"` > /dev/null 2>&1
      [ -z "`echo ${backup_destination} | grep -ow 'local'`" ] && rm -f ${PUSH_FILE}
    fi
  done
}
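# Dispatch: for every destination listed in backup_destination, run the matching
# DB_*/WEB_* functions above, filtered by whether 'db' and/or 'web' appear in
# backup_content. The 'remote' destination additionally builds config_backup.txt
# and hands it to mabs.sh for the actual transfer.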
for DEST in `echo ${backup_destination} | tr ',' ' '`
do
  if [ "${DEST}" == 'local' ]; then
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_Local_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_LOCAL_BK
  fi
  if [ "${DEST}" == 'remote' ]; then
    echo "com:::[ ! -e "${backup_dir}" ] && mkdir -p ${backup_dir}" > config_backup.txt
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_Remote_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_Remote_BK
    ./mabs.sh -c config_backup.txt -T -1 | tee -a mabs.log
  fi
  if [ "${DEST}" == 'oss' ]; then
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_OSS_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_OSS_BK
  fi
  if [ "${DEST}" == 'cos' ]; then
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_COS_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_COS_BK
  fi
  if [ "${DEST}" == 'upyun' ]; then
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_UPYUN_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_UPYUN_BK
  fi
  if [ "${DEST}" == 'qiniu' ]; then
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_QINIU_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_QINIU_BK
  fi
  if [ "${DEST}" == 's3' ]; then
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_S3_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_S3_BK
  fi
  if [ "${DEST}" == 'gdrive' ]; then
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_GDRIVE_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_GDRIVE_BK
  fi
  if [ "${DEST}" == 'dropbox' ]; then
    [ -n "`echo ${backup_content} | grep -ow db`" ] && DB_DROPBOX_BK
    [ -n "`echo ${backup_content} | grep -ow web`" ] && WEB_DROPBOX_BK
  fi
done