lj2007331@gmail.com 8 years ago
parent
commit
a495d006e4

+ 17 - 16
backup_setup.sh

@@ -34,9 +34,9 @@ while :; do echo
   echo -e "\t${CMSG}4${CEND}. Localhost and Remote host"
   echo -e "\t${CMSG}5${CEND}. Localhost and Qcloud COS"
   echo -e "\t${CMSG}6${CEND}. Remote host and Qcloud COS"
-  read -p "Please input a number:(Default 1 press Enter) " DESC_BK
+  read -p "Please input a number:(Default 1 press Enter) " DESC_BK 
   [ -z "$DESC_BK" ] && DESC_BK=1
-  if [[ ! $DESC_BK =~ ^[1-6]$ ]]; then
+  if [[ ! $DESC_BK =~ ^[1-6]$ ]];then 
     echo "${CWARNING}input error! Please only input number 1,2,3,4,5,6${CEND}"
   else
     break
@@ -57,7 +57,7 @@ while :; do echo
   echo -e "\t${CMSG}3${CEND}. Database and Website"
   read -p "Please input a number:(Default 1 press Enter) " CONTENT_BK
   [ -z "$CONTENT_BK" ] && CONTENT_BK=1
-  if [[ ! $CONTENT_BK =~ ^[1-3]$ ]]; then
+  if [[ ! $CONTENT_BK =~ ^[1-3]$ ]];then 
     echo "${CWARNING}input error! Please only input number 1,2,3${CEND}"
   else
     break
@@ -68,10 +68,10 @@ done
 [ "$CONTENT_BK" == '2' ] && sed -i 's@^backup_content=.*@backup_content=web@' ./options.conf
 [ "$CONTENT_BK" == '3' ] && sed -i 's@^backup_content=.*@backup_content=db,web@' ./options.conf
 
-if [ "$DESC_BK" != '3' ]; then
+if [ "$DESC_BK" != '3' ];then 
   while :; do echo
     echo "Please enter the directory for save the backup file: "
-    read -p "(Default directory: $backup_dir): " NEW_backup_dir
+    read -p "(Default directory: $backup_dir): " NEW_backup_dir 
     [ -z "$NEW_backup_dir" ] && NEW_backup_dir="$backup_dir"
     if [ -z "`echo $NEW_backup_dir| grep '^/'`" ]; then
       echo "${CWARNING}input error! ${CEND}"
@@ -84,13 +84,13 @@ fi
 
 while :; do echo
   echo "Pleas enter a valid backup number of days: "
-  read -p "(Default days: 5): " expired_days
+  read -p "(Default days: 5): " expired_days 
   [ -z "$expired_days" ] && expired_days=5
   [ -n "`echo $expired_days | sed -n "/^[0-9]\+$/p"`" ] && break || echo "${CWARNING}input error! Please only enter numbers! ${CEND}"
 done
 sed -i "s@^expired_days=.*@expired_days=$expired_days@" ./options.conf
 
-if [ "$CONTENT_BK" != '2' ]; then
+if [ "$CONTENT_BK" != '2' ];then
   databases=`$db_install_dir/bin/mysql -uroot -p$dbrootpwd -e "show databases\G" | grep Database | awk '{print $2}' | grep -Evw "(performance_schema|information_schema|mysql|sys)"`
   while :; do echo
     echo "Please enter one or more name for database, separate multiple database names with commas: "
@@ -129,17 +129,17 @@ echo "You have to backup the content:"
 [ "$CONTENT_BK" != '2' ] && echo "Database: ${CMSG}$db_name${CEND}"
 [ "$CONTENT_BK" != '1' ] && echo "Website: ${CMSG}$website_name${CEND}"
 
-if [[ "$DESC_BK" =~ ^[2,4,6]$ ]]; then
+if [[ "$DESC_BK" =~ ^[2,4,6]$ ]];then 
   > tools/iplist.txt
   while :; do echo
     read -p "Please enter the remote host ip: " remote_ip
     [ -z "$remote_ip" -o "$remote_ip" == '127.0.0.1' ] && continue
     echo
     read -p "Please enter the remote host port(Default: 22) : " remote_port
-    [ -z "$remote_port" ] && remote_port=22
+    [ -z "$remote_port" ] && remote_port=22 
     echo
     read -p "Please enter the remote host user(Default: root) : " remote_user
-    [ -z "$remote_user" ] && remote_user=root
+    [ -z "$remote_user" ] && remote_user=root 
     echo
     read -p "Please enter the remote host password: " remote_password
     IPcode=$(echo "ibase=16;$(echo "$remote_ip" | xxd -ps -u)"|bc|tr -d '\\'|tr -d '\n')
@@ -147,11 +147,11 @@ if [[ "$DESC_BK" =~ ^[2,4,6]$ ]]; then
     PWcode=$(echo "ibase=16;$(echo "$remote_password" | xxd -ps -u)"|bc|tr -d '\\'|tr -d '\n')
     [ -e "~/.ssh/known_hosts" ] && grep $remote_ip ~/.ssh/known_hosts | sed -i "/$remote_ip/d" ~/.ssh/known_hosts
     ./tools/mssh.exp ${IPcode}P $remote_user ${PWcode}P ${Portcode}P true 10
-    if [ $? -eq 0 ]; then
+    if [ $? -eq 0 ];then
       [ -z "`grep $remote_ip tools/iplist.txt`" ] && echo "$remote_ip $remote_port $remote_user $remote_password" >> tools/iplist.txt || echo "${CWARNING}$remote_ip has been added! ${CEND}"
       while :; do
-        read -p "Do you want to add more host ? [y/n]: " more_host_yn
-        if [ "$more_host_yn" != 'y' -a "$more_host_yn" != 'n' ]; then
+        read -p "Do you want to add more host ? [y/n]: " more_host_yn 
+        if [ "$more_host_yn" != 'y' -a "$more_host_yn" != 'n' ];then
           echo "${CWARNING}input error! Please only input 'y' or 'n'${CEND}"
         else
           break
@@ -162,25 +162,26 @@ if [[ "$DESC_BK" =~ ^[2,4,6]$ ]]; then
   done
 fi
 
-if [[ "$DESC_BK" =~ ^[3,5,6]$ ]]; then
+if [[ "$DESC_BK" =~ ^[3,5,6]$ ]];then 
   [ ! -e "${python_install_dir}/bin/python" ] && Install_Python
   [ ! -e "${python_install_dir}/lib/python2.7/site-packages/requests" ] && ${python_install_dir}/bin/pip install requests
   while :; do echo
     echo 'Please select your backup datacenter:'
-    echo -e "\t ${CMSG}1${CEND}. 华南(广州)  ${CMSG}2${CEND}. 华北(天津)"
+    echo -e "\t ${CMSG}1${CEND}. 华南(广州)  ${CMSG}2${CEND}. 华北(天津)" 
     echo -e "\t ${CMSG}3${CEND}. 华东(上海)  ${CMSG}4${CEND}. 新加坡"
     read -p "Please input a number:(Default 1 press Enter) " Location
     [ -z "$Location" ] && Location=1
     if [ ${Location} -ge 1 >/dev/null 2>&1 -a ${Location} -le 4 >/dev/null 2>&1 ]; then
       break
     else
-      echo "${CWARNING}input error! Please only input number 1,2,3,4${CEND}"
+      echo "${CWARNING}input error! Please only input number 1~4${CEND}"
     fi
   done
   [ "$Location" == '1' ] && region=gz
   [ "$Location" == '2' ] && region=tj
   [ "$Location" == '3' ] && region=sh
   [ "$Location" == '4' ] && region=sgp
+  [ -e "/root/.coscredentials" ] && rm -rf /root/.coscredentials
   while :; do echo
     read -p "Please enter the Qcloud COS appid: " appid 
     [ -z "$appid" ] && continue

+ 1 - 1
config/discuz.conf

@@ -8,5 +8,5 @@ rewrite ^([^\.]*)/blog-([0-9]+)-([0-9]+)\.html$ $1/home.php?mod=space&uid=$2&do=
 rewrite ^([^\.]*)/(fid|tid)-([0-9]+)\.html$ $1/index.php?action=$2&value=$3 last;
 rewrite ^([^\.]*)/([a-z]+[a-z0-9_]*)-([a-z0-9_\-]+)\.html$ $1/plugin.php?id=$2:$3 last;
 #if (!-e $request_filename) {
-#	return 404;
+#  return 404;
 #}

+ 2 - 2
config/drupal.conf

@@ -1,3 +1,3 @@
 if (!-e $request_filename) {
-    rewrite ^/(.*)$ /index.php?q=$1 last;
-    }
+  rewrite ^/(.*)$ /index.php?q=$1 last;
+}

+ 30 - 31
config/ecshop.conf

@@ -1,32 +1,31 @@
-if (!-e $request_filename)
-{
-    rewrite "^/index\.html" /index.php last;
-    rewrite "^/category$" /index.php last;
-    rewrite "^/feed-c([0-9]+)\.xml$" /feed.php?cat=$1 last;
-    rewrite "^/feed-b([0-9]+)\.xml$" /feed.php?brand=$1 last;
-    rewrite "^/feed\.xml$" /feed.php last;
-    rewrite "^/category-([0-9]+)-b([0-9]+)-min([0-9]+)-max([0-9]+)-attr([^-]*)-([0-9]+)-(.+)-([a-zA-Z]+)(.*)\.html$" /category.php?id=$1&brand=$2&price_min=$3&price_max=$4&filter_attr=$5&page=$6&sort=$7&order=$8 last;
-    rewrite "^/category-([0-9]+)-b([0-9]+)-min([0-9]+)-max([0-9]+)-attr([^-]*)(.*)\.html$" /category.php?id=$1&brand=$2&price_min=$3&price_max=$4&filter_attr=$5 last;
-    rewrite "^/category-([0-9]+)-b([0-9]+)-([0-9]+)-(.+)-([a-zA-Z]+)(.*)\.html$" /category.php?id=$1&brand=$2&page=$3&sort=$4&order=$5 last;
-    rewrite "^/category-([0-9]+)-b([0-9]+)-([0-9]+)(.*)\.html$" /category.php?id=$1&brand=$2&page=$3 last;
-    rewrite "^/category-([0-9]+)-b([0-9]+)(.*)\.html$" /category.php?id=$1&brand=$2 last;
-    rewrite "^/category-([0-9]+)(.*)\.html$" /category.php?id=$1 last;
-    rewrite "^/goods-([0-9]+)(.*)\.html" /goods.php?id=$1 last;
-    rewrite "^/article_cat-([0-9]+)-([0-9]+)-(.+)-([a-zA-Z]+)(.*)\.html$" /article_cat.php?id=$1&page=$2&sort=$3&order=$4 last;
-    rewrite "^/article_cat-([0-9]+)-([0-9]+)(.*)\.html$" /article_cat.php?id=$1&page=$2 last;
-    rewrite "^/article_cat-([0-9]+)(.*)\.html$" /article_cat.php?id=$1 last;
-    rewrite "^/article-([0-9]+)(.*)\.html$" /article.php?id=$1 last;
-    rewrite "^/brand-([0-9]+)-c([0-9]+)-([0-9]+)-(.+)-([a-zA-Z]+)\.html" /brand.php?id=$1&cat=$2&page=$3&sort=$4&order=$5 last;
-    rewrite "^/brand-([0-9]+)-c([0-9]+)-([0-9]+)(.*)\.html" /brand.php?id=$1&cat=$2&page=$3 last;
-    rewrite "^/brand-([0-9]+)-c([0-9]+)(.*)\.html" /brand.php?id=$1&cat=$2 last;
-    rewrite "^/brand-([0-9]+)(.*)\.html" /brand.php?id=$1 last;
-    rewrite "^/tag-(.*)\.html" /search.php?keywords=$1 last;
-    rewrite "^/snatch-([0-9]+)\.html$" /snatch.php?id=$1 last;
-    rewrite "^/group_buy-([0-9]+)\.html$" /group_buy.php?act=view&id=$1 last;
-    rewrite "^/auction-([0-9]+)\.html$" /auction.php?act=view&id=$1 last;
-    rewrite "^/exchange-id([0-9]+)(.*)\.html$" /exchange.php?id=$1&act=view last;
-    rewrite "^/exchange-([0-9]+)-min([0-9]+)-max([0-9]+)-([0-9]+)-(.+)-([a-zA-Z]+)(.*)\.html$" /exchange.php?cat_id=$1&integral_min=$2&integral_max=$3&page=$4&sort=$5&order=$6 last;
-    rewrite "^/exchange-([0-9]+)-([0-9]+)-(.+)-([a-zA-Z]+)(.*)\.html$" /exchange.php?cat_id=$1&page=$2&sort=$3&order=$4 last;
-    rewrite "^/exchange-([0-9]+)-([0-9]+)(.*)\.html$" /exchange.php?cat_id=$1&page=$2 last;
-    rewrite "^/exchange-([0-9]+)(.*)\.html$" /exchange.php?cat_id=$1 last;
+if (!-e $request_filename) {
+  rewrite "^/index\.html" /index.php last;
+  rewrite "^/category$" /index.php last;
+  rewrite "^/feed-c([0-9]+)\.xml$" /feed.php?cat=$1 last;
+  rewrite "^/feed-b([0-9]+)\.xml$" /feed.php?brand=$1 last;
+  rewrite "^/feed\.xml$" /feed.php last;
+  rewrite "^/category-([0-9]+)-b([0-9]+)-min([0-9]+)-max([0-9]+)-attr([^-]*)-([0-9]+)-(.+)-([a-zA-Z]+)(.*)\.html$" /category.php?id=$1&brand=$2&price_min=$3&price_max=$4&filter_attr=$5&page=$6&sort=$7&order=$8 last;
+  rewrite "^/category-([0-9]+)-b([0-9]+)-min([0-9]+)-max([0-9]+)-attr([^-]*)(.*)\.html$" /category.php?id=$1&brand=$2&price_min=$3&price_max=$4&filter_attr=$5 last;
+  rewrite "^/category-([0-9]+)-b([0-9]+)-([0-9]+)-(.+)-([a-zA-Z]+)(.*)\.html$" /category.php?id=$1&brand=$2&page=$3&sort=$4&order=$5 last;
+  rewrite "^/category-([0-9]+)-b([0-9]+)-([0-9]+)(.*)\.html$" /category.php?id=$1&brand=$2&page=$3 last;
+  rewrite "^/category-([0-9]+)-b([0-9]+)(.*)\.html$" /category.php?id=$1&brand=$2 last;
+  rewrite "^/category-([0-9]+)(.*)\.html$" /category.php?id=$1 last;
+  rewrite "^/goods-([0-9]+)(.*)\.html" /goods.php?id=$1 last;
+  rewrite "^/article_cat-([0-9]+)-([0-9]+)-(.+)-([a-zA-Z]+)(.*)\.html$" /article_cat.php?id=$1&page=$2&sort=$3&order=$4 last;
+  rewrite "^/article_cat-([0-9]+)-([0-9]+)(.*)\.html$" /article_cat.php?id=$1&page=$2 last;
+  rewrite "^/article_cat-([0-9]+)(.*)\.html$" /article_cat.php?id=$1 last;
+  rewrite "^/article-([0-9]+)(.*)\.html$" /article.php?id=$1 last;
+  rewrite "^/brand-([0-9]+)-c([0-9]+)-([0-9]+)-(.+)-([a-zA-Z]+)\.html" /brand.php?id=$1&cat=$2&page=$3&sort=$4&order=$5 last;
+  rewrite "^/brand-([0-9]+)-c([0-9]+)-([0-9]+)(.*)\.html" /brand.php?id=$1&cat=$2&page=$3 last;
+  rewrite "^/brand-([0-9]+)-c([0-9]+)(.*)\.html" /brand.php?id=$1&cat=$2 last;
+  rewrite "^/brand-([0-9]+)(.*)\.html" /brand.php?id=$1 last;
+  rewrite "^/tag-(.*)\.html" /search.php?keywords=$1 last;
+  rewrite "^/snatch-([0-9]+)\.html$" /snatch.php?id=$1 last;
+  rewrite "^/group_buy-([0-9]+)\.html$" /group_buy.php?act=view&id=$1 last;
+  rewrite "^/auction-([0-9]+)\.html$" /auction.php?act=view&id=$1 last;
+  rewrite "^/exchange-id([0-9]+)(.*)\.html$" /exchange.php?id=$1&act=view last;
+  rewrite "^/exchange-([0-9]+)-min([0-9]+)-max([0-9]+)-([0-9]+)-(.+)-([a-zA-Z]+)(.*)\.html$" /exchange.php?cat_id=$1&integral_min=$2&integral_max=$3&page=$4&sort=$5&order=$6 last;
+  rewrite "^/exchange-([0-9]+)-([0-9]+)-(.+)-([a-zA-Z]+)(.*)\.html$" /exchange.php?cat_id=$1&page=$2&sort=$3&order=$4 last;
+  rewrite "^/exchange-([0-9]+)-([0-9]+)(.*)\.html$" /exchange.php?cat_id=$1&page=$2 last;
+  rewrite "^/exchange-([0-9]+)(.*)\.html$" /exchange.php?cat_id=$1 last;
 }

+ 1 - 1
config/joomla.conf

@@ -1,3 +1,3 @@
 location / {
-    try_files $uri $uri/ /index.php?$args;
+  try_files $uri $uri/ /index.php?$args;
 }

+ 1 - 1
config/laravel.conf

@@ -1,3 +1,3 @@
 location / {
-    try_files $uri $uri/ /index.php?$query_string;
+  try_files $uri $uri/ /index.php?$query_string;
 }

+ 9 - 12
config/opencart.conf

@@ -1,16 +1,13 @@
 location = /sitemap.xml {
-    rewrite ^(.*)$ /index.php?route=feed/google_sitemap break;
-    }
-
+  rewrite ^(.*)$ /index.php?route=feed/google_sitemap break;
+}
 location = /googlebase.xml {
-    rewrite ^(.*)$ /index.php?route=feed/google_base break;
-    }
-
+  rewrite ^(.*)$ /index.php?route=feed/google_base break;
+}
 location / {
-    # This try_files directive is used to enable SEO-friendly URLs for OpenCart
-    try_files $uri $uri/ @opencart;
-    }
-
+  # This try_files directive is used to enable SEO-friendly URLs for OpenCart
+  try_files $uri $uri/ @opencart;
+}
 location @opencart {
-    rewrite ^/(.+)$ /index.php?_route_=$1 last;
-    }
+  rewrite ^/(.+)$ /index.php?_route_=$1 last;
+}

+ 4 - 4
config/thinkphp.conf

@@ -1,6 +1,6 @@
 location / {
-    if (!-e $request_filename) {
-        rewrite ^(.*)$ /index.php?s=$1 last;
-        break;
-    }
+  if (!-e $request_filename) {
+    rewrite ^(.*)$ /index.php?s=$1 last;
+    break;
+  }
 }

+ 3 - 9
config/typecho.conf

@@ -1,9 +1,3 @@
-if (-f $request_filename/index.html){
-    rewrite (.*) $1/index.html break;
-    }
-if (-f $request_filename/index.php){
-    rewrite (.*) $1/index.php;
-    }
-if (!-e $request_filename){
-    rewrite (.*) /index.php;
-    }
+if (!-e $request_filename) {
+  rewrite ^(.*)$ /index.php$1 last;
+}

+ 2 - 2
config/wordpress.conf

@@ -1,4 +1,4 @@
 location / {
-    try_files $uri $uri/ /index.php?$args;
-    }
+  try_files $uri $uri/ /index.php?$args;
+}
 rewrite /wp-admin$ $scheme://$host$uri/ permanent;

+ 7 - 7
include/get_char.sh

@@ -9,11 +9,11 @@
 #       https://github.com/lj2007331/oneinstack
 
 get_char() {
-    SAVEDSTTY=`stty -g`
-    stty -echo
-    stty cbreak
-    dd if=/dev/tty bs=1 count=1 2> /dev/null
-    stty -raw
-    stty echo
-    stty $SAVEDSTTY
+  SAVEDSTTY=`stty -g`
+  stty -echo
+  stty cbreak
+  dd if=/dev/tty bs=1 count=1 2> /dev/null
+  stty -raw
+  stty echo
+  stty $SAVEDSTTY
 }

+ 10 - 0
tools/cos/__init__.py

@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+from qcloud_cos import CosClient
+from qcloud_cos import UploadFileRequest
+from qcloud_cos import CreateFolderRequest
+from qcloud_cos import DelFileRequest
+from qcloud_cos import DelFolderRequest
+from qcloud_cos import ListFolderRequest
+import threadpool
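
A minimal usage sketch of the package these imports come from (not shown in the commit itself; the bucket name and paths are placeholders, and the UploadFileRequest argument order of bucket/cos_path/local_path is an assumption based on how UploadSliceFileRequest is used in cos_op.py below):

    from qcloud_cos import CosClient, UploadFileRequest

    # credentials are placeholders; backup_setup.sh collects the real appid/secret pair
    client = CosClient(appid=100000, secret_id=u'AKID-placeholder',
                       secret_key=u'secret-placeholder', region='sh')
    request = UploadFileRequest(u'backup', u'/db/20170101.tgz',
                                u'/data/backup/20170101.tgz')
    ret = client.upload_file(request)  # picks single vs. slice upload by file size
    print(ret)                         # {'code': 0, ...} on success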

+ 31 - 0
tools/cos/qcloud_cos/__init__.py

@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+from .cos_client import CosClient
+from .cos_client import CosConfig
+from .cos_client import CredInfo
+from .cos_request import UploadFileRequest
+from .cos_request import UploadSliceFileRequest
+from .cos_request import UpdateFileRequest
+from .cos_request import UpdateFolderRequest
+from .cos_request import DelFolderRequest
+from .cos_request import DelFileRequest
+from .cos_request import CreateFolderRequest
+from .cos_request import StatFileRequest
+from .cos_request import StatFolderRequest
+from .cos_request import ListFolderRequest
+from .cos_request import DownloadFileRequest
+from .cos_request import MoveFileRequest
+from .cos_auth import Auth
+from .cos_cred import CredInfo
+
+
+import logging
+
+try:
+    from logging import NullHandler
+except ImportError:
+    class NullHandler(logging.Handler):
+        def emit(self, record):
+            pass
+
+logging.getLogger(__name__).addHandler(NullHandler())

+ 70 - 0
tools/cos/qcloud_cos/cos_auth.py

@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import random
+import time
+import urllib
+import hmac
+import hashlib
+import binascii
+import base64
+
+
+class Auth(object):
+    def __init__(self, cred):
+        self.cred = cred
+
+    def app_sign(self, bucket, cos_path, expired, upload_sign=True):
+        appid = self.cred.get_appid()
+        bucket = bucket.encode('utf8')
+        secret_id = self.cred.get_secret_id().encode('utf8')
+        now = int(time.time())
+        rdm = random.randint(0, 999999999)
+        cos_path = urllib.quote(cos_path.encode('utf8'), '~/')
+        if upload_sign:
+            fileid = '/%s/%s%s' % (appid, bucket, cos_path)
+        else:
+            fileid = cos_path
+
+        if expired != 0 and expired < now:
+            expired = now + expired
+
+        sign_tuple = (appid, secret_id, expired, now, rdm, fileid, bucket)
+
+        plain_text = 'a=%s&k=%s&e=%d&t=%d&r=%d&f=%s&b=%s' % sign_tuple
+        secret_key = self.cred.get_secret_key().encode('utf8')
+        sha1_hmac = hmac.new(secret_key, plain_text, hashlib.sha1)
+        hmac_digest = sha1_hmac.hexdigest()
+        hmac_digest = binascii.unhexlify(hmac_digest)
+        sign_hex = hmac_digest + plain_text
+        sign_base64 = base64.b64encode(sign_hex)
+        return sign_base64
+
+    def sign_once(self, bucket, cos_path):
+        """单次签名(针对删除和更新操作)
+
+        :param bucket: bucket名称
+        :param cos_path: 要操作的cos路径, 以'/'开始
+        :return: 签名字符串
+        """
+        return self.app_sign(bucket, cos_path, 0)
+
+    def sign_more(self, bucket, cos_path, expired):
+        """多次签名(针对上传文件,创建目录, 获取文件目录属性, 拉取目录列表)
+
+        :param bucket: bucket名称
+        :param cos_path: 要操作的cos路径, 以'/'开始
+        :param expired: 签名过期时间, UNIX时间戳, 如想让签名在30秒后过期, 即可将expired设成当前时间加上30秒
+        :return: 签名字符串
+        """
+        return self.app_sign(bucket, cos_path, expired)
+
+    def sign_download(self, bucket, cos_path, expired):
+        """下载签名(用于获取后拼接成下载链接,下载私有bucket的文件)
+
+        :param bucket: bucket名称
+        :param cos_path: 要下载的cos文件路径, 以'/'开始
+        :param expired:  签名过期时间, UNIX时间戳, 如想让签名在30秒后过期, 即可将expired设成当前时间加上30秒
+        :return: 签名字符串
+        """
+        return self.app_sign(bucket, cos_path, expired, False)
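
The scheme implemented by app_sign above, written out as a standalone sketch for reference (placeholder credentials; same plain-text format, HMAC-SHA1, then base64 over the raw digest concatenated with the plain text):

    import base64, hashlib, hmac, random, time

    appid, bucket = 100000, 'backup'
    secret_id, secret_key = 'AKID-placeholder', 'secret-placeholder'
    cos_path = '/db/20170101.tgz'
    now = int(time.time())
    expired = now + 300

    fileid = '/%s/%s%s' % (appid, bucket, cos_path)
    plain_text = 'a=%s&k=%s&e=%d&t=%d&r=%d&f=%s&b=%s' % (
        appid, secret_id, expired, now, random.randint(0, 999999999), fileid, bucket)
    digest = hmac.new(secret_key.encode(), plain_text.encode(), hashlib.sha1).digest()
    sign = base64.b64encode(digest + plain_text.encode())
    print(sign)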

+ 176 - 0
tools/cos/qcloud_cos/cos_client.py

@@ -0,0 +1,176 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import requests
+from cos_cred import CredInfo
+from cos_config import CosConfig
+from cos_op import FileOp
+from cos_op import FolderOp
+from cos_request import UploadFileRequest
+from cos_request import UploadSliceFileRequest
+from cos_request import UpdateFileRequest
+from cos_request import UpdateFolderRequest
+from cos_request import DelFileRequest
+from cos_request import DelFolderRequest
+from cos_request import CreateFolderRequest
+from cos_request import StatFolderRequest
+from cos_request import StatFileRequest
+from cos_request import ListFolderRequest
+from cos_request import DownloadFileRequest
+try:
+    from requests.packages.urllib3.exceptions import InsecureRequestWarning
+    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
+except ImportError:
+    pass
+
+
+class CosClient(object):
+    """Cos客户端类"""
+
+    def __init__(self, appid, secret_id, secret_key, region="shanghai"):
+        """ 设置用户的相关信息
+
+        :param appid: appid
+        :param secret_id: secret_id
+        :param secret_key: secret_key
+        """
+        self._cred = CredInfo(appid, secret_id, secret_key)
+        self._config = CosConfig(region=region)
+        self._http_session = requests.session()
+        self._file_op = FileOp(self._cred, self._config, self._http_session)
+        self._folder_op = FolderOp(self._cred, self._config, self._http_session)
+
+    def set_config(self, config):
+        """设置config"""
+        assert isinstance(config, CosConfig)
+        self._config = config
+        self._file_op.set_config(config)
+        self._folder_op.set_config(config)
+
+    def get_config(self):
+        """获取config"""
+        return self._config
+
+    def set_cred(self, cred):
+        """设置用户的身份信息
+
+        :param cred:
+        :return:
+        """
+        assert isinstance(cred, CredInfo)
+        self._cred = cred
+        self._file_op.set_cred(cred)
+        self._folder_op.set_cred(cred)
+
+    def get_cred(self):
+        """获取用户的相关信息
+
+        :return:
+        """
+        return self._cred
+
+    def upload_file(self, request):
+        """ 上传文件(自动根据文件大小,选择上传策略, 强烈推荐使用),上传策略: 8MB以下适用单文件上传, 8MB(含)适用分片上传
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, UploadFileRequest)
+        return self._file_op.upload_file(request)
+
+    def upload_single_file(self, request):
+        """单文件上传接口, 适用用小文件8MB以下, 最大不得超过20MB, 否则会返回参数错误
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, UploadFileRequest)
+        return self._file_op.upload_single_file(request)
+
+    def upload_slice_file(self, request):
+        """ 分片上传接口, 适用于大文件8MB及以上
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, UploadSliceFileRequest)
+        return self._file_op.upload_slice_file(request)
+
+    def del_file(self, request):
+        """ 删除文件
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, DelFileRequest)
+        return self._file_op.del_file(request)
+
+    def move_file(self, request):
+        return self._file_op.move_file(request)
+
+    def stat_file(self, request):
+        """获取文件属性
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, StatFileRequest)
+        return self._file_op.stat_file(request)
+
+    def update_file(self, request):
+        """更新文件属性
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, UpdateFileRequest)
+        return self._file_op.update_file(request)
+
+    def download_file(self, request):
+        assert isinstance(request, DownloadFileRequest)
+        return self._file_op.download_file(request)
+
+    def create_folder(self, request):
+        """创建目录
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, CreateFolderRequest)
+        return self._folder_op.create_folder(request)
+
+    def del_folder(self, request):
+        """删除目录
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, DelFolderRequest)
+        return self._folder_op.del_folder(request)
+
+    def stat_folder(self, request):
+        """获取folder属性请求
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, StatFolderRequest)
+        return self._folder_op.stat_folder(request)
+
+    def update_folder(self, request):
+        """更新目录属性
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, UpdateFolderRequest)
+        return self._folder_op.update_folder(request)
+
+    def list_folder(self, request):
+        """获取目录下的文件和目录列表
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, ListFolderRequest)
+        return self._folder_op.list_folder(request)
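
For reference, the size thresholds behind upload_file's strategy choice (values taken from cos_op.py further down); a minimal sketch of the dispatch, not a real API:

    import os

    SINGLE_UPLOAD_CAP = 20 * 1024 * 1024  # hard cap for upload_single_file
    SLICE_THRESHOLD = 8 * 1024 * 1024     # upload_file switches strategy here
    SLICE_SIZE = 1024 * 1024              # slice size upload_file passes along

    def choose_strategy(local_path):
        size = os.path.getsize(local_path)
        if size < SLICE_THRESHOLD:
            return 'single'               # one POST with op=upload
        return 'slice'                    # upload_slice_init / data / finish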

+ 225 - 0
tools/cos/qcloud_cos/cos_common.py

@@ -0,0 +1,225 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import print_function
+import struct
+import io
+
+try:
+    range = xrange
+except NameError:
+    pass
+
+
+def _left_rotate(n, b):
+    """Left rotate a 32-bit integer n by b bits."""
+    return ((n << b) | (n >> (32 - b))) & 0xffffffff
+
+
+def _process_chunk(chunk, h0, h1, h2, h3, h4):
+    """Process a chunk of data and return the new digest variables."""
+    assert len(chunk) == 64
+
+    w = [0] * 80
+
+    # Break chunk into sixteen 4-byte big-endian words w[i]
+    for i in range(16):
+        w[i] = struct.unpack(b'>I', chunk[i * 4:i * 4 + 4])[0]
+
+    # Extend the sixteen 4-byte words into eighty 4-byte words
+    for i in range(16, 80):
+        w[i] = _left_rotate(w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16], 1)
+
+    # Initialize hash value for this chunk
+    a = h0
+    b = h1
+    c = h2
+    d = h3
+    e = h4
+
+    for i in range(80):
+        if 0 <= i <= 19:
+            # Use alternative 1 for f from FIPS PB 180-1 to avoid bitwise not
+            f = d ^ (b & (c ^ d))
+            k = 0x5A827999
+        elif 20 <= i <= 39:
+            f = b ^ c ^ d
+            k = 0x6ED9EBA1
+        elif 40 <= i <= 59:
+            f = (b & c) | (b & d) | (c & d)
+            k = 0x8F1BBCDC
+        elif 60 <= i <= 79:
+            f = b ^ c ^ d
+            k = 0xCA62C1D6
+
+        a, b, c, d, e = ((_left_rotate(a, 5) + f + e + k + w[i]) & 0xffffffff,
+                         a, _left_rotate(b, 30), c, d)
+
+    # Add this chunk's hash to result so far
+    h0 = (h0 + a) & 0xffffffff
+    h1 = (h1 + b) & 0xffffffff
+    h2 = (h2 + c) & 0xffffffff
+    h3 = (h3 + d) & 0xffffffff
+    h4 = (h4 + e) & 0xffffffff
+
+    return h0, h1, h2, h3, h4
+
+
+class Sha1Hash(object):
+    """A class that mimics that hashlib api and implements the SHA-1 algorithm."""
+
+    name = 'python-sha1'
+    digest_size = 20
+    block_size = 64
+
+    def __init__(self):
+        # Initial digest variables
+        self._h = (
+            0x67452301,
+            0xEFCDAB89,
+            0x98BADCFE,
+            0x10325476,
+            0xC3D2E1F0,
+        )
+
+        # bytes object with 0 <= len < 64 used to store the end of the message
+        # if the message length is not congruent to 64
+        self._unprocessed = b''
+        # Length in bytes of all data that has been processed so far
+        self._message_byte_length = 0
+
+    def update(self, arg):
+        """Update the current digest.
+        This may be called repeatedly, even after calling digest or hexdigest.
+
+        Arguments:
+            arg: bytes, bytearray, or BytesIO object to read from.
+        """
+        if isinstance(arg, (bytes, bytearray)):
+            arg = io.BytesIO(arg)
+
+        # Try to build a chunk out of the unprocessed data, if any
+        chunk = self._unprocessed + arg.read(64 - len(self._unprocessed))
+
+        # Read the rest of the data, 64 bytes at a time
+        while len(chunk) == 64:
+            self._h = _process_chunk(chunk, *self._h)
+            self._message_byte_length += 64
+            chunk = arg.read(64)
+
+        self._unprocessed = chunk
+        return self
+
+    def digest(self):
+        """Produce the final hash value (big-endian) as a bytes object"""
+        return b''.join(struct.pack(b'>I', h) for h in self._produce_digest())
+
+    def hexdigest(self):
+        """Produce the final hash value (big-endian) as a hex string"""
+        return '%08x%08x%08x%08x%08x' % self._produce_digest()
+
+    def inner_digest(self):
+
+        tmp = struct.unpack(">5I", struct.pack("<5I", *self._h))
+        return '%08x%08x%08x%08x%08x' % tmp
+
+    def _produce_digest(self):
+        """Return finalized digest variables for the data processed so far."""
+        # Pre-processing:
+        message = self._unprocessed
+        message_byte_length = self._message_byte_length + len(message)
+
+        # append the bit '1' to the message
+        message += b'\x80'
+
+        # append 0 <= k < 512 bits '0', so that the resulting message length (in bytes)
+        # is congruent to 56 (mod 64)
+        message += b'\x00' * ((56 - (message_byte_length + 1) % 64) % 64)
+
+        # append length of message (before pre-processing), in bits, as 64-bit big-endian integer
+        message_bit_length = message_byte_length * 8
+        message += struct.pack(b'>Q', message_bit_length)
+
+        # Process the final chunk
+        # At this point, the length of the message is either 64 or 128 bytes.
+        h = _process_chunk(message[:64], *self._h)
+        if len(message) == 64:
+            return h
+        return _process_chunk(message[64:], *h)
+
+
+def sha1(data):
+    """SHA-1 Hashing Function
+    A custom SHA-1 hashing function implemented entirely in Python.
+    Arguments:
+        data: A bytes or BytesIO object containing the input message to hash.
+    Returns:
+        A hex SHA-1 digest of the input message.
+    """
+    return Sha1Hash().update(data).hexdigest()
+
+
+class Sha1Util(object):
+
+    @staticmethod
+    def get_sha1_by_slice(file_name, slice_size):
+        """ Get SHA array based on Qcloud Slice Upload Interface
+
+        :param file_name: local file path
+        :param slice_size: slice size in bit
+        :return: sha array like [{"offset": 0, "datalen": 1024, "datasha": "aaa"}, {}, {}]
+        """
+        from os import path
+
+        with open(file_name, 'rb') as f:
+
+            result = []
+            file_size = path.getsize(file_name)
+            sha1_obj = Sha1Hash()
+            for current_offset in range(0, file_size, slice_size):
+
+                data_length = min(slice_size, file_size - current_offset)
+                sha1_obj.update(f.read(data_length))
+                sha1_val = sha1_obj.inner_digest()
+                result.append({"offset": current_offset, "datalen": data_length, "datasha": sha1_val})
+
+            result[-1]['datasha'] = sha1_obj.hexdigest()
+            return result
+
+
+if __name__ == '__main__':
+    # Imports required for command line parsing. No need for these elsewhere
+    import argparse
+    import sys
+    import os
+
+    # Parse the incoming arguments
+    parser = argparse.ArgumentParser()
+    parser.add_argument('input', nargs='?',
+                        help='input file or message to hash')
+    args = parser.parse_args()
+
+    data = None
+
+    if args.input is None:
+        # No argument given, assume message comes from standard input
+        try:
+            # sys.stdin is opened in text mode, which can change line endings,
+            # leading to incorrect results. Detach fixes this issue, but it's
+            # new in Python 3.1
+            data = sys.stdin.detach()
+        except AttributeError:
+            # Linux and OSX both use \n line endings, so only Windows is a
+            # problem.
+            if sys.platform == "win32":
+                import msvcrt
+
+                msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
+            data = sys.stdin
+    elif os.path.isfile(args.input):
+        # An argument is given and it's a valid file. Read it
+        data = open(args.input, 'rb')
+    else:
+        data = args.input
+
+    # Show the final digest
+    print('sha1-digest:', sha1(data))
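
Example of the per-slice SHA structure that get_sha1_by_slice produces and that the slice-upload control request later sends as 'uploadparts' (the import path and file name are placeholders; it assumes tools/cos is on sys.path):

    from qcloud_cos.cos_common import Sha1Util

    parts = Sha1Util.get_sha1_by_slice('/data/backup/20170101.tgz', 1024 * 1024)
    # -> [{'offset': 0, 'datalen': 1048576, 'datasha': '...'},
    #     ...,
    #     {'offset': ..., 'datalen': ..., 'datasha': '<sha1 of the whole file>'}]
    print(parts[-1]['datasha'])  # the last entry carries the full-file SHA-1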

+ 125 - 0
tools/cos/qcloud_cos/cos_config.py

@@ -0,0 +1,125 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+
+class CosRegionInfo(object):
+
+    def __init__(self, region=None, hostname=None, download_hostname=None, *args, **kwargs):
+        self._hostname = None
+        self._download_hostname = None
+
+        if region in ['sh', 'shanghai']:
+            self._hostname = 'sh.file.myqcloud.com'
+            self._download_hostname = 'cossh.myqcloud.com'
+
+        elif region in ['gz', 'guangzhou']:
+            self._hostname = 'gz.file.myqcloud.com'
+            self._download_hostname = 'cosgz.myqcloud.com'
+
+        elif region in ['tj', 'tianjin', 'tianjing']:  # 'tianjing' kept for compatibility with earlier releases
+            self._hostname = 'tj.file.myqcloud.com'
+            self._download_hostname = 'costj.myqcloud.com'
+
+        elif region in ['sgp', 'singapore']:
+            self._hostname = 'sgp.file.myqcloud.com'
+            self._download_hostname = 'cosspg.myqcloud.com'
+
+        elif region is not None:
+            self._hostname = '{region}.file.myqcloud.com'.format(region=region)
+            self._download_hostname = 'cos{region}.myqcloud.com'.format(region=region)
+        else:
+            if hostname and download_hostname:
+                self._hostname = hostname
+                self._download_hostname = download_hostname
+            else:
+                raise ValueError("region or [hostname, download_hostname] must be set, and region should be sh/gz/tj/sgp")
+
+    @property
+    def hostname(self):
+        assert self._hostname is not None
+        return self._hostname
+
+    @property
+    def download_hostname(self):
+        assert self._download_hostname is not None
+        return self._download_hostname
+
+
+class CosConfig(object):
+    """CosConfig 有关cos的配置"""
+
+    def __init__(self, timeout=300, sign_expired=300, enable_https=False, *args, **kwargs):
+        self._region = CosRegionInfo(*args, **kwargs)
+        self._user_agent = 'cos-python-sdk-v4'
+        self._timeout = timeout
+        self._sign_expired = sign_expired
+        self._enable_https = enable_https
+        if self._enable_https:
+            self._protocol = "https"
+        else:
+            self._protocol = "http"
+
+    def get_endpoint(self):
+        """获取域名地址
+
+        :return:
+        """
+        # tmpl = "%s://%s/files/v2"
+        return self._protocol + "://" + self._region.hostname + "/files/v2"
+
+    def get_download_hostname(self):
+        return self._region.download_hostname
+
+    def get_user_agent(self):
+        """获取HTTP头中的user_agent
+
+        :return:
+        """
+        return self._user_agent
+
+    def set_timeout(self, time_out):
+        """设置连接超时, 单位秒
+
+        :param time_out:
+        :return:
+        """
+        assert isinstance(time_out, int)
+        self._timeout = time_out
+
+    def get_timeout(self):
+        """获取连接超时,单位秒
+
+        :return:
+        """
+        return self._timeout
+
+    def set_sign_expired(self, expired):
+        """设置签名过期时间, 单位秒
+
+        :param expired:
+        :return:
+        """
+        assert isinstance(expired, int)
+        self._sign_expired = expired
+
+    def get_sign_expired(self):
+        """获取签名过期时间, 单位秒
+
+        :return:
+        """
+        return self._sign_expired
+
+    @property
+    def enable_https(self):
+        assert self._enable_https is not None
+        return self._enable_https
+
+    @enable_https.setter
+    def enable_https(self, val):
+        if val != self._enable_https:
+            if val:
+                self._enable_https = val
+                self._protocol = "https"
+            else:
+                self._enable_https = val
+                self._protocol = "http"

+ 36 - 0
tools/cos/qcloud_cos/cos_cred.py

@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+# -*- coding:utf-8 -*-
+
+from cos_params_check import ParamCheck
+
+
+class CredInfo(object):
+    """CredInfo用户的身份信息"""
+    def __init__(self, appid, secret_id, secret_key):
+        self._appid = appid
+        self._secret_id = secret_id
+        self._secret_key = secret_key
+        self._param_check = ParamCheck()
+
+    def get_appid(self):
+        return self._appid
+
+    def get_secret_id(self):
+        return self._secret_id
+
+    def get_secret_key(self):
+        return self._secret_key
+
+    def check_params_valid(self):
+        if not self._param_check.check_param_int('appid', self._appid):
+            return False
+        if not self._param_check.check_param_unicode('secret_id', self._secret_id):
+            return False
+        return self._param_check.check_param_unicode('secret_key', self._secret_key)
+
+    def get_err_tips(self):
+        """获取错误信息
+
+        :return:
+        """
+        return self._param_check.get_err_tips()
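
A small sketch of the credential check (CosClient builds a CredInfo internally; judging by the check_param_int/check_param_unicode calls, appid is expected to be an int and the secrets unicode strings; the import path assumes tools/cos is on sys.path):

    from qcloud_cos.cos_cred import CredInfo

    cred = CredInfo(100000, u'AKID-placeholder', u'secret-placeholder')
    if not cred.check_params_valid():
        print(cred.get_err_tips())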

+ 14 - 0
tools/cos/qcloud_cos/cos_err.py

@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+
+class CosErr(object):
+    """sdk错误码"""
+    PARAMS_ERROR = -1  # 参数错误
+    NETWORK_ERROR = -2  # 网络错误
+    SERVER_ERROR = -3  # server端返回错误
+    UNKNOWN_ERROR = -4  # 未知错误
+
+    @staticmethod
+    def get_err_msg(errcode, err_info):
+        return {u'code': errcode, u'message': err_info}

+ 703 - 0
tools/cos/qcloud_cos/cos_op.py

@@ -0,0 +1,703 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import os
+import time
+import json
+import hashlib
+import urllib
+from contextlib import closing
+import cos_auth
+from cos_err import CosErr
+from cos_request import UploadFileRequest
+from cos_request import UploadSliceFileRequest
+from cos_request import UpdateFileRequest
+from cos_request import DelFileRequest
+from cos_request import StatFileRequest
+from cos_request import CreateFolderRequest
+from cos_request import UpdateFolderRequest
+from cos_request import StatFolderRequest
+from cos_request import DelFolderRequest
+from cos_request import ListFolderRequest, DownloadFileRequest, MoveFileRequest
+from cos_common import Sha1Util
+
+from logging import getLogger
+from traceback import format_exc
+
+logger = getLogger(__name__)
+
+
+class BaseOp(object):
+    """
+    BaseOp基本操作类型
+    """
+
+    def __init__(self, cred, config, http_session):
+        """ 初始化类
+
+        :param cred: 用户的身份信息
+        :param config: cos_config配置类
+        :param http_session: http 会话
+        """
+        self._cred = cred
+        self._config = config
+        self._http_session = http_session
+        self._expired_period = self._config.get_sign_expired()
+
+    def set_cred(self, cred):
+        """设置用户的身份信息
+
+        :param cred:
+        :return:
+        """
+        self._cred = cred
+
+    def set_config(self, config):
+        """ 设置config
+
+        :param config:
+        :return:
+        """
+        self._config = config
+        self._expired_period = self._config.get_sign_expired()
+
+    def _build_url(self, bucket, cos_path):
+        """生成url
+
+        :param bucket:
+        :param cos_path:
+        :return:
+        """
+        bucket = bucket.encode('utf8')
+        end_point = self._config.get_endpoint().rstrip('/').encode('utf8')
+        appid = self._cred.get_appid()
+        cos_path = urllib.quote(cos_path.encode('utf8'), '~/')
+        url = '%s/%s/%s%s' % (end_point, appid, bucket, cos_path)
+        return url
+
+    def build_download_url(self, bucket, cos_path, sign):
+        # Only support http now
+        appid = self._cred.get_appid()
+        hostname = self._config.get_download_hostname()
+        cos_path = urllib.quote(cos_path)
+        url_tmpl = 'http://{bucket}-{appid}.{hostname}{cos_path}?sign={sign}'
+
+        return url_tmpl.format(bucket=bucket, appid=appid, hostname=hostname, cos_path=cos_path, sign=sign)
+
+    def send_request(self, method, bucket, cos_path, **kwargs):
+        """ 发送http请求
+
+        :param method:
+        :param bucket:
+        :param cos_path:
+        :param args:
+        :return:
+        """
+        url = self._build_url(bucket, cos_path)
+        logger.debug("sending request, method: %s, bucket: %s, cos_path: %s" % (method, bucket, cos_path))
+
+        try:
+            if method == 'POST':
+                http_resp = self._http_session.post(url, verify=False, **kwargs)
+            else:
+                http_resp = self._http_session.get(url, verify=False, **kwargs)
+
+            status_code = http_resp.status_code
+            if status_code == 200 or status_code == 400:
+                return http_resp.json()
+            else:
+                logger.warning("request failed, response message: %s" % http_resp.text)
+                err_detail = 'url:%s, status_code:%d' % (url, status_code)
+                return CosErr.get_err_msg(CosErr.NETWORK_ERROR, err_detail)
+        except Exception as e:
+            logger.exception("request failed, return SERVER_ERROR")
+            err_detail = 'url:%s, exception:%s traceback:%s' % (url, str(e), format_exc())
+            return CosErr.get_err_msg(CosErr.SERVER_ERROR, err_detail)
+
+    def _check_params(self, request):
+        """检查用户输入参数, 检查通过返回None, 否则返回一个代表错误原因的dict
+
+        :param request:
+        :return:
+        """
+        if not self._cred.check_params_valid():
+            return CosErr.get_err_msg(CosErr.PARAMS_ERROR, self._cred.get_err_tips())
+        if not request.check_params_valid():
+            return CosErr.get_err_msg(CosErr.PARAMS_ERROR, request.get_err_tips())
+        return None
+
+    def del_base(self, request):
+        """删除文件或者目录, is_file_op为True表示是文件操作
+
+        :param request:
+        :return:
+        """
+        check_params_ret = self._check_params(request)
+        if check_params_ret is not None:
+            return check_params_ret
+
+        auth = cos_auth.Auth(self._cred)
+        bucket = request.get_bucket_name()
+        cos_path = request.get_cos_path()
+        sign = auth.sign_once(bucket, cos_path)
+
+        http_header = dict()
+        http_header['Authorization'] = sign
+        http_header['Content-Type'] = 'application/json'
+        http_header['User-Agent'] = self._config.get_user_agent()
+
+        http_body = {'op': 'delete'}
+
+        timeout = self._config.get_timeout()
+        return self.send_request('POST', bucket, cos_path, headers=http_header, data=json.dumps(http_body), timeout=timeout)
+
+    def stat_base(self, request):
+        """获取文件和目录的属性
+
+        :param request:
+        :return:
+        """
+        check_params_ret = self._check_params(request)
+        if check_params_ret is not None:
+            return check_params_ret
+
+        auth = cos_auth.Auth(self._cred)
+        bucket = request.get_bucket_name()
+        cos_path = request.get_cos_path()
+        expired = int(time.time()) + self._expired_period
+        sign = auth.sign_more(bucket, cos_path, expired)
+
+        http_header = dict()
+        http_header['Authorization'] = sign
+        http_header['User-Agent'] = self._config.get_user_agent()
+
+        http_body = dict()
+        http_body['op'] = 'stat'
+
+        timeout = self._config.get_timeout()
+        return self.send_request('GET', bucket, cos_path, headers=http_header, params=http_body, timeout=timeout)
+
+
+class FileOp(BaseOp):
+    """FileOp 文件相关操作"""
+
+    def __init__(self, cred, config, http_session):
+        """ 初始化类
+
+        :param cred: 用户的身份信息
+        :param config: cos_config配置类
+        :param http_session: http 会话
+        """
+        BaseOp.__init__(self, cred, config, http_session)
+        # the hard upper limit for a single-file upload is 20MB
+        self.max_single_file = 20 * 1024 * 1024
+
+    @staticmethod
+    def _sha1_content(content):
+        """获取content的sha1
+
+        :param content:
+        :return:
+        """
+        sha1_obj = hashlib.sha1()
+        sha1_obj.update(content)
+        return sha1_obj.hexdigest()
+
+    def update_file(self, request):
+        """更新文件
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, UpdateFileRequest)
+        logger.debug("request: " + str(request.get_custom_headers()))
+        check_params_ret = self._check_params(request)
+        if check_params_ret is not None:
+            return check_params_ret
+
+        logger.debug("params verify successfully")
+        auth = cos_auth.Auth(self._cred)
+        bucket = request.get_bucket_name()
+        cos_path = request.get_cos_path()
+        sign = auth.sign_once(bucket, cos_path)
+
+        http_header = dict()
+        http_header['Authorization'] = sign
+        http_header['Content-Type'] = 'application/json'
+        http_header['User-Agent'] = self._config.get_user_agent()
+
+        http_body = dict()
+        http_body['op'] = 'update'
+
+        if request.get_biz_attr() is not None:
+            http_body['biz_attr'] = request.get_biz_attr()
+
+        if request.get_authority() is not None:
+            http_body['authority'] = request.get_authority()
+
+        if request.get_custom_headers() is not None and len(request.get_custom_headers()) != 0:
+            http_body['custom_headers'] = request.get_custom_headers()
+        logger.debug("Update Request Header: " + json.dumps(http_body))
+        timeout = self._config.get_timeout()
+        return self.send_request('POST', bucket, cos_path, headers=http_header, data=json.dumps(http_body), timeout=timeout)
+
+    def del_file(self, request):
+        """删除文件
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, DelFileRequest)
+        return self.del_base(request)
+
+    def stat_file(self, request):
+        """获取文件的属性
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, StatFileRequest)
+        return self.stat_base(request)
+
+    def upload_file(self, request):
+        """上传文件, 根据用户的文件大小,选择单文件上传和分片上传策略
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, UploadFileRequest)
+        check_params_ret = self._check_params(request)
+        if check_params_ret is not None:
+            return check_params_ret
+
+        local_path = request.get_local_path()
+        file_size = os.path.getsize(local_path)
+
+        suit_single_file_zie = 8 * 1024 * 1024
+        if file_size < suit_single_file_zie:
+            return self.upload_single_file(request)
+        else:
+            bucket = request.get_bucket_name()
+            cos_path = request.get_cos_path()
+            local_path = request.get_local_path()
+            slice_size = 1024 * 1024
+            biz_attr = request.get_biz_attr()
+            upload_slice_request = UploadSliceFileRequest(bucket, cos_path, local_path, slice_size, biz_attr)
+            upload_slice_request.set_insert_only(request.get_insert_only())
+            return self.upload_slice_file(upload_slice_request)
+
+    def upload_single_file(self, request):
+        """ 单文件上传
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, UploadFileRequest)
+        check_params_ret = self._check_params(request)
+        if check_params_ret is not None:
+            return check_params_ret
+
+        local_path = request.get_local_path()
+        file_size = os.path.getsize(local_path)
+        # if the file exceeds the single-file upload limit, return an error
+        # telling the caller to use another interface
+        if file_size > self.max_single_file:
+            return CosErr.get_err_msg(CosErr.NETWORK_ERROR, 'file is too big, please use upload_file interface')
+
+        auth = cos_auth.Auth(self._cred)
+        bucket = request.get_bucket_name()
+        cos_path = request.get_cos_path()
+        expired = int(time.time()) + self._expired_period
+        sign = auth.sign_more(bucket, cos_path, expired)
+
+        http_header = dict()
+        http_header['Authorization'] = sign
+        http_header['User-Agent'] = self._config.get_user_agent()
+
+        with open(local_path, 'rb') as f:
+            file_content = f.read()
+
+        http_body = dict()
+        http_body['op'] = 'upload'
+        http_body['filecontent'] = file_content
+        http_body['sha'] = FileOp._sha1_content(file_content)
+        http_body['biz_attr'] = request.get_biz_attr()
+        http_body['insertOnly'] = str(request.get_insert_only())
+
+        timeout = self._config.get_timeout()
+        ret = self.send_request('POST', bucket, cos_path, headers=http_header, files=http_body, timeout=timeout)
+
+        if request.get_insert_only() != 0:
+            return ret
+
+        if ret[u'code'] == 0:
+            return ret
+
+        # try to delete object, and re-post request
+        del_request = DelFileRequest(bucket_name=request.get_bucket_name(), cos_path=request.get_cos_path())
+        ret = self.del_file(del_request)
+        if ret[u'code'] == 0:
+            return self.send_request('POST', bucket, cos_path, headers=http_header, files=http_body, timeout=timeout)
+        else:
+            return ret
+
+    def _upload_slice_file(self, request):
+        assert isinstance(request, UploadSliceFileRequest)
+        check_params_ret = self._check_params(request)
+        if check_params_ret is not None:
+            return check_params_ret
+
+        local_path = request.get_local_path()
+        slice_size = request.get_slice_size()
+        enable_sha1 = request.enable_sha1
+
+        if enable_sha1 is True:
+            sha1_by_slice_list = Sha1Util.get_sha1_by_slice(local_path, slice_size)
+            request.sha1_list = sha1_by_slice_list
+            request.sha1_content = sha1_by_slice_list[-1]["datasha"]
+        else:
+            request.sha1_list = None
+            request.sha1_content = None
+
+        control_ret = self._upload_slice_control(request)
+
+        # the control-slice request already returned an error
+        if control_ret[u'code'] != 0:
+            return control_ret
+
+        # "instant upload" hit: the server already has this content
+        if u'access_url' in control_ret[u'data']:
+            return control_ret
+
+        local_path = request.get_local_path()
+        file_size = os.path.getsize(local_path)
+        slice_size = control_ret[u'data'][u'slice_size']
+        offset = 0
+        session = control_ret[u'data'][u'session']
+        # ?concurrency
+        if request._max_con <= 1 or (
+                u'serial_upload' in control_ret[u'data'] and control_ret[u'data'][u'serial_upload'] == 1):
+
+            logger.info("upload file serially")
+            slice_idx = 0
+            with open(local_path, 'rb') as local_file:
+
+                while offset < file_size:
+                    file_content = local_file.read(slice_size)
+
+                    data_ret = self._upload_slice_data(request, file_content, session, offset)
+
+                    if data_ret[u'code'] == 0:
+                        if u'access_url' in data_ret[u'data']:
+                            return data_ret
+                    else:
+                        return data_ret
+
+                    offset += slice_size
+                    slice_idx += 1
+        else:
+            logger.info('upload file concurrently')
+            from threadpool import SimpleThreadPool
+            pool = SimpleThreadPool(request._max_con)
+
+            slice_idx = 0
+            with open(local_path, 'rb') as local_file:
+
+                while offset < file_size:
+                    file_content = local_file.read(slice_size)
+
+                    pool.add_task(self._upload_slice_data, request, file_content, session, offset)
+
+                    offset += slice_size
+                    slice_idx += 1
+
+            pool.wait_completion()
+            result = pool.get_result()
+            if not result['success_all']:
+                return {u'code': 1, u'message': str(result)}
+
+        data_ret = self._upload_slice_finish(request, session, file_size)
+        return data_ret
+
+    def upload_slice_file(self, request):
+        """分片文件上传(串行)
+
+        :param request:
+        :return:
+        """
+        ret = self._upload_slice_file(request)
+
+        if ret[u'code'] == 0:
+            return ret
+
+        if request.get_insert_only() == 0:
+            del_request = DelFileRequest(request.get_bucket_name(), request.get_cos_path())
+            ret = self.del_file(del_request)
+            if ret[u'code'] == 0:
+                return self._upload_slice_file(request)
+            else:
+                return ret
+        else:
+            return ret
+
+    def _upload_slice_finish(self, request, session, filesize):
+        auth = cos_auth.Auth(self._cred)
+        bucket = request.get_bucket_name()
+        cos_path = request.get_cos_path()
+        expired = int(time.time()) + self._expired_period
+        sign = auth.sign_more(bucket, cos_path, expired)
+
+        http_header = dict()
+        http_header['Authorization'] = sign
+        http_header['User-Agent'] = self._config.get_user_agent()
+
+        http_body = dict()
+        http_body['op'] = "upload_slice_finish"
+        http_body['session'] = session
+        http_body['filesize'] = str(filesize)
+        if request.sha1_list is not None:
+            http_body['sha'] = request.sha1_list[-1]["datasha"]
+        timeout = self._config.get_timeout()
+
+        return self.send_request('POST', bucket, cos_path, headers=http_header, files=http_body, timeout=timeout)
+
+    def _upload_slice_control(self, request):
+        """串行分片第一步, 上传控制分片
+
+        :param request:
+        :return:
+        """
+        auth = cos_auth.Auth(self._cred)
+        bucket = request.get_bucket_name()
+        cos_path = request.get_cos_path()
+        expired = int(time.time()) + self._expired_period
+        sign = auth.sign_more(bucket, cos_path, expired)
+
+        http_header = dict()
+        http_header['Authorization'] = sign
+        http_header['User-Agent'] = self._config.get_user_agent()
+
+        local_path = request.get_local_path()
+        file_size = os.path.getsize(local_path)
+        slice_size = request.get_slice_size()
+        biz_atrr = request.get_biz_attr()
+
+        http_body = dict()
+        http_body['op'] = 'upload_slice_init'
+        if request.enable_sha1:
+            http_body['sha'] = request.sha1_list[-1]["datasha"]
+            http_body['uploadparts'] = json.dumps(request.sha1_list)
+        http_body['filesize'] = str(file_size)
+        http_body['slice_size'] = str(slice_size)
+        http_body['biz_attr'] = biz_atrr
+        http_body['insertOnly'] = str(request.get_insert_only())
+
+        timeout = self._config.get_timeout()
+        return self.send_request('POST', bucket, cos_path, headers=http_header, files=http_body, timeout=timeout)
+
+    def _upload_slice_data(self, request, file_content, session, offset, retry=3):
+        """串行分片第二步, 上传数据分片
+
+        :param request:
+        :param file_content:
+        :param session:
+        :param offset:
+        :return:
+        """
+        bucket = request.get_bucket_name()
+        cos_path = request.get_cos_path()
+        auth = cos_auth.Auth(self._cred)
+        expired = int(time.time()) + self._expired_period
+        sign = auth.sign_more(bucket, cos_path, expired)
+
+        http_header = dict()
+        http_header['Authorization'] = sign
+        http_header['User-Agent'] = self._config.get_user_agent()
+
+        http_body = dict()
+        http_body['op'] = 'upload_slice_data'
+        http_body['filecontent'] = file_content
+        http_body['session'] = session
+        http_body['offset'] = str(offset)
+        if request.sha1_content is not None:
+            http_body['sha'] = request.sha1_content
+
+        timeout = self._config.get_timeout()
+
+        for _ in range(retry):
+            ret = self.send_request('POST', bucket, cos_path, headers=http_header, files=http_body, timeout=timeout)
+            if ret['code'] == 0:
+                return ret
+        else:
+            return ret
+
+    def __download_url(self, uri, filename):
+        session = self._http_session
+
+        with closing(session.get(uri, stream=True, timeout=150)) as ret:
+            if ret.status_code in [200, 206]:
+
+                if 'Content-Length' in ret.headers:
+                    content_len = int(ret.headers['Content-Length'])
+                else:
+                    raise IOError("download failed without Content-Length header")
+
+                file_len = 0
+                with open(filename, 'wb') as f:
+                    for chunk in ret.iter_content(chunk_size=1024):
+                        if chunk:
+                            file_len += len(chunk)
+                            f.write(chunk)
+                    f.flush()
+                if file_len != content_len:
+                    raise IOError("download failed with incomplete file")
+            else:
+                raise IOError("download failed with status code:" + str(ret.status_code))
+
+    def download_file(self, request):
+        assert isinstance(request, DownloadFileRequest)
+
+        auth = cos_auth.Auth(self._cred)
+        sign = auth.sign_download(request.get_bucket_name(), request.get_cos_path(), self._config.get_sign_expired())
+        url = self.build_download_url(request.get_bucket_name(), request.get_cos_path(), sign)
+        logger.info("Uri is %s" % url)
+        try:
+            self.__download_url(url, request._local_filename)
+            return {u'code': 0, u'message': "download successfully"}
+        except Exception as e:
+            return {u'code': 1, u'message': "download failed, exception: " + str(e)}
+
+    def __move_file(self, request):
+
+        auth = cos_auth.Auth(self._cred)
+        bucket = request.get_bucket_name()
+        cos_path = request.get_cos_path()
+        sign = auth.sign_once(bucket, cos_path)
+
+        http_header = dict()
+        http_header['Authorization'] = sign
+        http_header['User-Agent'] = self._config.get_user_agent()
+
+        http_body = dict()
+        http_body['op'] = 'move'
+        http_body['dest_fileid'] = request.dest_path
+        http_body['to_over_write'] = str(1 if request.overwrite else 0)
+
+        timeout = self._config.get_timeout()
+        return self.send_request('POST', bucket, cos_path, headers=http_header, params=http_body, timeout=timeout)
+
+    def move_file(self, request):
+
+        assert isinstance(request, MoveFileRequest)
+        return self.__move_file(request)
+
+
+class FolderOp(BaseOp):
+    """FolderOp 目录相关操作"""
+    def __init__(self, cred, config, http_session):
+        BaseOp.__init__(self, cred, config, http_session)
+
+    def update_folder(self, request):
+        """更新目录
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, UpdateFolderRequest)
+        check_params_ret = self._check_params(request)
+        if check_params_ret is not None:
+            return check_params_ret
+
+        auth = cos_auth.Auth(self._cred)
+        bucket = request.get_bucket_name()
+        cos_path = request.get_cos_path()
+        sign = auth.sign_once(bucket, cos_path)
+
+        http_header = dict()
+        http_header['Authorization'] = sign
+        http_header['Content-Type'] = 'application/json'
+        http_header['User-Agent'] = self._config.get_user_agent()
+
+        http_body = dict()
+        http_body['op'] = 'update'
+        http_body['biz_attr'] = request.get_biz_attr()
+
+        timeout = self._config.get_timeout()
+        return self.send_request('POST', bucket, cos_path, headers=http_header, data=json.dumps(http_body), timeout=timeout)
+
+    def del_folder(self, request):
+        """删除目录
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, DelFolderRequest)
+        return self.del_base(request)
+
+    def stat_folder(self, request):
+        """获取目录属性
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, StatFolderRequest)
+        return self.stat_base(request)
+
+    def create_folder(self, request):
+        """创建目录
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, CreateFolderRequest)
+        check_params_ret = self._check_params(request)
+        if check_params_ret is not None:
+            return check_params_ret
+
+        auth = cos_auth.Auth(self._cred)
+        bucket = request.get_bucket_name()
+        cos_path = request.get_cos_path()
+        expired = int(time.time()) + self._expired_period
+        sign = auth.sign_more(bucket, cos_path, expired)
+
+        http_header = dict()
+        http_header['Authorization'] = sign
+        http_header['Content-Type'] = 'application/json'
+        http_header['User-Agent'] = self._config.get_user_agent()
+
+        http_body = dict()
+        http_body['op'] = 'create'
+        http_body['biz_attr'] = request.get_biz_attr()
+
+        timeout = self._config.get_timeout()
+        return self.send_request('POST', bucket, cos_path, headers=http_header, data=json.dumps(http_body), timeout=timeout)
+
+    def list_folder(self, request):
+        """list目录
+
+        :param request:
+        :return:
+        """
+        assert isinstance(request, ListFolderRequest)
+        check_params_ret = self._check_params(request)
+        if check_params_ret is not None:
+            return check_params_ret
+
+        http_body = dict()
+        http_body['op'] = 'list'
+        http_body['num'] = request.get_num()
+
+        http_body['context'] = request.get_context()
+
+        auth = cos_auth.Auth(self._cred)
+        bucket = request.get_bucket_name()
+        list_path = request.get_cos_path() + request.get_prefix()
+        expired = int(time.time()) + self._expired_period
+        sign = auth.sign_more(bucket, list_path, expired)
+
+        http_header = dict()
+        http_header['Authorization'] = sign
+        http_header['User-Agent'] = self._config.get_user_agent()
+
+        timeout = self._config.get_timeout()
+        return self.send_request('GET', bucket, list_path, headers=http_header, params=http_body, timeout=timeout)
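A minimal usage sketch for the file/folder operations above. It assumes the vendored cos package re-exports the client and request classes (coscmd below imports them with `from cos import CosClient, CreateFolderRequest, ListFolderRequest`), and that CosClient is constructed from the appid/secret_id/secret_key/region fields coscmd stores in its config; verify the constructor signature against the vendored package. Bucket names and cos paths must be unicode, and folder paths must end with /.

    # sketch only: appid, keys and bucket below are placeholders
    from cos import CosClient, CreateFolderRequest, ListFolderRequest

    cos_client = CosClient(100000, u'your_secret_id', u'your_secret_key', region='sh')

    # every operation returns a dict carrying u'code' (0 on success) and u'message'
    ret = cos_client.create_folder(CreateFolderRequest(u'mybucket', u'/backup/'))
    print("create_folder: %s %s" % (ret[u'code'], ret[u'message']))

    ret = cos_client.list_folder(ListFolderRequest(u'mybucket', u'/backup/'))
    if ret[u'code'] == 0:
        for info in ret['data']['infos']:
            print(info['name'])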

+ 225 - 0
tools/cos/qcloud_cos/cos_params_check.py

@@ -0,0 +1,225 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import os
+import re
+
+
+class ParamCheck(object):
+    """BaseRequest基本类型的请求"""
+    def __init__(self):
+        self._err_tips = u''
+
+    def get_err_tips(self):
+        """获取错误信息
+
+        :return:
+        """
+        return self._err_tips
+
+    def check_param_unicode(self, param_name, param_value):
+        """检查参数是否是unicode
+
+        :param param_name: param_name 参数名
+        :param param_value: param_value 参数值
+        :return:
+        """
+        if param_value is None:
+            self._err_tips = param_name + ' is None!'
+            return False
+        if not isinstance(param_value, unicode):
+            self._err_tips = param_name + ' is not unicode!'
+            return False
+        return True
+
+    def check_param_int(self, param_name, param_value):
+        """检查参数是否是int
+
+        :param param_name: param_name 参数名
+        :param param_value: param_value 参数值
+        :return:
+        """
+        if param_value is None:
+            self._err_tips = param_name + ' is None!'
+            return False
+        if not isinstance(param_value, int):
+            self._err_tips = param_name + ' is not int!'
+            return False
+        return True
+
+    def check_cos_path_valid(self, cos_path, is_file_path):
+        """检查cos_path是否合法
+
+        路径必须以/开始,文件路径则不能以/结束, 目录路径必须以/结束
+
+        :param cos_path:
+        :param is_file_path:
+        :return: True for valid path, other False
+        """
+        if cos_path[0] != u'/':
+            self._err_tips = 'cos path must start with /'
+            return False
+
+        last_letter = cos_path[len(cos_path) - 1]
+        if is_file_path and last_letter == u'/':
+            self._err_tips = 'for file operation, cos_path must not end with /'
+            return False
+        elif not is_file_path and last_letter != u'/':
+            self._err_tips = 'for folder operation, cos_path must end with /'
+            return False
+        else:
+            pass
+
+        illegal_letters = ['?', '*', ':', '|', '\\', '<', '>', '"']
+        for illegal_letter in illegal_letters:
+            if cos_path.find(illegal_letter) != -1:
+                self._err_tips = 'cos path contains illegal character %s' % illegal_letter
+                return False
+
+        pattern = re.compile(r'/(\s*)/')
+        if pattern.search(cos_path):
+            self._err_tips = 'cos path contains illegal sequence / /'
+            return False
+        return True
+
+    def check_not_cos_root(self, cos_path):
+        """检查不是cos的根路径
+
+        不能对根路径操作的有 1 update 2 create 3 delete
+        :param cos_path:
+        :return:
+        """
+        if cos_path == u'/':
+            self._err_tips = ('bucket operation is not supported by sdk,'
+                              ' please use cos console: https://console.qcloud.com/cos')
+            return False
+        else:
+            return True
+
+    def check_local_file_valid(self, local_path):
+        """检查本地文件有效(存在并且可读)
+
+        :param local_path:
+        :return:
+        """
+        if not os.path.exists(local_path):
+            self._err_tips = 'local_file %s not exist!' % local_path
+            return False
+        if not os.path.isfile(local_path):
+            self._err_tips = 'local_file %s is not regular file!' % local_path
+            return False
+        if not os.access(local_path, os.R_OK):
+            self._err_tips = 'local_file %s is not readable!' % local_path
+            return False
+        return True
+
+    def check_slice_size(self, slice_size):
+        """检查分片大小有效
+
+        :param slice_size:
+        :return:
+        """
+        min_size = 64 * 1024           # 64KB
+        max_size = 3 * 1024 * 1024     # 3MB
+
+        if max_size >= slice_size >= min_size:
+            return True
+        else:
+            self._err_tips = 'slice_size is invalid, only accept [%d, %d]' \
+                    % (min_size, max_size)
+            return False
+
+    def check_insert_only(self, insert_only):
+        """检查文件上传的insert_only参数
+
+        :param insert_only:
+        :return:
+        """
+        if insert_only != 1 and insert_only != 0:
+            self._err_tips = 'insert_only only support 0 and 1'
+            return False
+        else:
+            return True
+
+    def check_move_over_write(self, to_over_write):
+        """检查move的over write标志
+
+        :param to_over_write:
+        :return:
+        """
+        if to_over_write != 1 and to_over_write != 0:
+            self._err_tips = 'to_over_write only support 0 and 1'
+            return False
+        else:
+            return True
+
+    def check_file_authority(self, authority):
+        """检查文件的authority属性
+
+        合法的取值只有eInvalid, eWRPrivate, eWPrivateRPublic和空值
+        :param authority:
+        :return:
+        """
+        if authority not in (u'', u'eInvalid', u'eWRPrivate', u'eWPrivateRPublic'):
+            self._err_tips = 'file authority valid value is: eInvalid, eWRPrivate, eWPrivateRPublic'
+            return False
+        else:
+            return True
+
+    def check_x_cos_meta_dict(self, x_cos_meta_dict):
+        """检查x_cos_meta_dict, key和value都必须是UTF8编码
+
+        :param x_cos_meta_dict:
+        :return:
+        """
+        prefix_len = len('x-cos-meta-')
+        for key in x_cos_meta_dict.keys():
+            if not self.check_param_unicode('x-cos-meta-key', key):
+                return False
+            if not self.check_param_unicode('x-cos-meta-value', x_cos_meta_dict[key]):
+                return False
+            if key[0:prefix_len] != u'x-cos-meta-':
+                self._err_tips = 'x-cos-meta key must start with x-cos-meta-'
+                return False
+            if len(key) == prefix_len:
+                self._err_tips = 'x-cos-meta key must not just be x-cos-meta-'
+                return False
+            if len(x_cos_meta_dict[key]) == 0:
+                self._err_tips = 'x-cos-meta value must not be empty'
+                return False
+        return True
+
+    def check_update_flag(self, flag):
+        """检查更新文件的flag
+
+        :param flag:
+        :return:
+        """
+        if flag == 0:
+            self._err_tips = 'no attribute to be updated!'
+            return False
+        else:
+            return True
+
+    def check_list_order(self, list_order):
+        """ 检查list folder的order
+
+        :param list_order: 合法取值0(正序), 1(逆序)
+        :return:
+        """
+        if list_order != 0 and list_order != 1:
+            self._err_tips = 'list order is invalid, please use 0(positive) or 1(reverse)!'
+            return False
+        else:
+            return True
+
+    def check_list_pattern(self, list_pattern):
+        """检查list folder的pattern
+
+        :param list_pattern: 合法取值eListBoth, eListDirOnly, eListFileOnly
+        :return:
+        """
+        if list_pattern != u'eListBoth' and list_pattern != u'eListDirOnly' and list_pattern != u'eListFileOnly':
+            self._err_tips = 'list pattern is invalid, please use eListBoth or eListDirOnly or eListFileOnly'
+            return False
+        else:
+            return True
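To make the path and size rules above concrete, a short sketch; it assumes tools/cos/qcloud_cos is on sys.path so the module can be imported the same way cos_request.py imports it.

    from cos_params_check import ParamCheck

    checker = ParamCheck()
    print(checker.check_cos_path_valid(u'/backup/db.tar.gz', is_file_path=True))   # True
    print(checker.check_cos_path_valid(u'/backup/', is_file_path=True))            # False: file path ends with /
    print(checker.check_cos_path_valid(u'backup/', is_file_path=False))            # False: must start with /
    print(checker.check_cos_path_valid(u'/a//b/', is_file_path=False))             # False: empty path segment
    print(checker.get_err_tips())
    print(checker.check_slice_size(512 * 1024))                                    # True: within [64KB, 3MB]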

+ 626 - 0
tools/cos/qcloud_cos/cos_request.py

@@ -0,0 +1,626 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""the request type in tencent qcloud cos"""
+
+from cos_params_check import ParamCheck
+import collections
+
+
+class BaseRequest(object):
+    """BaseRequest基本类型的请求"""
+
+    def __init__(self, bucket_name, cos_path):
+        """ 类初始化
+
+        :param bucket_name: bucket的名称
+        :param cos_path: cos的绝对路径, 即从bucket下的根/开始
+        """
+        self._bucket_name = bucket_name.strip()
+        self._cos_path = cos_path.strip()
+        self._param_check = ParamCheck()
+
+    def set_bucket_name(self, bucket_name=u''):
+        """设置bucket_name
+
+        :param bucket_name:
+        :return:
+        """
+        self._bucket_name = bucket_name.strip()
+
+    def get_bucket_name(self):
+        """获取bucket_name
+
+        :return:
+        """
+        return self._bucket_name
+
+    def set_cos_path(self, cos_path=u''):
+        """设置cos_path
+
+        :param cos_path:
+        :return:
+        """
+        self._cos_path = cos_path.strip()
+
+    def get_cos_path(self):
+        """获取cos_path
+
+        :return:
+        """
+        return self._cos_path
+
+    def get_err_tips(self):
+        """获取错误信息
+
+        :return:
+        """
+        return self._param_check.get_err_tips()
+
+    def check_params_valid(self):
+        """检查参数是否合法
+
+        :return:
+        """
+        if not self._param_check.check_param_unicode('bucket', self._bucket_name):
+            return False
+        return self._param_check.check_param_unicode('cos_path', self._cos_path)
+
+
+class CreateFolderRequest(BaseRequest):
+    """CreateFolderRequest  创建目录类型的请求"""
+
+    def __init__(self, bucket_name, cos_path, biz_attr=u''):
+        """
+
+        :param bucket_name: name of the bucket
+        :param cos_path: absolute path on cos, starting from the bucket root /
+        :param biz_attr: attributes of the folder
+        """
+        super(CreateFolderRequest, self).__init__(bucket_name, cos_path)
+        self._biz_attr = biz_attr
+
+    def set_biz_attr(self, biz_attr):
+        """设置biz_attr
+
+        :param biz_attr:
+        :return:
+        """
+        self._biz_attr = biz_attr
+
+    def get_biz_attr(self):
+        """ 获取biz_attr
+
+        :return:
+        """
+        return self._biz_attr
+
+    def check_params_valid(self):
+        """检查参数是否合法
+
+        :return:
+        """
+        if not super(CreateFolderRequest, self).check_params_valid():
+            return False
+        if not self._param_check.check_param_unicode('biz_attr', self._biz_attr):
+            return False
+        if not self._param_check.check_cos_path_valid(self._cos_path, is_file_path=False):
+            return False
+        return self._param_check.check_not_cos_root(self._cos_path)
+
+
+class UploadFileRequest(BaseRequest):
+    """
+    UploadFileRequest: single-file upload request
+    """
+
+    def __init__(self, bucket_name, cos_path, local_path, biz_attr=u'', insert_only=1):
+        """
+
+        :param bucket_name: name of the bucket
+        :param cos_path: absolute path on cos (destination), starting from the bucket root /
+        :param local_path: local file path to upload (source)
+        :param biz_attr: attributes of the file
+        :param insert_only: overwrite flag; 0 overwrites an existing file, 1 does not and returns an error
+        """
+        super(UploadFileRequest, self).__init__(bucket_name, cos_path)
+        self._local_path = local_path.strip()
+        self._biz_attr = biz_attr
+        self._insert_only = insert_only
+
+    def set_local_path(self, local_path):
+        """设置local_path
+
+        :param local_path:
+        :return:
+        """
+        self._local_path = local_path.strip()
+
+    def get_local_path(self):
+        """获取local_path
+
+        :return:
+        """
+        return self._local_path
+
+    def set_biz_attr(self, biz_attr):
+        """设置biz_attr
+
+        :param biz_attr:
+        :return:
+        """
+        self._biz_attr = biz_attr
+
+    def get_biz_attr(self):
+        """获取biz_attr
+
+        :return:
+        """
+        return self._biz_attr
+
+    def set_insert_only(self, insert_only):
+        """设置insert_only,0表示如果文件存在, 则覆盖
+
+        :param insert_only:
+        :return:
+        """
+        self._insert_only = insert_only
+
+    def get_insert_only(self):
+        """获取insert_only
+
+        :return:
+        """
+        return self._insert_only
+
+    def check_params_valid(self):
+        """检查参数是否有效
+
+        :return:
+        """
+        if not super(UploadFileRequest, self).check_params_valid():
+            return False
+        if not self._param_check.check_cos_path_valid(self._cos_path, is_file_path=True):
+            return False
+        if not self._param_check.check_param_unicode('biz_attr', self._biz_attr):
+            return False
+        if not self._param_check.check_param_unicode('local_path', self._local_path):
+            return False
+        if not self._param_check.check_local_file_valid(self._local_path):
+            return False
+        if not self._param_check.check_param_int('insert_only', self._insert_only):
+            return False
+        return self._param_check.check_insert_only(self._insert_only)
+
+
+class UploadSliceFileRequest(UploadFileRequest):
+    """
+    UploadSliceFileRequest: slice (multipart) file upload request
+    """
+
+    def __init__(self, bucket_name, cos_path, local_path, slice_size=1024*1024, biz_attr=u'', enable_sha1=False, max_con=1):
+        """
+
+        :param bucket_name: name of the bucket
+        :param cos_path: absolute path on cos (destination), starting from the bucket root /
+        :param local_path: local file path to upload (source)
+        :param slice_size: slice size in bytes (default 1MB)
+        :param biz_attr: attributes of the file
+        :param enable_sha1: whether to enable the sha1 checksum
+        :param max_con: maximum concurrency used when uploading slices
+        """
+        super(UploadSliceFileRequest, self).__init__(bucket_name, cos_path, local_path, biz_attr)
+        self._slice_size = slice_size
+        self._enable_sha1 = enable_sha1
+        self._max_con = max_con
+
+    @property
+    def enable_sha1(self):
+        return self._enable_sha1
+
+    @enable_sha1.setter
+    def enable_sha1(self, val):
+        if val in (True, False):
+            self._enable_sha1 = val
+        else:
+            raise ValueError("enable_sha1 should be True/False")
+
+    def set_slice_size(self, slice_size):
+        """设置分片大小
+
+        :param slice_size:
+        :return:
+        """
+        self._slice_size = slice_size
+
+    def get_slice_size(self):
+        """获取分片大小
+
+        :return:
+        """
+        return self._slice_size
+
+    def check_params_valid(self):
+        """检查参数是否有效
+
+        :return:
+        """
+        if not super(UploadSliceFileRequest, self).check_params_valid():
+            return False
+
+        if self._enable_sha1 and self._slice_size != 1024*1024:
+            self._param_check._err_tips = 'slice_size is invalid, slice must be 1MB with enable_sha1'
+            return False
+
+        return self._param_check.check_slice_size(self._slice_size)
+
+
+class UpdateFolderRequest(BaseRequest):
+    """UpdateFolderRequest 更新目录请求"""
+
+    def __init__(self, bucket_name, cos_path, biz_attr=u''):
+        """
+
+        :param bucket_name: bucket name
+        :param cos_path: the path on cos
+        :param biz_attr: biz attributes
+        """
+        super(UpdateFolderRequest, self).__init__(bucket_name, cos_path)
+        self._biz_attr = biz_attr
+
+    def set_biz_attr(self, biz_attr):
+        """设置biz_attr
+
+        :param biz_attr:
+        :return:
+        """
+        self._biz_attr = biz_attr
+
+    def get_biz_attr(self):
+        """获取biz_attr
+
+        :return:
+        """
+        return self._biz_attr
+
+    def check_params_valid(self):
+        """检查参数是否有效
+
+        :return:
+        """
+        if not super(UpdateFolderRequest, self).check_params_valid():
+            return False
+        if not self._param_check.check_cos_path_valid(self._cos_path, is_file_path=False):
+            return False
+        if not self._param_check.check_not_cos_root(self._cos_path):
+            return False
+        return self._param_check.check_param_unicode('biz_attr', self._biz_attr)
+
+
+class UpdateFileRequest(BaseRequest):
+    """UpdateFileRequest 更新文件请求 """
+
+    def __init__(self, bucket_name, cos_path):
+        """ 初始化类
+
+            biz_attr:     要更新的文件的属性
+            authority:              文件权限:
+                             eInvalid(继承bucket),
+                             eWRPrivate(私有读写),
+                             eWPrivateRPublic(私有写, 公有读)
+            customer_header:        用户自定义的HTTP请求头,包括以下成员
+            cache_control:          文件的缓存机制,参见HTTP的Cache-Control
+            content_type:           文件的MIME信息,参见HTTP的Content-Type
+            content_disposition:    MIME协议的扩展,参见HTTP的Content-Disposition
+            content_language:       文件的语言, 参见HTTP的Content-Language
+            content_encoding:       body的编码, 参见HTTP的Content-Encoding
+            _x_cos_meta_dict:       用户自定义的属性, key是以x-cos-meta-开头,value为属性值
+
+        :param bucket_name: bucket的名称
+        :param cos_path: cos的绝对路径, 从bucket根/开始
+        """
+        super(UpdateFileRequest, self).__init__(bucket_name, cos_path)
+        self._biz_attr = None
+        self._custom_headers = {}
+        self._authority = None
+        self._cache_control = None
+        self._content_type = None
+        self._content_disposition = None
+        self._content_language = None
+        self._content_encoding = None
+        self._x_cos_meta_dict = dict()
+
+    def set_biz_attr(self, biz_attr):
+        """设置biz_attr"""
+        self._biz_attr = biz_attr
+
+    def get_biz_attr(self):
+        """获取biz_attr"""
+        return self._biz_attr
+
+    # set authority; the valid values are:
+    # eInvalid (inherit from the bucket),
+    # eWRPrivate (private read/write),
+    # eWPrivateRPublic (private write, public read)
+    def set_authority(self, authority):
+        """Set authority
+
+        Valid values: eInvalid (inherit from the bucket), eWRPrivate (private read/write), eWPrivateRPublic (private write, public read)
+        :param authority:
+        :return:
+        """
+        self._authority = authority
+
+    def get_authority(self):
+        """获取authority"""
+        return self._authority
+
+    def set_cache_control(self, cache_control):
+        """设置缓存机制Cache-Control"""
+        self._cache_control = cache_control
+        self._custom_headers[u'Cache-Control'] = cache_control
+
+    def set_content_type(self, content_type):
+        """设置Content-Type"""
+        self._content_type = content_type
+        self._custom_headers['Content-Type'] = content_type
+
+    def set_content_disposition(self, content_disposition):
+        """设置Content-Disposition"""
+        self._content_disposition = content_disposition
+        self._custom_headers['Content-Disposition'] = content_disposition
+
+    def set_content_language(self, content_language):
+        """设置Content-Language"""
+        self._content_language = content_language
+        self._custom_headers['Content-Language'] = content_language
+
+    def set_content_encoding(self, content_encoding):
+        """设置Content-Encoding"""
+        self._content_encoding = content_encoding
+        self._custom_headers['Content-Encoding'] = content_encoding
+
+    def set_x_cos_meta(self, key, value):
+        """设置自定义的x-cos-meta
+
+        key以x-cos-meta-开头,例如自定义key为u'x-cos-meta-len', value为u'1024'
+        :param key:
+        :param value:
+        :return:
+        """
+        self._x_cos_meta_dict[key] = value
+        self._custom_headers[key] = value
+
+    def _convert_dict(self, data):
+        """convert a dict's keys & values from `unicode` to `str`
+
+        :param data:
+        :return:
+        """
+        if isinstance(data, basestring):
+            return str(data)
+        elif isinstance(data, collections.Mapping):
+            return dict(map(self._convert_dict, data.iteritems()))
+        elif isinstance(data, collections.Iterable):
+            return type(data)(map(self._convert_dict, data))
+        else:
+            return data
+
+    def get_custom_headers(self):
+        """ 获取自定义的HTTP头"""
+        return self._convert_dict(self._custom_headers)
+
+    def check_params_valid(self):
+        """ 检查参数是否合法"""
+        if not super(UpdateFileRequest, self).check_params_valid():
+            return False
+
+        if not self._param_check.check_cos_path_valid(self._cos_path, is_file_path=True):
+            return False
+
+        if self._biz_attr is not None:
+            if not self._param_check.check_param_unicode('biz_attr', self._biz_attr):
+                return False
+
+        if self._authority is not None:
+            if not self._param_check.check_param_unicode('authority', self._authority):
+                return False
+
+        if self._authority is not None:
+            if not self._param_check.check_file_authority(self._authority):
+                return False
+
+        if self._cache_control is not None:
+            if not self._param_check.check_param_unicode('cache_control', self._cache_control):
+                return False
+
+        if self._content_type is not None:
+            if not self._param_check.check_param_unicode('content_type', self._content_type):
+                return False
+
+        if self._content_disposition is not None:
+            if not self._param_check.check_param_unicode('content_disposition', self._content_disposition):
+                return False
+
+        if self._content_language is not None:
+            if not self._param_check.check_param_unicode('content_language', self._content_language):
+                return False
+
+        if self._content_encoding is not None:
+            if not self._param_check.check_param_unicode('content_encoding', self._content_encoding):
+                return False
+
+        return self._param_check.check_x_cos_meta_dict(self._x_cos_meta_dict)
+
+
+class StatFileRequest(BaseRequest):
+    """StatRequest 获取文件属性请求"""
+
+    def __init__(self, bucket_name, cos_path):
+        """
+        :param bucket_name: name of the bucket
+        :param cos_path: file path on cos, starting from the bucket root /, must not end with /
+        """
+        super(StatFileRequest, self).__init__(bucket_name, cos_path)
+
+    def check_params_valid(self):
+        """检查参数是否合法"""
+        if not super(StatFileRequest, self).check_params_valid():
+            return False
+        return self._param_check.check_cos_path_valid(self._cos_path, is_file_path=True)
+
+
+class StatFolderRequest(BaseRequest):
+    """StatRequest 获取目录属性请求 """
+
+    def __init__(self, bucket_name, cos_path):
+        """
+
+        :param bucket_name: name of the bucket
+        :param cos_path: folder path on cos, starting from the bucket root /, must end with /
+        """
+        super(StatFolderRequest, self).__init__(bucket_name, cos_path)
+
+    def check_params_valid(self):
+        """检查参数是否合法"""
+        if not super(StatFolderRequest, self).check_params_valid():
+            return False
+        return self._param_check.check_cos_path_valid(self._cos_path, is_file_path=False)
+
+
+class DelFileRequest(BaseRequest):
+    """ DelFileRequest 删除文件请求 """
+
+    def __init__(self, bucket_name, cos_path):
+        """
+
+        :param bucket_name: name of the bucket
+        :param cos_path: file path on cos, starting from the bucket root /, must not end with /
+        """
+        super(DelFileRequest, self).__init__(bucket_name, cos_path)
+
+    def check_params_valid(self):
+        """检查参数是否合法"""
+        if not super(DelFileRequest, self).check_params_valid():
+            return False
+        return self._param_check.check_cos_path_valid(self._cos_path, is_file_path=True)
+
+
+class DelFolderRequest(BaseRequest):
+    """DelFolderRequest 删除目录请求"""
+
+    def __init__(self, bucket_name, cos_path):
+        """
+
+        :param bucket_name: name of the bucket
+        :param cos_path: folder path on cos, starting from the bucket root /, must end with /
+        """
+        super(DelFolderRequest, self).__init__(bucket_name, cos_path)
+
+    def check_params_valid(self):
+        """ 检查参数合法"""
+        if not super(DelFolderRequest, self).check_params_valid():
+            return False
+        if not self._param_check.check_cos_path_valid(self._cos_path, is_file_path=False):
+            return False
+        return self._param_check.check_not_cos_root(self._cos_path)
+
+
+class ListFolderRequest(BaseRequest):
+    """ListFolderRequest 获取目录列表的请求"""
+
+    def __init__(self, bucket_name, cos_path, num=199, prefix=u'', context=u''):
+        """
+        :param bucket_name: name of the bucket
+        :param cos_path: absolute path on cos, starting from the bucket root /
+        :param num: number of entries to list
+        :param prefix: search prefix
+        :param context: pagination context returned by the previous listing
+        """
+        super(ListFolderRequest, self).__init__(bucket_name, cos_path)
+        self._num = num
+        self._prefix = prefix
+        self._context = context
+
+    def set_num(self, num):
+        """设置List数量
+
+        :param num:
+        :return:
+        """
+        self._num = num
+
+    def get_num(self):
+        """获取List数量
+
+        :return:
+        """
+        """
+
+        :return:
+        """
+        return self._num
+
+    def set_prefix(self, prefix):
+        """设置前缀"""
+        self._prefix = prefix
+
+    def get_prefix(self):
+        """获取前缀"""
+        return self._prefix
+
+    def set_context(self, context):
+        """设置搜索上下文"""
+        self._context = context
+
+    def get_context(self):
+        """获取搜索上下文"""
+        return self._context
+
+    def check_params_valid(self):
+        """检查参数是否有效"""
+        if not super(ListFolderRequest, self).check_params_valid():
+            return False
+        if not self._param_check.check_cos_path_valid(self._cos_path, is_file_path=False):
+            return False
+        if not self._param_check.check_param_unicode('prefix', self._prefix):
+            return False
+        return self._param_check.check_param_unicode('context', self._context)
+
+
+class DownloadFileRequest(BaseRequest):
+    def __init__(self, bucket_name, cos_path, local_filename, range_start=None, range_end=None, *args, **kwargs):
+        super(DownloadFileRequest, self).__init__(bucket_name, cos_path)
+
+        self._local_filename = local_filename
+        self._range_start = range_start
+        self._range_end = range_end
+
+    def check_params_valid(self):
+        if not super(DownloadFileRequest, self).check_params_valid():
+            return False
+
+        from os import path
+        if path.exists(self._local_filename):
+            # refuse to overwrite an existing local file
+            self._param_check._err_tips = 'local file %s already exists' % self._local_filename
+            return False
+        return True
+
+
+class MoveFileRequest(BaseRequest):
+
+    def __init__(self, bucket_name, cos_path, dest_path, overwrite=False):
+        super(MoveFileRequest, self).__init__(bucket_name, cos_path)
+        self._dest_path = dest_path
+        if isinstance(overwrite, bool):
+            if overwrite:
+                self._overwrite = 1
+            else:
+                self._overwrite = 0
+        else:
+            raise ValueError("overwrite must be an instance of Boolean")
+
+    @property
+    def dest_path(self):
+        return self._dest_path
+
+    @property
+    def overwrite(self):
+        return self._overwrite
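Request objects are plain parameter holders; their validity can be checked up front, before any network call. A short sketch, again assuming tools/cos/qcloud_cos is on sys.path; the bucket and paths below are placeholders.

    from cos_request import UploadSliceFileRequest, DownloadFileRequest

    req = UploadSliceFileRequest(u'mybucket', u'/backup/db.tar.gz', u'/data/backup/db.tar.gz',
                                 slice_size=1024 * 1024, enable_sha1=False, max_con=4)
    if not req.check_params_valid():
        print('invalid request: %s' % req.get_err_tips())

    dl = DownloadFileRequest(u'mybucket', u'/backup/db.tar.gz', u'/tmp/db.tar.gz')
    print(dl.check_params_valid())   # False when /tmp/db.tar.gz already exists locally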

+ 423 - 0
tools/cos/threadpool.py

@@ -0,0 +1,423 @@
+# -*- coding: UTF-8 -*-
+"""Easy to use object-oriented thread pool framework.
+
+A thread pool is an object that maintains a pool of worker threads to perform
+time consuming operations in parallel. It assigns jobs to the threads
+by putting them in a work request queue, where they are picked up by the
+next available thread. This then performs the requested operation in the
+background and puts the results in another queue.
+
+The thread pool object can then collect the results from all threads from
+this queue as soon as they become available or after all threads have
+finished their work. It's also possible, to define callbacks to handle
+each result as it comes in.
+
+The basic concept and some code was taken from the book "Python in a Nutshell,
+2nd edition" by Alex Martelli, O'Reilly 2006, ISBN 0-596-10046-9, from section
+14.5 "Threaded Program Architecture". I wrapped the main program logic in the
+ThreadPool class, added the WorkRequest class and the callback system and
+tweaked the code here and there. Kudos also to Florent Aide for the exception
+handling mechanism.
+
+Basic usage::
+
+    >>> pool = ThreadPool(poolsize)
+    >>> requests = makeRequests(some_callable, list_of_args, callback)
+    >>> [pool.putRequest(req) for req in requests]
+    >>> pool.wait()
+
+See the end of the module code for a brief, annotated usage example.
+
+Website : http://chrisarndt.de/projects/threadpool/
+
+"""
+__docformat__ = "restructuredtext en"
+
+__all__ = [
+    'makeRequests',
+    'NoResultsPending',
+    'NoWorkersAvailable',
+    'ThreadPool',
+    'WorkRequest',
+    'WorkerThread'
+]
+
+__author__ = "Christopher Arndt"
+__version__ = '1.3.2'
+__license__ = "MIT license"
+
+
+# standard library modules
+import sys
+import threading
+import traceback
+
+try:
+    import Queue            # Python 2
+except ImportError:
+    import queue as Queue   # Python 3
+
+
+# exceptions
+class NoResultsPending(Exception):
+    """All work requests have been processed."""
+    pass
+
+class NoWorkersAvailable(Exception):
+    """No worker threads available to process remaining requests."""
+    pass
+
+
+# internal module helper functions
+def _handle_thread_exception(request, exc_info):
+    """Default exception handler callback function.
+
+    This just prints the exception info via ``traceback.print_exception``.
+
+    """
+    traceback.print_exception(*exc_info)
+
+
+# utility functions
+def makeRequests(callable_, args_list, callback=None,
+        exc_callback=_handle_thread_exception):
+    """Create several work requests for same callable with different arguments.
+
+    Convenience function for creating several work requests for the same
+    callable where each invocation of the callable receives different values
+    for its arguments.
+
+    ``args_list`` contains the parameters for each invocation of callable.
+    Each item in ``args_list`` should be either a 2-item tuple of the list of
+    positional arguments and a dictionary of keyword arguments or a single,
+    non-tuple argument.
+
+    See docstring for ``WorkRequest`` for info on ``callback`` and
+    ``exc_callback``.
+
+    """
+    requests = []
+    for item in args_list:
+        if isinstance(item, tuple):
+            requests.append(
+                WorkRequest(callable_, item[0], item[1], callback=callback,
+                    exc_callback=exc_callback)
+            )
+        else:
+            requests.append(
+                WorkRequest(callable_, [item], None, callback=callback,
+                    exc_callback=exc_callback)
+            )
+    return requests
+
+
+# classes
+class WorkerThread(threading.Thread):
+    """Background thread connected to the requests/results queues.
+
+    A worker thread sits in the background and picks up work requests from
+    one queue and puts the results in another until it is dismissed.
+
+    """
+
+    def __init__(self, requests_queue, results_queue, poll_timeout=5, **kwds):
+        """Set up thread in daemonic mode and start it immediatedly.
+
+        ``requests_queue`` and ``results_queue`` are instances of
+        ``Queue.Queue`` passed by the ``ThreadPool`` class when it creates a
+        new worker thread.
+
+        """
+        threading.Thread.__init__(self, **kwds)
+        self.setDaemon(1)
+        self._requests_queue = requests_queue
+        self._results_queue = results_queue
+        self._poll_timeout = poll_timeout
+        self._dismissed = threading.Event()
+        self.start()
+
+    def run(self):
+        """Repeatedly process the job queue until told to exit."""
+        while True:
+            if self._dismissed.isSet():
+                # we are dismissed, break out of loop
+                break
+            # get next work request. If we don't get a new request from the
+            # queue after self._poll_timeout seconds, we jump to the start of
+            # the while loop again, to give the thread a chance to exit.
+            try:
+                request = self._requests_queue.get(True, self._poll_timeout)
+            except Queue.Empty:
+                continue
+            else:
+                if self._dismissed.isSet():
+                    # we are dismissed, put back request in queue and exit loop
+                    self._requests_queue.put(request)
+                    break
+                try:
+                    result = request.callable(*request.args, **request.kwds)
+                    self._results_queue.put((request, result))
+                except:
+                    request.exception = True
+                    self._results_queue.put((request, sys.exc_info()))
+
+    def dismiss(self):
+        """Sets a flag to tell the thread to exit when done with current job.
+        """
+        self._dismissed.set()
+
+
+class WorkRequest:
+    """A request to execute a callable for putting in the request queue later.
+
+    See the module function ``makeRequests`` for the common case
+    where you want to build several ``WorkRequest`` objects for the same
+    callable but with different arguments for each call.
+
+    """
+
+    def __init__(self, callable_, args=None, kwds=None, requestID=None,
+            callback=None, exc_callback=_handle_thread_exception):
+        """Create a work request for a callable and attach callbacks.
+
+        A work request consists of a callable to be executed by a
+        worker thread, a list of positional arguments, a dictionary
+        of keyword arguments.
+
+        A ``callback`` function can be specified, that is called when the
+        results of the request are picked up from the result queue. It must
+        accept two anonymous arguments, the ``WorkRequest`` object and the
+        results of the callable, in that order. If you want to pass additional
+        information to the callback, just stick it on the request object.
+
+        You can also give custom callback for when an exception occurs with
+        the ``exc_callback`` keyword parameter. It should also accept two
+        anonymous arguments, the ``WorkRequest`` and a tuple with the exception
+        details as returned by ``sys.exc_info()``. The default implementation
+        of this callback just prints the exception info via
+        ``traceback.print_exception``. If you want no exception handler
+        callback, just pass in ``None``.
+
+        ``requestID``, if given, must be hashable since it is used by
+        ``ThreadPool`` object to store the results of that work request in a
+        dictionary. It defaults to the return value of ``id(self)``.
+
+        """
+        if requestID is None:
+            self.requestID = id(self)
+        else:
+            try:
+                self.requestID = hash(requestID)
+            except TypeError:
+                raise TypeError("requestID must be hashable.")
+        self.exception = False
+        self.callback = callback
+        self.exc_callback = exc_callback
+        self.callable = callable_
+        self.args = args or []
+        self.kwds = kwds or {}
+
+    def __str__(self):
+        return "<WorkRequest id=%s args=%r kwargs=%r exception=%s>" % \
+            (self.requestID, self.args, self.kwds, self.exception)
+
+class ThreadPool:
+    """A thread pool, distributing work requests and collecting results.
+
+    See the module docstring for more information.
+
+    """
+
+    def __init__(self, num_workers, q_size=0, resq_size=0, poll_timeout=5):
+        """Set up the thread pool and start num_workers worker threads.
+
+        ``num_workers`` is the number of worker threads to start initially.
+
+        If ``q_size > 0`` the size of the work *request queue* is limited and
+        the thread pool blocks when the queue is full and it tries to put
+        more work requests in it (see ``putRequest`` method), unless you also
+        use a positive ``timeout`` value for ``putRequest``.
+
+        If ``resq_size > 0`` the size of the *results queue* is limited and the
+        worker threads will block when the queue is full and they try to put
+        new results in it.
+
+        .. warning:
+            If you set both ``q_size`` and ``resq_size`` to ``!= 0`` there is
+            the possibility of a deadlock, when the results queue is not pulled
+            regularly and too many jobs are put in the work requests queue.
+            To prevent this, always set ``timeout > 0`` when calling
+            ``ThreadPool.putRequest()`` and catch ``Queue.Full`` exceptions.
+
+        """
+        self._requests_queue = Queue.Queue(q_size)
+        self._results_queue = Queue.Queue(resq_size)
+        self.workers = []
+        self.dismissedWorkers = []
+        self.workRequests = {}
+        self.createWorkers(num_workers, poll_timeout)
+
+    def createWorkers(self, num_workers, poll_timeout=5):
+        """Add num_workers worker threads to the pool.
+
+        ``poll_timeout`` sets the interval in seconds (int or float) for how
+        often threads should check whether they are dismissed, while waiting for
+        requests.
+
+        """
+        for i in range(num_workers):
+            self.workers.append(WorkerThread(self._requests_queue,
+                self._results_queue, poll_timeout=poll_timeout))
+
+    def dismissWorkers(self, num_workers, do_join=False):
+        """Tell num_workers worker threads to quit after their current task."""
+        dismiss_list = []
+        for i in range(min(num_workers, len(self.workers))):
+            worker = self.workers.pop()
+            worker.dismiss()
+            dismiss_list.append(worker)
+
+        if do_join:
+            for worker in dismiss_list:
+                worker.join()
+        else:
+            self.dismissedWorkers.extend(dismiss_list)
+
+    def joinAllDismissedWorkers(self):
+        """Perform Thread.join() on all worker threads that have been dismissed.
+        """
+        for worker in self.dismissedWorkers:
+            worker.join()
+        self.dismissedWorkers = []
+
+    def putRequest(self, request, block=True, timeout=None):
+        """Put work request into work queue and save its id for later."""
+        assert isinstance(request, WorkRequest)
+        # don't reuse old work requests
+        assert not getattr(request, 'exception', None)
+        self._requests_queue.put(request, block, timeout)
+        self.workRequests[request.requestID] = request
+
+    def poll(self, block=False):
+        """Process any new results in the queue."""
+        while True:
+            # still results pending?
+            if not self.workRequests:
+                raise NoResultsPending
+            # are there still workers to process remaining requests?
+            elif block and not self.workers:
+                raise NoWorkersAvailable
+            try:
+                # get back next results
+                request, result = self._results_queue.get(block=block)
+                # has an exception occurred?
+                if request.exception and request.exc_callback:
+                    request.exc_callback(request, result)
+                # hand results to callback, if any
+                if request.callback and not \
+                       (request.exception and request.exc_callback):
+                    request.callback(request, result)
+                del self.workRequests[request.requestID]
+            except Queue.Empty:
+                break
+
+    def wait(self):
+        """Wait for results, blocking until all have arrived."""
+        while 1:
+            try:
+                self.poll(True)
+            except NoResultsPending:
+                break
+
+
+################
+# USAGE EXAMPLE
+################
+
+if __name__ == '__main__':
+    import random
+    import time
+
+    # the work the threads will have to do (rather trivial in our example)
+    def do_something(data):
+        time.sleep(random.randint(1,5))
+        result = round(random.random() * data, 5)
+        # just to show off, we throw an exception once in a while
+        if result > 5:
+            raise RuntimeError("Something extraordinary happened!")
+        return result
+
+    # this will be called each time a result is available
+    def print_result(request, result):
+        print("**** Result from request #%s: %r" % (request.requestID, result))
+
+    # this will be called when an exception occurs within a thread
+    # this example exception handler does little more than the default handler
+    def handle_exception(request, exc_info):
+        if not isinstance(exc_info, tuple):
+            # Something is seriously wrong...
+            print(request)
+            print(exc_info)
+            raise SystemExit
+        print("**** Exception occured in request #%s: %s" % \
+          (request.requestID, exc_info))
+
+    # assemble the arguments for each job to a list...
+    data = [random.randint(1,10) for i in range(20)]
+    # ... and build a WorkRequest object for each item in data
+    requests = makeRequests(do_something, data, print_result, handle_exception)
+    # to use the default exception handler, uncomment next line and comment out
+    # the preceding one.
+    #requests = makeRequests(do_something, data, print_result)
+
+    # or the other form of args_lists accepted by makeRequests: ((,), {})
+    data = [((random.randint(1,10),), {}) for i in range(20)]
+    requests.extend(
+        makeRequests(do_something, data, print_result, handle_exception)
+        #makeRequests(do_something, data, print_result)
+        # to use the default exception handler, uncomment next line and comment
+        # out the preceding one.
+    )
+
+    # we create a pool of 3 worker threads
+    print("Creating thread pool with 3 worker threads.")
+    main = ThreadPool(3)
+
+    # then we put the work requests in the queue...
+    for req in requests:
+        main.putRequest(req)
+        print("Work request #%s added." % req.requestID)
+    # or shorter:
+    # [main.putRequest(req) for req in requests]
+
+    # ...and wait for the results to arrive in the result queue
+    # by using ThreadPool.wait(). This would block until results for
+    # all work requests have arrived:
+    # main.wait()
+
+    # instead we can poll for results while doing something else:
+    i = 0
+    while True:
+        try:
+            time.sleep(0.5)
+            main.poll()
+            print("Main thread working...")
+            print("(active worker threads: %i)" % (threading.activeCount()-1, ))
+            if i == 10:
+                print("**** Adding 3 more worker threads...")
+                main.createWorkers(3)
+            if i == 20:
+                print("**** Dismissing 2 worker threads...")
+                main.dismissWorkers(2)
+            i += 1
+        except KeyboardInterrupt:
+            print("**** Interrupted!")
+            break
+        except NoResultsPending:
+            print("**** No pending results.")
+            break
+    if main.dismissedWorkers:
+        print("Joining all dismissed worker threads...")
+        main.joinAllDismissedWorkers()
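For context, this is the same pattern coscmd (next file) uses in delete_r to fan deletion batches out over the pool: build an ((args, kwargs)) tuple per batch, hand it to makeRequests, queue the requests and wait. A condensed sketch; delete_batch and the bucket/path values are placeholders, and the import mirrors coscmd's `from cos import threadpool`.

    from cos import threadpool

    def delete_batch(cos_client, bucket, paths):
        # real code would call cos_client.del_file(DelFileRequest(bucket, p)) for each path
        for p in paths:
            print("would delete cos://%s%s" % (bucket, p))

    pool = threadpool.ThreadPool(5)
    args_list = [((None, u'mybucket', [u'/backup/a.log', u'/backup/b.log']), None)]
    for req in threadpool.makeRequests(delete_batch, args_list):
        pool.putRequest(req)
    pool.wait()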

+ 501 - 0
tools/coscmd

@@ -0,0 +1,501 @@
+#!/usr/bin/env python
+#coding:utf-8
+
+import sys,os
+import datetime
+import random
+import threading
+import time
+import logging
+import ConfigParser
+from optparse import OptionParser
+from logging.handlers import RotatingFileHandler
+from time import strftime, localtime
+from time import sleep
+from datetime import date
+from datetime import timedelta
+from cos import CosClient
+from cos import UploadFileRequest
+from cos import CreateFolderRequest
+from cos import DelFileRequest
+from cos import DelFolderRequest
+from cos import ListFolderRequest
+from cos import threadpool 
+
+MAX_RETRY_TIMES = 3
+LOG_SAVE_EVERY_NUM = 1024
+ONE_TASK_DEL_FILE_NUMS = 50
+log_level = 1
+log_file_name = "del_file.log"
+dir_thread_num = 2
+file_thread_num = 5
+log_out_to_screen = 1
+delete_folder_fail_exist = 0
+
+CONFIGFILE = "%s/.coscredentials" % os.path.expanduser('~')
+CONFIGSECTION = 'COSCredentials'
+
+HAS_FORK = hasattr(os, 'fork')
+
+HELP = \
+'''coscmd:
+    config         --appid=[appid] --id=[secret_id] --key=[secret_key] --region=[region] --bucket=[bucket] 
+    ls             cosdir
+    mkdir          dirname
+    put            localfile  cosdir 
+    rm(delete,del) object
+    '''
+
+CMD_LIST = {}
+def cmd_configure(args, options):
+    if options.appid is None or options.secret_id is None or options.secret_key is None  or options.region is None or options.bucket is None:
+        print("%s miss parameters, use --appid=[appid] --id=[secret_id] --key=[secret_key] --region=[region] --bucket=[bucket] to specify appid/id/key/region/bucket pair" % args[0])
+        sys.exit(-1)
+    config = ConfigParser.RawConfigParser()
+    config.add_section(CONFIGSECTION)
+    config.set(CONFIGSECTION, 'appid', options.appid)
+    config.set(CONFIGSECTION, 'secret_id', options.secret_id)
+    config.set(CONFIGSECTION, 'secret_key', options.secret_key)
+    if options.region in ['sh','gz','tj','sgp']:
+        config.set(CONFIGSECTION, 'region', options.region)
+    else:
+        print("input region error, setup use : --region={sh,gz,tj,sgp}")
+        sys.exit(-1)
+    config.set(CONFIGSECTION, 'bucket', options.bucket)
+    cfgfile = open(CONFIGFILE, 'w+')
+    config.write(cfgfile)
+    print("Your configuration is saved into %s ." % CONFIGFILE)
+    cfgfile.close()
+    import stat
+    os.chmod(CONFIGFILE, stat.S_IREAD | stat.S_IWRITE)
+
+def cmd_loadconfigure():
+    config = ConfigParser.ConfigParser()
+    config.read(CONFIGFILE)
+    global appid
+    global secret_id
+    global secret_key
+    global region 
+    global bucket
+    appid = int(config.get(CONFIGSECTION, 'appid'))
+    secret_id = config.get(CONFIGSECTION, 'secret_id').decode('utf-8')
+    secret_key = config.get(CONFIGSECTION, 'secret_key').decode('utf-8')
+    region = config.get(CONFIGSECTION, 'region')
+    bucket = config.get(CONFIGSECTION, 'bucket').decode('utf-8')
+    if len(secret_id) == 0 or len(secret_key) == 0 or len(region) == 0 or len(bucket) == 0:
+        print("can't get appid/secret_id/secret_key/region/bucket, setup use : config --appid=[appid] --id=[secret_id] --key=[secret_key] --region=[region] --bucket=[bucket]")
+        sys.exit(1)
+
+def cmd_lsdir(COSDIR):
+    cosdir = COSDIR.decode('utf-8')
+    request = ListFolderRequest(bucket, cosdir)
+    list_folder_ret = cos_client.list_folder(request)
+    if list_folder_ret[u'code'] == 0:
+        print(True) 
+    else:
+        print("%s, appid/secret_id/secret_key/region/bucket invalid"% list_folder_ret[u'message'])
+
+def cmd_mkdir(COSDIR):
+    cosdir = COSDIR.decode('utf-8')
+    request = CreateFolderRequest(bucket, cosdir)
+    create_folder_ret = cos_client.create_folder(request)
+    if create_folder_ret[u'code'] == 0:
+        print("mkdir cos://%s%s OK" % (bucket,COSDIR))
+    else:
+        print(create_folder_ret[u'message'])
+
+def cmd_put(LOCALFILE,COSFILE):
+    localfile = LOCALFILE.decode('utf-8')
+    cosfile = COSFILE.decode('utf-8')
+    request = UploadFileRequest(bucket, cosfile, localfile)
+    request.set_insert_only(0)
+    upload_file_ret = cos_client.upload_file(request)
+    if upload_file_ret[u'code'] == 0:
+        print("put cos://%s%s OK" % (bucket,COSFILE))
+    else:
+        print(upload_file_ret[u'message'])
+
+def loginit():
+    if (log_file_name == ""):
+        return
+    # map the module-level log_level (0/1/2) to a logging constant; default to ERROR
+    if log_level == 0:
+        level = logging.DEBUG
+    elif log_level == 1:
+        level = logging.INFO
+    elif log_level == 2:
+        level = logging.WARNING
+    else:
+        level = logging.ERROR
+
+    # RotatingFileHandler: keep at most 5 backups, each log file at most 20MB
+    logger = logging.getLogger("")
+    Rthandler = RotatingFileHandler(log_file_name, maxBytes=20*1024*1024, backupCount=5)
+    Rthandler.setLevel(level)
+    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
+    Rthandler.setFormatter(formatter)
+    logger.addHandler(Rthandler)
+    # also log to the screen if enabled
+    console = logging.StreamHandler()
+    console.setFormatter(formatter)
+    if (log_out_to_screen == 1):
+        logger.addHandler(console)
+
+    logger.setLevel(level)
+    return logger
+
+# date helpers
+class Dateop():
+    @staticmethod
+    def isValidDate(str):
+        try:
+            time.strptime(str, "%Y""%m""%d")
+            return True
+        except:
+            return False
+
+    @staticmethod
+    def getdaystr(n=0):
+        dt = date.today()-timedelta(days=n)
+        tt = dt.timetuple()
+        daystr = strftime("%Y""%m""%d",tt)
+        return daystr
+
+    @staticmethod
+    def cmpDateAgo(t1,t2):
+        if (Dateop.isValidDate(t1)==False or Dateop.isValidDate(t2)==False):
+            return False
+        if (int(t1) <= int (t2)):
+            return True
+        return False
+
+    @staticmethod
+    def isNeedDeleteDir(dirname, n=0):
+        if (len(dirname) != 8):
+            return False
+        if Dateop.isValidDate(dirname) == False:
+            return False
+        d2 = Dateop.getdaystr(n);
+        if Dateop.cmpDateAgo(dirname, d2):
+            return True
+        return False
+# statistics for deleted files/folders
+class FileStat():
+    global cos_log
+    def __init__(self):
+        self.delfilesuccnum = 0
+        self.deldirsuccnum = 0
+        self.delfilefailnum = 0
+        self.deldirfailnum = 0
+        self.lock = threading.Lock()
+ 
+    def addDelFileFailNum(self,num=1):
+        self.lock.acquire(1)
+        self.delfilefailnum += num
+        self.lock.release()
+    def addDelDirFailNum(self,num=1):
+        self.lock.acquire(1)
+        self.deldirfailnum += num
+        self.lock.release()
+    def addDelDirSuccNum(self, num=1):
+        self.lock.acquire(1)
+        self.deldirsuccnum += num
+        self.lock.release()
+    def addDelFileSuccNum(self, num=1):
+        self.lock.acquire(1)
+        self.delfilesuccnum += num
+        self.lock.release()
+    def printStat(self):
+        msg ="".join(["delfilesuccnum=",str(self.delfilesuccnum),
+                ",delfilefailnum=",str(self.delfilefailnum),
+                ",deldirsuccnum=",str(self.deldirsuccnum),
+                ",deldirfailnum=",str(self.deldirfailnum)])
+        print(msg) 
+    def logStat(self):
+        curtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
+        log = ''.join(["delfilenum=",str(self.delfilesuccnum),
+            ",deldirnum=",str(self.deldirsuccnum),",delfilefailnum=",
+            str(self.delfilefailnum),",deldirfailnum=",str(self.deldirfailnum)])
+        cos_log.info(log)
+
+# execution time statistics
+class TimeStat(object):
+    global cos_log
+    def __init__(self):
+        self.start()
+    def start(self):
+        self.start = datetime.datetime.now()
+        self.t1 = time.time()
+        msg = "delete task started  ..........."
+        cos_log.info(msg)
+    def end(self):
+        self.end = datetime.datetime.now()
+        self.t2 = time.time()
+        msg = "delete task ended\n\nrm task finished,\ntimecost:"+str(self.t2-self.t1) + " (s)"
+        cos_log.info(msg)
+
+# delete every file in the file list
+def delfiles(cos_client, bucket, filelist):
+    for f in filelist:
+        delfile(cos_client, bucket, f)
+
+def delfolders(cos_client, bucket, folderlist):
+    for f in folderlist:
+        delfolder(cos_client, bucket, f)
+# delete a single folder
+def delfolder(cos_client, bucket, folder):
+    global stat
+    global cos_log
+    if not folder:
+        return 0
+    delfolderreq = DelFolderRequest(bucket, folder)
+    retry = 0
+    while (retry < MAX_RETRY_TIMES):
+        ret = cos_client.del_folder(delfolderreq)
+        msg = "delfolder fail, bucket="+bucket+",folder="+folder+ret['message']
+        if (ret['code'] == 0):
+            break
+        elif (ret['code'] == -166):
+            cos_log.warning(msg)
+            break
+        # rate limited: operation too frequent
+        elif (ret['code'] == -71):
+            sleep(random.randint(1,5))
+            cos_log.warning(msg)
+            retry += 1
+            continue
+        # folder is not empty
+        elif (ret['code'] == -173):
+            break
+        else:
+            cos_log.warning(msg)
+            retry += 1
+    if (ret['code'] != 0 and  ret['code'] != -166):
+        stat.addDelDirFailNum()
+        cos_log.error("delfolder fail, bucket="+bucket+",folder="+folder+ret['message'])
+        return ret['code']
+    if (ret['code'] == 0):
+        stat.addDelDirSuccNum()
+        msg = "delfolder success, bucket="+bucket+",folder="+folder
+        cos_log.info(msg)
+    return 0
+
+# delete a single file
+def delfile(cos_client, bucket, filepath):
+    global stat
+    global cos_log
+    delfilereq = DelFileRequest(bucket, filepath)
+    retry = 0
+    while (retry < MAX_RETRY_TIMES):
+        ret = cos_client.del_file(delfilereq)
+        msg = "delfile fail bucket="+bucket+",file="+filepath+ret['message']
+        if (ret['code'] == 0):
+            break
+        # file does not exist
+        elif (ret['code'] == -166):
+            cos_log.warning(msg)
+            break
+        # writes to a single directory are too fast
+        elif (ret['code'] == -143):
+            sleep(random.randint(1,5))
+            cos_log.warning(msg)
+            retry += 1
+            continue
+        # rate limited: operation too frequent
+        elif (ret['code'] == -71):
+            sleep(random.randint(1,5))
+            cos_log.warning(msg)
+            retry += 1
+            continue
+        else:
+            cos_log.warning(msg)
+            retry += 1
+            continue
+    if (ret['code'] != 0 and  ret['code'] != -166):
+        stat.addDelFileFailNum()
+        cos_log.error("delfile fail, bucket="+bucket+",file="+filepath+ret['message'])
+        return ret['code']
+    if (ret['code'] == 0):
+        stat.addDelFileSuccNum()
+        msg = "delfile success, bucket="+bucket+",file="+filepath
+        cos_log.info(msg)
+    return 0
+
+# recursively walk a folder and delete its files
+def delete_r(cos_client, bucket, path, thread_pool_file):
+    global stat
+    global config
+    global cos_log
+    cos_log.debug("delete_r bucket:"+bucket+",path:"+path)
+    context = u""
+    # walk the folder, page by page
+    while True:
+        listfolderreq = ListFolderRequest(bucket, path, 1000, u'', context)
+        retry = 0
+        while (retry < MAX_RETRY_TIMES):
+            listret = cos_client.list_folder(listfolderreq)
+            if listret['code'] != 0 :
+                retry += 1
+                sleep(random.randint(1,3))
+                continue
+            else:
+                break
+        if (listret['code'] != 0):
+            cos_log.error("delete_r: list folder fail:"+path +",return msg:"+ listret['message'])
+            return listret['code']
+        if (len(listret['data']['infos']) == 0):
+            break
+        filelist = []
+        dirlist = []
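+        # Collect names into batches of ONE_TASK_DEL_FILE_NUMS and hand each full batch to the worker thread pool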
+        for info in listret['data']['infos']:
+            fullname = path + info['name']
+            # The listing mixes plain files and subfolders; entries carrying a 'filesize' field are files
+            if 'filesize' in info:
+                filelist.append(fullname)
+                if (len(filelist) >= ONE_TASK_DEL_FILE_NUMS):
+                    args = [cos_client, bucket, filelist]
+                    args_tuple = (args, None)
+                    args_list = [args_tuple]
+                    requests = threadpool.makeRequests(delfiles, args_list)
+                    for req in requests:
+                        thread_pool_file.putRequest(req)
+                    filelist = []
+            else:
+                dirlist.append(fullname)
+                if (len(dirlist) >= ONE_TASK_DEL_FILE_NUMS):
+                    args = [cos_client, bucket, dirlist]
+                    args_tuple = (args, None)
+                    args_list = [args_tuple]
+                    requests = threadpool.makeRequests(delfolders, args_list)
+                    for req in requests:
+                        thread_pool_file.putRequest(req)
+                    dirlist = []
+
+        if (len(filelist) > 0):
+            args = [cos_client, bucket, filelist]
+            args_tuple = (args, None)
+            args_list = [args_tuple]
+            requests = threadpool.makeRequests(delfiles, args_list)
+            for req in requests:
+                thread_pool_file.putRequest(req)
+            filelist = []
+
+        if (len(dirlist) > 0):
+            args = [cos_client, bucket, dirlist]
+            args_tuple = (args, None)
+            args_list = [args_tuple]
+            requests = threadpool.makeRequests(delfolders, args_list)
+            for req in requests:
+                thread_pool_file.putRequest(req)
+            dirlist = []
+
+        cos_log.debug("delete_r thread pool file waiting\n")
+        thread_pool_file.wait()
+        cos_log.debug("delete_r thread pool file waiting end\n")
+
+        if (listret['data']['listover'] == False):
+            context = listret['data']['context']
+            continue
+        else:
+            break
+
+    stat.logStat()
+    return 0
+# Allow the program to be terminated cleanly with Ctrl+C
+class Watcher():
+
+    def __init__(self):
+        self.child = os.fork()
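+        # fork() returns 0 in the child, which carries on with the real work; the parent only waits so it can catch Ctrl+C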
+        if self.child == 0:
+            return
+        else:
+            self.watch()
+
+    def watch(self):
+        global cos_log
+        try:
+            os.wait()
+        except KeyboardInterrupt:
+            cos_log.error("ctrl+c terminated rm_recursive.py, exiting...")
+            self.kill()
+        sys.exit()
+    def kill(self):
+        try:
+            os.kill(self.child, signal.SIGKILL)
+        except OSError:
+            pass
+def cmd_rm(COSDIR):
+    global thread_pool
+    global cos_log
+    global stat 
+    cos_log = loginit()
+    stat = FileStat()
+    timestat = TimeStat()
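+    # HAS_FORK is presumably set earlier in the script based on platform support for os.fork(); without it the Ctrl+C watcher is skipped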
+    if HAS_FORK:
+        Watcher()
+    path = COSDIR.decode('utf-8')
+    thread_pool_dir = threadpool.ThreadPool(dir_thread_num)
+    thread_pool_file = threadpool.ThreadPool(file_thread_num)
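+    # Two pools: thread_pool_dir runs the recursive walk (delete_r), thread_pool_file runs the batched delete jobs it enqueues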
+    cos_log.debug("bucket:"+bucket +",path:"+path)
+    args = [cos_client, bucket, path, thread_pool_file]
+    args_tuple = (args, None)
+    args_list = [args_tuple]
+    requests = threadpool.makeRequests(delete_r, args_list)
+    for req in requests:
+        thread_pool_dir.putRequest(req)
+
+    cos_log.debug("thread_pool_dir waiting.....\n")
+    thread_pool_dir.wait()
+    thread_pool_dir.dismissWorkers(dir_thread_num, True)
+    cos_log.debug("thread_pool_dir wait end.....\n")
+
+    timestat.end()
+    stat.logStat()
+
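+# Command-line dispatch: sys.argv[1] selects the sub-command, the remaining arguments are its parameters; anything else prints HELP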
+if len(sys.argv) >= 3 and sys.argv[1] in ['config','ls','mkdir','put','rm','delete','del']:
+    if sys.argv[1] == 'config':
+        parser = OptionParser()
+        parser.add_option("-a", "--appid", dest="appid", help="specify appid")
+        parser.add_option("-i", "--id", dest="secret_id", help="specify secret id")
+        parser.add_option("-k", "--key", dest="secret_key", help="specify secret key")
+        parser.add_option("-r", "--region", dest="region", help="specify region")
+        parser.add_option("-b", "--bucket", dest="bucket", help="specify bucket")
+        (options, args) = parser.parse_args()
+        CMD_LIST['config'] = cmd_configure
+        CMD_LIST['config'](args, options)
+    elif sys.argv[1] == 'ls':
+        cmd_loadconfigure()
+        cos_client = CosClient(appid, secret_id, secret_key, region)
+        COSDIR = sys.argv[2]
+        cmd_lsdir(COSDIR)
+    elif sys.argv[1] == 'mkdir':
+        cmd_loadconfigure()
+        cos_client = CosClient(appid, secret_id, secret_key, region)
+        COSDIR = sys.argv[2]
+        cmd_mkdir(COSDIR)
+    elif sys.argv[1] == 'put' and len(sys.argv) == 4:
+        cmd_loadconfigure()
+        cos_client = CosClient(appid, secret_id, secret_key, region)
+        LOCALFILE = sys.argv[2]
+        COSFILE = sys.argv[3]
+        cmd_put(LOCALFILE,COSFILE)
+    elif sys.argv[1] in ('rm','delete','del'):
+        cmd_loadconfigure()
+        cos_client = CosClient(appid, secret_id, secret_key, region)
+        COSDIR = sys.argv[2]
+        cmd_rm(COSDIR)
+else:
+    print(HELP)
+    sys.exit()
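+# Example invocations (the script name "cos_tool.py" and all values below are illustrative placeholders, not taken from this commit):
+#   python cos_tool.py config -a <appid> -i <secret_id> -k <secret_key> -r <region> -b <bucket>
+#   python cos_tool.py put /data/backup/db_20170330.tgz /backup/db_20170330.tgz
+#   python cos_tool.py rm /backup/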

+ 2 - 2
versions.txt

@@ -5,8 +5,8 @@ tengine_version=2.1.2
 openresty_version=1.11.2.2
 openssl_version=1.0.2k
 
-tomcat8_version=8.0.42
-tomcat7_version=7.0.76
+tomcat8_version=8.0.43
+tomcat7_version=7.0.77
 tomcat6_version=6.0.51
 
 apache24_version=2.4.25