Syncing files with fabric

Original version

I previously wrote a tool that, on Windows, picks out a set of files based on their SVN modification status, compares the MD5 of each local file against its counterpart on a remote Linux server, and uploads the files whose digests differ.
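
The filtering step relies on the svn package's LocalClient. Here is a minimal sketch of just that step (assuming the current directory is an SVN working copy); the full script follows right after.

# minimal sketch: list working-copy files whose SVN status is 'modified' or 'added'
import svn.local

client = svn.local.LocalClient('./')
for entry in client.status():
    if entry.type_raw_name in ('modified', 'added'):
        # convert Windows path separators for use on the Linux side
        print(entry.name.replace('\\', '/'))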

#!/usr/bin/python
# encoding: utf-8
# pip install svn
# pip install fabric
#
import logging
import os
import svn.local
from fabric import Connection
import hashlib

logging.basicConfig(level=logging.INFO,format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',datefmt='%a, %d %b %Y %H:%M:%S')

# Remote server directory to sync into
path_remote = "/home/cq/work/server/"

# Compute the MD5 digest of a local file
def GetFileMd5(filename):
    if not os.path.isfile(filename):
        return
    myhash = hashlib.md5()
    # read in chunks so large files do not have to fit in memory
    with open(filename, 'rb') as f:
        while True:
            b = f.read(8096)
            if not b:
                break
            myhash.update(b)
    return myhash.hexdigest()


def main():
    # Connection settings for the remote server
    c = Connection(host="xxx.xxx.xxx.xxx",
        user="username",
        connect_kwargs={"key_filename": "E:/software/keyfile/id_rsa_abel",})
    c.open()
    logging.info(c.is_connected)
    # Sanity check: run a simple command on the remote host
    c.run("pwd")
    # Get the SVN status of the local working copy
    l = svn.local.LocalClient('./')
    entries = l.status()
    for filename in entries:
        # Keep only files in 'modified' or 'added' state
        if filename.type_raw_name in ('modified', 'added'):
            name = filename.name
            name = name.replace("\\","/")
            logging.info("put name: {0}".format(name))
            # Compute the remote file's MD5 with md5sum and keep only the digest
            cmd = "md5sum {0}{1}".format(path_remote, name) + " | awk '{print $1}'"
            retstr = c.run(cmd)
            # Compute the local MD5 and compare
            selfmd5 = GetFileMd5(name)
            print(selfmd5)
            # md5sum output ends with a newline, so strip it before comparing
            if retstr.stdout.strip() == selfmd5:
                print("match")
            else:
                print("not match")
                # No match: upload the file with fabric's put()
                c.put(name, "{0}{1}".format(path_remote, name))
    c.close()

if __name__ == '__main__':
    main()

Refactored version

The revised version mainly pulls the configuration out into a JSON file and accepts one or more config file paths as command-line arguments (see the usage note after the script).
Configuration file contents

{
    "r_ip" : "xxxxxxxxxxxxx",
    "r_usr" : "root",
    "r_path": "/root/upload",
    "keyfile" : "C:/Users/abel/Documents/keyfile/id_rsa",
    "l_path" : "C:/main/code/"
}
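
Before running a full sync it can be handy to confirm that a config file actually opens an SSH session. The following is a small standalone sketch, not part of the upload script; the helper name check_cfg and the uname -a command are only illustrative.

# sketch: confirm a config file can open an SSH connection
import json
from fabric import Connection

def check_cfg(cfg_path):
    with open(cfg_path, 'r') as f:
        cfg = json.load(f)
    conn = Connection(host=cfg["r_ip"],
                      user=cfg["r_usr"],
                      connect_kwargs={"key_filename": cfg["keyfile"]})
    conn.run("uname -a")   # any harmless command proves the login works
    conn.close()

if __name__ == '__main__':
    check_cfg("./config.json")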

Upload script

#!/usr/bin/python
# encoding: utf-8
# 
# Install these packages first
# pip install fabric==2.5.0
# pip install pypiwin32
#
import logging
import os
from fabric import Connection
import hashlib
import json
import sys

logging.basicConfig(level=logging.INFO,format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',datefmt='%a, %d %b %Y %H:%M:%S')

class r_sync(object):
    def __init__(self,_cfg_file):
        super().__init__()
        self.cfg_file_ = _cfg_file
        self.load_cfg()

    def work(self):
        self.open_conn()
        self.check_sync()
        self.close_conn()
        pass
    
    def load_cfg(self):
        self.j_obj_ = None
        with open(self.cfg_file_, 'r') as jsonFile:
            self.j_obj_ = json.load(jsonFile)
        self.r_ip_ = self.j_obj_["r_ip"]
        self.r_usr_ = self.j_obj_["r_usr"]
        self.r_path_ = self.j_obj_["r_path"]
        self.keyfile_ = self.j_obj_["keyfile"]
        self.l_path_ = self.j_obj_["l_path"]
    
    def open_conn(self):
        # Connection settings for the remote server
        self.conn_ = Connection(host=self.r_ip_,
            user=self.r_usr_,
            connect_kwargs={"key_filename": self.keyfile_,})
        self.conn_.open()
        logging.info(self.conn_.is_connected)
    def check_sync(self):
        old_cwd = os.getcwd()
        os.chdir(self.l_path_)
        self.recursion_path("./")
        os.chdir(old_cwd)
    def recursion_path(self,_path):
        logging.info("dir: {0}".format(_path))
        # os.walk already descends into sub-directories, so a single pass is
        # enough; build each file path from the directory os.walk reports (root)
        for root,dirs,files in os.walk(_path):
            for fn in files:
                fn = self.win2linux(str(root) +"/"+ str(fn))
                if os.path.exists(fn):
                    self.check_single_file(fn)
    def check_single_file(self,_filename):
        lmd5 = self.get_l_md5(_filename)
        rmd5 = self.get_r_md5(_filename)
        logging.info("local file: {0}, \nlmd5: {1}".format(_filename,lmd5))
        logging.info("rmd5: {0}".format(rmd5))
        if lmd5 != rmd5:
            self.put_file(_filename)
    def close_conn(self):
        self.conn_.close()
    def win2linux(self, _path):
        _path = _path.replace("\\","/")
        _path = _path.replace("/./","/")
        _path = _path.replace("//","/")
        return _path
    def get_r_md5(self,_name):
        r_p = self.win2linux("{0}/{1}".format(self.r_path_,_name))
        # make sure the remote directory exists so a later put() cannot fail
        cmd = "mkdir -p {0}".format(os.path.dirname(r_p))
        self.conn_.run(cmd)
        # md5sum prints "<digest>  <path>"; awk keeps only the digest
        cmd = "md5sum {0}".format(r_p) + " | awk '{print $1}'"
        retstr = self.conn_.run(cmd)
        # strip the trailing newline; an empty result means the remote file is missing
        return retstr.stdout.strip()
    def get_l_md5(self, _name):
        if not os.path.isfile(_name):
            return None
        myhash = hashlib.md5()
        # read in chunks so large files do not have to fit in memory
        with open(_name,'rb') as f:
            while True:
                b = f.read(8096)
                if not b:
                    break
                myhash.update(b)
        return myhash.hexdigest()
    def put_file(self, _name):
        self.conn_.put(_name,self.win2linux("{0}/{1}".format(self.r_path_,_name)))

def main():
    if len(sys.argv) == 1:
        # no arguments: use the default config file next to the script
        r = r_sync("./config.json")
        r.work()
    else:
        # otherwise treat every argument as a config file and sync each target in turn
        for cfg_name in sys.argv[1:]:
            r = r_sync(cfg_name)
            r.work()

if __name__ == '__main__':
    main()
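
A quick usage note: with no arguments, main() falls back to ./config.json next to the script; otherwise every argument is treated as a config file, so several servers can be synced in one run. The file name sync.py below is just a placeholder for whatever the script is saved as.

python sync.py
python sync.py server_a.json server_b.json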
