集群间文件并发同步

条件:

    有一台跳板机, 能够ssh到全部机器

    知道其余机器的用户名和密码

思路:

    1. 在跳板机先简单for循环把文件从本地拷贝到远程10台机器( 由于咱们设置了并发Pool(10))

    2. 而后 调用 do_peer_copy 并发10台同时拷贝

#!/usr/bin/env python
# -*- coding:utf-8 -*-

import logging
import multiprocessing

import invoke
import redis
from fabric import Connection

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

# One shared format for both sinks: timestamp plus process/thread names so
# concurrent Pool workers can be told apart in the log.
formatter = logging.Formatter(
    fmt='%(asctime)s %(processName)s-%(threadName)s - %(name)s - %(levelname)s - %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S')

# INFO and above is persisted to the log file.
fh = logging.FileHandler('distributeCopy.log')
fh.setLevel(logging.INFO)
fh.setFormatter(formatter)
logger.addHandler(fh)

# Only ERROR and above is echoed to the console.
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
ch.setFormatter(formatter)
logger.addHandler(ch)

# SSH password used by sshpass in do_peer_copy.
# NOTE(review): a hard-coded placeholder; replace with a real secret source.
passwd = 'ssh_password'


def get_server_ip():
    """Return the list of all target server IPs.

    Placeholder: the original article leaves this unimplemented.
    """
    pass


def get_remote_model():
    """Pull the final model from the source host onto this jump host via rsync."""
    remote_path = '/data1/online/model/final_model/model'
    local_path = '/data1/rsync_data/ctr_model_webapi'
    invoke.run('rsync -a 192.168.10.100:%s %s' % (remote_path, local_path))


def do_local_copy(dst_ip, dst_path, queue):
    """Copy the model from the jump host to one remote machine over SFTP.

    Runs on the jump host (which has passwordless SSH to the others).
    Because the pool is Pool(10), ten machines must be seeded this way first;
    each seeded host's IP is put on `queue` so peers can later pull from it.

    :param dst_ip: IP of the machine to copy to.
    :param dst_path: destination directory on the remote machine.
    :param queue: shared queue of IPs that already hold the file.
    """
    src_path = '/data1/rsync_data/ctr_model_webapi/model'
    result = Connection(dst_ip).put(src_path, remote=dst_path)
    queue.put(dst_ip)
    logger.info('do_local_copy %s: %s', dst_ip, result)


def do_peer_copy(args):
    """SSH to a destination host and have it rsync the file from a peer.

    Takes a source IP off the shared queue, pulls the file from that peer,
    then puts the *destination* IP on the queue so it can serve others —
    giving exponential fan-out across the cluster.

    :param args: tuple of (dst_ip, shared queue) — a single argument so the
        function can be used with Pool.map.
    """
    dst_ip = args[0]
    queue = args[1]
    # Parenthesized print works identically in Python 2 (single argument)
    # and Python 3, unlike the original bare print statement.
    print('before qsize: %s' % queue.qsize())
    src_ip = queue.get()
    # NOTE(review): the password is embedded in the remote command line and
    # may show up in `ps` output / shell history on the destination host.
    cmd = """sshpass -p "%s" rsync -e 'ssh -o "StrictHostKeyChecking no"' -a username@%s:%s %s""" % \
        (passwd, src_ip, '/tmp/model', '/tmp')
    result = Connection(dst_ip).run(cmd)
    queue.put(dst_ip)
    print('end qsize: %s' % queue.qsize())
    logger.info('do_peer_copy %s => %s, %s', src_ip, dst_ip, result)
    # Deliberately return nothing: returning the fabric Result from a Pool
    # worker triggers a pickling issue, see
    # https://github.com/joblib/joblib/issues/818#issuecomment-445865581
if __name__ == '__main__':
    # Manager-backed queue so Pool workers share the "who has the file" list.
    manager = multiprocessing.Manager()
    q = manager.Queue()
    pool = multiprocessing.Pool(10)

    ips = get_server_ip()

    # Seed the first 10 hosts serially from the jump host (matches Pool(10)).
    for ip in ips[:10]:
        do_local_copy(ip, '/tmp', q)

    # Remaining hosts pull from already-seeded peers, 10 at a time.
    pool.map(do_peer_copy, [(ip, q) for ip in ips[10:]])
    pool.close()
    pool.join()
    logger.info('finished')
相关文章
相关标签/搜索