├── .gitignore
├── sync
│   ├── __init__.py
│   ├── sync.py
│   ├── __main__.py
│   └── api.py
├── remote
│   ├── __init__.py
│   ├── dh.py
│   ├── __main__.py
│   ├── remote.py
│   └── local.py
├── rmtfunc.py
├── README.md
├── README.zh.md
└── hwinfo.py

/.gitignore:
--------------------------------------------------------------------------------
*.pyc
--------------------------------------------------------------------------------
/sync/__init__.py:
--------------------------------------------------------------------------------
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2015-09-02
@author: Shell.Xu
@copyright: 2015, Shell.Xu
@license: BSD-3-clause
'''
from api import *

--------------------------------------------------------------------------------
/remote/__init__.py:
--------------------------------------------------------------------------------
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2015-08-30
@author: Shell.Xu
@copyright: 2015, Shell.Xu
@license: BSD-3-clause
'''
from local import *

--------------------------------------------------------------------------------
/rmtfunc.py:
--------------------------------------------------------------------------------
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2015-08-14
@author: shell.xu
@copyright: 2015, Shell.Xu
@license: BSD-3-clause
'''
import sys, logging, subprocess
# import bs4

def callback(hostname):
    print 'hostname: ' + hostname

def get_hostname_cb():
    from remote import remote
    with open('/etc/hostname') as fi:
        remote.channel.apply(callback, fi.read().strip())

def get_hostname():
    logging.info('get hostname')
    with open('/etc/hostname') as fi:
        return fi.read().strip()

def get_dpkg():
    rslt = []
    for i, line in enumerate(subprocess.check_output(['dpkg', '-l']).splitlines()):
        if i < 6: continue
        # if line.startswith('ii'): continue
        line = line.strip()
        r = line.split()
        if r[1].startswith('python'): rslt.append(r[:3])
    return rslt

--------------------------------------------------------------------------------
/remote/dh.py:
--------------------------------------------------------------------------------
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2015-09-03
@author: Shell.Xu
@copyright: 2015, Shell.Xu
@license: BSD-3-clause
@comment:
I saw the code at https://github.com/lowazo/pyDHE/blob/master/DiffieHellman.py, but it is GPL-3, so I did not copy it; I rewrote it according to http://tools.ietf.org/html/rfc3526. I am not sure what copyright or license applies to those group parameters; I believe they may be used here. Mail me if I am wrong.
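The functions below implement a plain finite-field Diffie-Hellman exchange: gen_prikey draws a random private exponent, gen_pubkey computes pow(GENERATOR, prikey, prime), and gen_key validates the peer's public value and hashes the shared secret with SHA-256, so the digest can be used directly as an AES-256 key (see enable_aes in local.py and do_dh in remote.py). PRIME6144 is intended to be the 6144-bit MODP group from RFC 3526.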
10 | ''' 11 | from binascii import hexlify 12 | import hashlib 13 | 14 | try: 15 | import Crypto.Random.random 16 | secure_random = Crypto.Random.random.getrandbits 17 | except ImportError: 18 | import OpenSSL 19 | secure_random = lambda x: long(hexlify(OpenSSL.rand.bytes(x>>3)), 16) 20 | 21 | GENERATOR = 2 22 | PRIME6144 = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C93402849236C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BDF8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1BDB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F323A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AACC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE32806A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55CDA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE12BF2D5B0B7474D6E694F91E6DCC4024FFFFFFFFFFFFFFFF 23 | 24 | def sha256(s): 25 | h = hashlib.sha256() 26 | h.update(s) 27 | return h.digest() 28 | 29 | def gen_prikey(bits=576): 30 | return secure_random(bits) 31 | 32 | def gen_pubkey(prikey, prime=PRIME6144): 33 | return pow(GENERATOR, prikey, prime) 34 | 35 | def gen_key(prikey, other, prime=PRIME6144): 36 | if other <= 2 or other >= prime - 1 or not pow(other, (prime-1)/2, prime): 37 | raise Exception('invaild other key') 38 | k = pow(other, prikey, prime) 39 | return sha256(str(k)) 40 | 41 | def main(): 42 | pri1 = gen_prikey() 43 | pri2 = gen_prikey() 44 | 45 | pub1 = gen_pubkey(pri1) 46 | pub2 = gen_pubkey(pri2) 47 | 48 | k1 = gen_key(pri1, pub2) 49 | k2 = gen_key(pri1, pub2) 50 | 51 | print k1 52 | print k2 53 | print k1 == k2 54 | 55 | if __name__ == '__main__': main() 56 | -------------------------------------------------------------------------------- /sync/sync.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | ''' 4 | @date: 2015-09-02 5 | @author: Shell.Xu 6 | @copyright: 2015, Shell.Xu 7 | @license: BSD-3-clause 8 | ''' 9 | import os 10 | import stat 11 | import fnmatch 12 | import logging 13 | from os import path 14 | import yaml 15 | import api 16 | 17 | # TODO: 目录的来回同步 18 | 19 | def reloca_path(filepath, origbase, newbase): 20 | rpath = path.relpath(filepath, origbase) 21 | if rpath == '.': 22 | reloc = newbase 23 | else: 24 | reloc = path.join(newbase, rpath) 25 | logging.debug('%s reloc from %s to %s => %s', 26 | filepath, origbase, newbase, reloc) 27 | return reloc 28 | 29 | def chk4file(filist, remote, local): 30 | f2sync = [] 31 | for fi in filist: 32 | 
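        # only regular files are queued for copying here; directories and links
        # only carry metadata, which is handled via the description file / apply_meta().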
        if fi['type'] != stat.S_IFREG: continue
        localpath = reloca_path(fi['path'], remote, local)

        if path.lexists(localpath): # link is not ok.
            st = os.lstat(localpath)
            if not stat.S_ISREG(st.st_mode):
                logging.error('remote file maps to a local non-file %s' % localpath)
                continue
            if st.st_size == fi['size'] and api.gen_md5hash(localpath) == fi['md5']:
                continue # already identical, nothing to do

        # if the base dir does not exist, create it first.
        dirname = path.dirname(localpath)
        if not path.exists(dirname): # link is ok.
            logging.info('create dir %s' % dirname)
            os.makedirs(dirname)

        f2sync.append((fi['path'], localpath))
    return f2sync

def sync_back(rmt, remote, local, partten=None):
    logging.warning('sync %s in %s to %s.' % (remote, str(rmt), local))
    filist = rmt.apply(api.walkdir, remote, None, partten)
    f2sync = chk4file(filist, remote, local)
    try:
        datas = rmt.apply(api.read_files, [f[0] for f in f2sync])
        api.write_files(zip([f[1] for f in f2sync], datas))
    except Exception as err:
        # maybe the total size of the files is larger than 4GB.
        logging.error("sync files failed, exception: %s.", str(err))
        logging.info("retry syncing files one by one.")
        for rmtpath, localpath in f2sync:
            data = rmt.apply(api.read_file, rmtpath)
            api.write_file(localpath, data)
    return filist

def sync_to(rmt, remote, local, partten=None):
    logging.warning('sync %s to %s in %s' % (local, remote, str(rmt)))
    filist = api.walkdir(local, os.getcwd(), partten)
    f2sync = rmt.apply(chk4file, filist, local, remote)
    try:
        datas = api.read_files([f[0] for f in f2sync])
        rmt.apply(api.write_files, zip([f[1] for f in f2sync], datas))
    except Exception as err:
        # maybe the total size of the files is larger than 4GB.
        logging.error("sync files failed, exception: %s", str(err))
        logging.info("retry syncing files one by one.")
        for localpath, rmtpath in f2sync:
            data = api.read_file(localpath)
            rmt.apply(api.write_file, rmtpath, data)
    return filist

def apply_meta(filist):
    for fi in filist:
        mode = fi['mode']
        logging.info('chmod %s %s', fi['path'], oct(mode))
        os.chmod(fi['path'], mode)
        uid = api.get_userid(fi['user'])
        gid = api.get_groupid(fi['group'])
        logging.info('chown %s %d %d', fi['path'], uid, gid)
        os.lchown(fi['path'], uid, gid)

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Run Your Code Remotely

[Chinese README](README.zh.md)

## How to use

Try this:

    python -m remote -m host1,host2 'import pprint,rmtfunc; pprint.pprint(rmtfunc.get_dpkg())'

It lists all installed packages whose names start with 'python' on host1 and host2.

Note: the target hosts should run Debian/Ubuntu, because get_dpkg, as the name suggests, gathers its information from dpkg -l.

## More examples

Try this:

    python -m remote -x -n sudo -m host1,host2 'hwinfo.all_info()'

It prints the hardware information of the remote machines.

* -x selects eval mode: results are collected and dumped as JSON.
* -n selects the channel; here the remote side runs under sudo.
* -m gives the machine list; -f (file) or -c (stdin) can also be used.
* hwinfo.all_info is a function in hwinfo.py that collects all information about a remote machine.

## How it works

1. Start a Python instance on the remote host and feed it a small bootstrap snippet with `python -c` (see the sketch below). The bootstrap reads stdin, unmarshals the payload, compiles it, and runs it.
2. Send the remote core (remote/remote.py) over the same stdin.
3. The remote core then reads messages one by one: unmarshal, compile, run, and send back the result. So you can run almost anything remotely.

* sys.stdout is hooked, so printed data is marshalled into messages and sent back.
* import is hooked: whenever the remote side imports a module, the local side (remote/local.py) finds it and sends it over, and it is loaded like a native module.
* C extensions are sent as binary files, so to use a C extension the server and client must be on the same architecture.
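The bootstrap stage itself is tiny. Reformatted for readability, the BOOTSTRAP one-liner defined in remote/local.py (BinaryEncoding) is essentially:

    import sys, zlib, struct, marshal
    l = struct.unpack('>I', sys.stdin.read(4))[0]           # 4-byte big-endian length prefix
    o = marshal.loads(zlib.decompress(sys.stdin.read(l)))   # the remote core source
    exec compile(o, '', 'exec')                             # run it; it then serves further messages

The Base64Encoding variant wraps the same framing in base64 so the stream stays plain text.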
## How developers use it

    import remote
    ChanCls = type('C', (remote.SshChannel, remote.BinaryEncoding), {})
    with remote.Remote(ChanCls(hostname)) as rmt:
        rmt.execute('xxx')
        result = rmt.eval('xxx')
        rmt.single('xxx; xxx')

For more examples, see remote/__main__.py and sync/__main__.py.

## The difference between eval, execute and single

eval accepts a single expression and returns its value.

execute accepts a sequence of statements (even a whole module), but always returns None.

single accepts interactive statements; it evaluates each one and prints every result that is not None.

You can find more details in the [python doc](https://docs.python.org/2/library/functions.html#compile).

# sync

A sync system built on top of 'run it remote'. It synchronizes files together with their permissions and ownership, and is mostly used for config files.

# License

Copyright (c) 2015, Shell.Xu
All rights reserved.

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
76 | -------------------------------------------------------------------------------- /README.zh.md: -------------------------------------------------------------------------------- 1 | # Run Your Code Remotely 2 | 3 | ## 如何使用 4 | 5 | 试试: 6 | 7 | python -m remote -m host1,host2 'import pprint,rmtfunc; pprint.pprint(rmtfunc.get_dpkg())' 8 | 9 | 屏幕上应当打印出hostname这台机器上所有以python开头的包。 10 | 11 | 注意:hostname这台机器应当是一台debian/ubuntu。因为get_dpkg这个函数,如同名字暗示的一样,是通过读取dpkg -l来工作的。 12 | 13 | ## 更多例子 14 | 15 | 试试: 16 | 17 | python -m remote -x -n sudo -m host1,host2 'hwinfo.all_info()' 18 | 19 | 这应当会打出远程机器的配置。 20 | 21 | * -x是使用eval模式工作的意思,结果会被收集回来,使用json格式打印出来。 22 | * -n是channel模式选择,这里使用sudo在远程执行。 23 | * -m是机器列表,也可以用-f或-c指定。 24 | * hwinfo.all_info是附带的收集机器信息的程序。 25 | 26 | ## 工作原理 27 | 28 | 1. 启动一个python实例,用-c执行启动代码。启动代码会读取stdin中的输入,unmarshal,编译,并执行。 29 | 2. 从stdin中,将core.py发送过去。 30 | 3. core.py会读取一个个消息,unmarshal,编译,执行,返回结果。因此就可以在远程执行任意代码了。 31 | 32 | * sys.stdout被处理过。所有打印数据都会marshal后发给服务器端去打印。 33 | * import也做过处理。每当你导入一个模块时,core.py会接替工作,请求main.py找到他并发过来。而后这个模块就可以像本地模块一样用了。 34 | * C扩展是以二进制形式发送的。所以当你需要使用C扩展时,服务器和客户端必须在同一个架构上。 35 | 36 | ## 开发接口 37 | 38 | chancls = type('C', (local.SshChannel, local.BinaryEncoding), {}) 39 | with chancls(hostname) as h: 40 | h.execute('xxx') 41 | result = h.eval('xxx') 42 | h.run_single('xxx; xxx') 43 | 44 | remote/__main__.py里有进一步例子。 45 | 46 | ## eval, execute和run_single的区别 47 | 48 | eval只接受一个表达式,会返回表达式的值。 49 | 50 | execute可以接受一系列语句(甚至是一个模块),但是只会返回None。 51 | 52 | single可以接受一系列语句,执行每一条,得到表达式的值。并打印非None的返回值。 53 | 54 | 你可以在[python doc](https://docs.python.org/2/library/functions.html#compile)找到更多信息。 55 | 56 | # sync 57 | 58 | 基于run it remote的,同步远程文件和权限/属主的程序。主要用于同步配置文件。 59 | 60 | ## 工作原理 61 | 62 | 在工作目录下,包含有多个yaml文件。每个文件描述一台服务器的同步信息。 63 | 64 | sync back模式下。从根据配置,从机器上找到合适的文件,检查其在本地是否已经存在一样的内容。如果不存在,则同步回来。最后将所有文件(无论是执行了复制还是本地已存在)的属主/权限写入描述文件。 65 | 66 | sync to模式下。根据配置,从本地寻找合适的文件,检查是否在远程存在一样的内容。如不存在,则同步过去。最后根据描述文件内的记录,将同步过去的文件的权限和属主修改到位。 67 | 68 | 如果要同步系统文件(配置),需要root权限。因此默认以SshSudo模式运行。 69 | 70 | 注意:所有10M以上的文件会跳过不处理。 71 | 72 | ## 文件属性 73 | 74 | * path: 文件路径。其中包含的路径可以是绝对路径也可以是相对路径。一般远程路径保存时都以绝对路径保存。 75 | * type: 类型。在内存中是数字类型,定义参见import stat中的S_IFMT。写入文件时变换为字符串。可以取'dir', 'file', 'link'。 76 | * mode: 权限。定义同import stat中的S_IMODE。实际上就是unix中的UGO权限。 77 | * user: 用户名。注意是用户名字符串。 78 | * group: 组名。也是字符串。 79 | * md5: 文件的md5值。仅文件存在此项。 80 | * size: 文件大小。仅文件存在此项。 81 | * link: 链接目标。仅软链接存在此项。 82 | 83 | ## 描述文件 84 | 85 | yaml格式,里面包含每个文件的必要属性。属性以dict方式存储。 86 | 87 | 保存时,会根据内容计算出最多的属主/属组和文件权限/目录权限,并且在最开始的common中保存。如果和common中一致,即可省略去文件项记录。 88 | 89 | * common: dict,所有文件的默认属性。 90 | * username: 默认属主。 91 | * groupname: 默认属组。 92 | * filemode: 默认文件权限。 93 | * dirmode: 默认目录权限。 94 | * filelist: dict。key为文件路径,value为文件属性。由于路径保存于key中,因此属性中会去掉path。其中只描述user, group, mode, type。如果其余三项和默认值一致,仅剩type一项时,该文件项即被忽略。 95 | 96 | ## 同步配置结构 97 | 98 | * hostname: 可以服务器的hostname。如果没有指定,则按照hostname.yaml的规则,从文件名中解析。 99 | * user: 用户名,控制权限描述文件的默认用户名。 100 | * group: 组名,控制权限描述文件的默认组名。 101 | * filemode: 文件权限。 102 | * dirmode: 目录权限。 103 | * synclist: 104 | * remote: 远程路径,注意需要是绝对路径,否则需要从python执行的起始路径开始计算。支持~来表示用户根目录。如果里面包含通配符,则会分为两个部分。基础路径和通配规则。 105 | * local: 本地路径。无论是否是绝对路径,都会被转换为相对路径,在前面加上hostname来存放。 106 | 107 | # License 108 | 109 | Copyright (c) 2015, Shell.Xu 110 | All rights reserved. 111 | 112 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 113 | 114 | 1. 
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 115 | 116 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 117 | 118 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 119 | 120 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 121 | -------------------------------------------------------------------------------- /sync/__main__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | ''' 4 | @date: 2015-09-02 5 | @author: Shell.Xu 6 | @copyright: 2015, Shell.Xu 7 | @license: BSD-3-clause 8 | ''' 9 | import os 10 | import sys 11 | import stat 12 | import getopt 13 | import logging 14 | from os import path 15 | import remote, remote.__main__ 16 | import api, sync 17 | 18 | def listdesc(dirname): 19 | import yaml 20 | for filename in os.listdir(dirname): 21 | if not filename.endswith('.yaml'): 22 | continue 23 | hostname = filename[:-5] 24 | with open(path.join(dirname, filename)) as fi: 25 | desc = yaml.load(fi.read()) 26 | if 'hostname' not in desc: 27 | desc['hostname'] = hostname 28 | yield desc 29 | 30 | def get_syncinfo(rmt, desc, syncinfo): 31 | rmtpath = syncinfo['remote'] 32 | if rmtpath.startswith('~'): 33 | rmtpath = rmt.apply(path.expanduser, rmtpath) 34 | 35 | partten = None 36 | if '*' in rmtpath: 37 | partten, rmtpath = path.basename(rmtpath), path.dirname(rmtpath) 38 | logging.info('rmt: %s, partten: %s' % (rmtpath, partten)) 39 | if '*' in rmtpath: 40 | raise Exception('match just allow in last level.') 41 | 42 | local = syncinfo.get('local') or rmtpath 43 | if local.startswith(path.sep): 44 | local = local[1:] 45 | local = path.join(desc['hostname'], local) 46 | return rmtpath, local, partten 47 | 48 | def merge_filist(filist, attrs, remote, local): 49 | attrfiles = attrs['filelist'] 50 | for fi in filist: 51 | fi2 = api.limit_attr(fi, set(['user', 'group', 'path', 'mode', 'type'])) 52 | 53 | if fi['type'] in (stat.S_IFREG, stat.S_IFLNK): 54 | fi2.update(attrs['file']) 55 | elif fi['type'] == stat.S_IFDIR: 56 | fi2.update(attrs['dir']) 57 | fi2.update() 58 | 59 | remotepath = sync.reloca_path(fi['path'], local, remote) 60 | fi2['path'] = remotepath 61 | if remotepath in attrfiles: 62 | fi2.update(attrfiles[remotepath]) 63 | yield fi2 64 | 65 | def cache_default_attr(attrs): 66 | common = attrs['common'] 67 | attrs['file'] = { 68 | 'user': common['username'], 69 | 'group': common['groupname'], 70 | 'mode': common['filemode']} 71 | 
attrs['dir'] = { 72 | 'user': common['username'], 73 | 'group': common['groupname'], 74 | 'mode': common['dirmode']} 75 | 76 | def sync_desc(desc): 77 | import yaml 78 | ChanCls = type('C', (remote.SshSudoChannel, remote.BinaryEncoding), {}) 79 | with remote.Remote(ChanCls(desc['hostname'])) as rmt: 80 | filist = [] 81 | if '-t' in optdict: 82 | with open('%s.meta' % desc['hostname'], 'rb') as fi: 83 | attrs = api.filist_load(fi.read()) 84 | cache_default_attr(attrs) 85 | for syncinfo in desc['synclist']: 86 | rmtpath, local, partten = get_syncinfo(rmt, desc, syncinfo) 87 | if '-b' in optdict: 88 | filist.extend( 89 | sync.sync_back(rmt, rmtpath, local, partten)) 90 | else: 91 | fl = sync.sync_to(rmt, rmtpath, local, partten) 92 | fl = list(merge_filist(fl, attrs, rmtpath, local)) 93 | filist.extend(fl) 94 | if '-b' in optdict: 95 | doc = api.filist_dump( 96 | filist, desc.get('user'), desc.get('group'), 97 | desc.get('filemode'), desc.get('dirmode')) 98 | with open('%s.meta' % desc['hostname'], 'wb') as fo: 99 | fo.write(doc) 100 | else: 101 | rmt.apply(sync.apply_meta, filist) 102 | 103 | def main(): 104 | ''' 105 | -b: sync back. 106 | -l: log level. 107 | -h: help, you just seen. 108 | -m: machine list. 109 | -t: sync to. 110 | ''' 111 | global optdict 112 | global args 113 | optlist, args = getopt.getopt(sys.argv[1:], 'bl:hm:t') 114 | optdict = dict(optlist) 115 | if '-h' in optdict: 116 | print main.__doc__ 117 | return 118 | 119 | if '-l' in optdict: 120 | logging.basicConfig(level=optdict['-l'].upper()) 121 | 122 | if '-b' not in optdict and '-t' not in optdict: 123 | print 'you must set sync back or sync to.' 124 | return 125 | 126 | machine_allow = [] 127 | if '-m' in optdict: 128 | machine_allow = optdict['-m'].split(',') 129 | 130 | desces = [] 131 | for dirname in args: 132 | for desc in listdesc(dirname): 133 | if not desc['synclist']: continue 134 | if machine_allow and desc['hostname'] not in machine_allow: 135 | continue 136 | desces.append(desc) 137 | 138 | remote.__main__.parallel_map_t(sync_desc, desces) 139 | 140 | if __name__ == '__main__': main() 141 | -------------------------------------------------------------------------------- /remote/__main__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | ''' 4 | @date: 2015-08-30 5 | @author: Shell.Xu 6 | @copyright: 2015, Shell.Xu 7 | @license: BSD-3-clause 8 | ''' 9 | import os, sys 10 | import json 11 | import getopt 12 | import logging 13 | import traceback 14 | import local 15 | 16 | def initlog(lv, logfile=None): 17 | rootlog = logging.getLogger() 18 | if logfile: handler = logging.FileHandler(logfile) 19 | else: handler = logging.StreamHandler() 20 | handler.setFormatter( 21 | logging.Formatter( 22 | '%(asctime)s,%(msecs)03d [%(levelname)s] <%(name)s>: %(message)s', 23 | '%H:%M:%S')) 24 | rootlog.addHandler(handler) 25 | rootlog.setLevel(lv) 26 | 27 | def parallel_map_t(func, it, concurrent=20): 28 | from multiprocessing.pool import ThreadPool 29 | pool = ThreadPool(concurrent) 30 | def wrapper(i): 31 | try: 32 | return func(i) 33 | except Exception as err: 34 | print traceback.format_exc() 35 | for i in it: 36 | pool.apply_async(wrapper, (i,)) 37 | pool.close() 38 | pool.join() 39 | 40 | def get_source(infile): 41 | for line in infile: 42 | yield line.strip() 43 | 44 | def name2obj(name): 45 | module_name, funcname = name.rsplit('.', 1) 46 | module = __import__(module_name) 47 | return getattr(module, funcname) 48 | 49 | def 
parse_channel(): 50 | if '-n' not in optdict or optdict['-n'] == 'ssh': 51 | return local.SshChannel 52 | if optdict['-n'] == 'sudo': 53 | return local.SshSudoChannel 54 | if optdict['-n'] == 'pssh': 55 | return local.PSshChannel 56 | if optdict['-n'] == 'psudo': 57 | return local.PSshSudoChannel 58 | return name2obj(optdict['-n']) 59 | 60 | def parse_protocol(): 61 | if '-p' not in optdict or optdict['-p'] == 'binary': 62 | return local.BinaryEncoding 63 | if optdict['-p'] == 'base64': 64 | return local.Base64Encoding 65 | return name2obj(optdict['-p']) 66 | 67 | def parse_hostlist(): 68 | if '-c' in optdict: 69 | return get_source(sys.stdin) 70 | elif '-f' in optdict: 71 | fi = open(optdict['-f']) 72 | return get_source(fi) 73 | elif '-m' in optdict: 74 | return optdict['-m'].split(',') 75 | print 'can\'t find host list.' 76 | print 'you may define by stdin(-c), file(-f) or machine(-m).' 77 | return None 78 | 79 | def prepare_modules(rmt, command): 80 | if '(' not in command: 81 | return 82 | funcname = command.split('(', 1)[0] 83 | if '.' not in funcname: 84 | return 85 | module_name = funcname.rsplit('.', 1)[0] 86 | rmt.execute('import ' + module_name) 87 | 88 | def run_eval_host(ChanCls): 89 | args = {} 90 | if '-l' in optdict: 91 | args['loglevel'] = optdict['-l'].upper() 92 | def inner(host): 93 | with local.Remote(ChanCls(host), args=args) as rmt: 94 | for command in commands: 95 | prepare_modules(rmt, command) 96 | result = rmt.eval(command) 97 | result = json.dumps(result) 98 | if '-M' in optdict: 99 | print '%s: %s' % (host, result) 100 | else: 101 | print result 102 | return inner 103 | 104 | def run_single_host(ChanCls): 105 | args = {} 106 | if '-l' in optdict: 107 | args['loglevel'] = optdict['-l'].upper() 108 | def inner(host): 109 | with local.Remote(ChanCls(host), args=args) as rmt: 110 | for command in commands: 111 | print '-----%s output: %s-----' % (host, command) 112 | rmt.single(command) 113 | return inner 114 | 115 | def main(): 116 | ''' 117 | -c: input hostlist from stdin. 118 | -f: input hostlist from file. 119 | -j: dump result as json mode. 120 | -L: log file. 121 | -l: log level. 122 | -h: help, you just seen. 123 | -M: print with hostname. 124 | -m: host list as parameter. 125 | -n: channel mode, can be local, ssh or sudo, pssh or psudo. ssh is default. 126 | -p: protocol mode, binary or base64, or other class. binary is default. 127 | -s: run in serial mode. 128 | -x: eval mode. normally run in single mode. 
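    examples (also shown in the README):
        python -m remote -m host1,host2 'import pprint,rmtfunc; pprint.pprint(rmtfunc.get_dpkg())'
        python -m remote -x -n sudo -m host1,host2 'hwinfo.all_info()'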
129 | ''' 130 | global optdict 131 | global commands 132 | optlist, commands = getopt.getopt(sys.argv[1:], 'cf:jL:l:hMm:n:p:sx') 133 | optdict = dict(optlist) 134 | if '-h' in optdict: 135 | print main.__doc__ 136 | return 137 | 138 | loglevel = optdict.get('-l') or 'WARNING' 139 | loglevel = loglevel.upper() 140 | logfile = optdict.get('-L') 141 | initlog(loglevel, logfile) 142 | 143 | hostlist = parse_hostlist() 144 | if hostlist is None: 145 | return 146 | 147 | chancls = parse_channel() 148 | protcls = parse_protocol() 149 | ChanCls = type('C', (chancls, protcls), {}) 150 | 151 | if '-x' in optdict: 152 | run_host = run_eval_host(ChanCls) 153 | else: 154 | run_host = run_single_host(ChanCls) 155 | 156 | if '-s' in optdict: 157 | return map(run_host, hostlist) 158 | return parallel_map_t(run_host, hostlist) 159 | 160 | if __name__ == '__main__': main() 161 | -------------------------------------------------------------------------------- /hwinfo.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | ''' 4 | @date: 2015-03-11 5 | @author: shell.xu 6 | ''' 7 | import os, re, sys, json, getopt, subprocess 8 | 9 | def check_output(x): 10 | p = subprocess.Popen(x, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 11 | r = p.stdout.read() 12 | p.wait() 13 | return r.splitlines() 14 | 15 | def split_reader(src, sep, keys, stopblank=False): 16 | for line in src: 17 | if stopblank and not line: return 18 | r = line.split(sep, 1) 19 | if len(r) == 1: continue 20 | k, v = r[0].strip(), r[1].strip() 21 | if k.lower() in keys: yield k, v 22 | 23 | DMI_INFO = { 24 | 'system': set(['serial number', 'uuid']), 25 | 'base': set(['product name', 'version', 'serial number']), 26 | 'processor': set(['id', 'version']), 27 | 'memory': set([ 28 | 'size', 'locator', 'speed', 'manufacturer', 'serial number', 29 | 'asset tag', 'part number', 'configured'])} 30 | def dmidecode(): 31 | try: src = iter(check_output(['dmidecode',])) 32 | except: 33 | exc_info = sys.exc_info() 34 | yield 'Base', 'error' 35 | raise exc_info[0], exc_info[1], exc_info[2] 36 | for line in src: 37 | if not line or line.startswith('Handle'): continue 38 | name = line.split()[0] 39 | info = DMI_INFO.get(name.lower()) 40 | if not info: continue 41 | r = dict(split_reader(src, ':', info, True)) 42 | if not r: continue 43 | if name == 'Memory' and not r.get('Serial Number'): continue 44 | yield name, r 45 | 46 | SMART_INFO = ['vendor', 'model family', 'product', 'device model', 47 | 'user capacity', 'logical block size', 'serial number'] 48 | re_disk = re.compile('(sd[a-z]+)\d*') 49 | def diskinfo(): 50 | disks = set() 51 | with open('/proc/diskstats', 'r') as fdisk: 52 | for line in fdisk: 53 | m = re_disk.match(line.split()[2]) 54 | if m: disks.add(m.group(1)) 55 | for disk in disks: 56 | try: 57 | output = check_output(['smartctl', '-i', '/dev/%s' % disk]) 58 | info = dict(split_reader(output, ':', SMART_INFO)) 59 | info['disk'] = disk 60 | yield 'Disk', info 61 | except: 62 | exc_info = sys.exc_info() 63 | yield 'Disk', {'disk': disk, 'state': 'error'} 64 | raise exc_info[0], exc_info[1], exc_info[2] 65 | 66 | def diskusage(): 67 | df = check_output(['df', '-k', '-P']) 68 | for line in df[1:]: 69 | d = line.strip().split() 70 | yield 'DiskUsage', {'dev': d[0], 'total': d[1], 'used': d[2], 'mountpoint': d[5]} 71 | 72 | re_iface = re.compile('^(\d+): (.+): <(.*)> (.*)') 73 | def ipaddr(): 74 | iface, ips, macs = '', [], [] 75 | for line in check_output(['ip', 
'addr']): 76 | if line.startswith(' '): 77 | r = line.strip().split() 78 | if r[0] == 'inet': ips.append(r[1]) 79 | elif r[0] == 'link/ether': macs.append(r[1]) 80 | continue 81 | if iface: 82 | try: unicode(iface) 83 | except UnicodeDecodeError: 84 | iface = 'Base64ed:' + base64.b64code(iface) 85 | rslt.update({'iface': iface, 'ipaddr': ips, 'ether': macs}) 86 | yield rslt 87 | ips, macs = [], [] 88 | m = re_iface.match(line) 89 | i = iter(m.group(4).split()) 90 | iface, rslt = m.group(2), dict(zip(i, i)) 91 | try: unicode(iface) 92 | except UnicodeDecodeError: 93 | iface = 'Base64ed:' + base64.b64code(iface) 94 | rslt.update({'iface': iface, 'ipaddr': ips, 'ether': macs}) 95 | yield rslt 96 | 97 | ETH_INFO = ['speed', 'duplex', 'auto-negotiation'] 98 | def ethtool(iface): 99 | src = iter(check_output(['ethtool', iface])) 100 | return dict(split_reader(src, ':', ETH_INFO)) 101 | 102 | def ethinfo(): 103 | for info in ipaddr(): 104 | d = ethtool(info['iface']) 105 | if d: info.update(d) 106 | yield 'Network', info 107 | 108 | def memusage(): 109 | free = check_output(['free', '-m']) 110 | total = int(free[1].split()[1]) 111 | used = int(free[2].split()[2]) 112 | yield 'MemoryUsage', {'total': total, 'used': used} 113 | 114 | def hostname(): 115 | with open('/etc/hostname') as fi: return [('Hostname', fi.read().strip())] 116 | 117 | def run_info(funcs): 118 | rslt = {} 119 | for f in funcs: 120 | try: 121 | for k, v in f(): rslt.setdefault(k, []).append(v) 122 | except: 123 | import traceback 124 | rslt.setdefault('Error', []).append(traceback.format_exc()) 125 | return rslt 126 | 127 | def all_info(): 128 | return run_info([dmidecode, diskinfo, diskusage, ethinfo, memusage, hostname]) 129 | 130 | def main(): 131 | ''' 132 | -h: help 133 | -i: indent 134 | ''' 135 | optlist, args = getopt.getopt(sys.argv[1:], 'hi') 136 | optdict = dict(optlist) 137 | if '-h' in optdict: 138 | print main.__doc__ 139 | return 140 | 141 | info = all_info() 142 | try: s = json.dumps(info, indent=4 if '-i' in optdict else None) 143 | except: s = json.dumps({'Hostname': hostname()[0][1], 'Error': 'json error'}) 144 | sys.stdout.write(s + '\n') 145 | sys.stdout.flush() 146 | 147 | if __name__ == '__main__': main() 148 | -------------------------------------------------------------------------------- /sync/api.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | ''' 4 | @date: 2015-09-02 5 | @author: Shell.Xu 6 | @copyright: 2015, Shell.Xu 7 | @license: BSD-3-clause 8 | ''' 9 | import os 10 | import pwd 11 | import grp 12 | import stat 13 | import fnmatch 14 | import hashlib 15 | import logging 16 | import collections 17 | from os import path 18 | 19 | MAX_SYNC_SIZE = 10 * 1024 * 1024 20 | 21 | def memorized(func): 22 | cache = {} 23 | from functools import wraps 24 | @wraps(func) 25 | def inner(k): 26 | if k not in cache: 27 | cache[k] = func(k) 28 | return cache[k] 29 | return inner 30 | 31 | def read_file(filepath): 32 | with open(filepath, 'rb') as fi: 33 | return fi.read() 34 | 35 | def write_file(filepath, data): 36 | with open(filepath, 'wb') as fo: 37 | fo.write(data) 38 | 39 | def read_files(fs): 40 | return [read_file(f) for f in fs] 41 | 42 | def write_files(fs): 43 | for f, d in fs: 44 | write_file(f, d) 45 | 46 | @memorized 47 | def get_username(uid): 48 | return pwd.getpwuid(uid).pw_name 49 | 50 | @memorized 51 | def get_userid(username): 52 | return pwd.getpwnam(username).pw_uid 53 | 54 | @memorized 55 | def 
get_groupname(gid): 56 | return grp.getgrgid(gid).gr_name 57 | 58 | @memorized 59 | def get_groupid(groupname): 60 | return grp.getgrnam(groupname).pw_gid 61 | 62 | def gen_md5hash(filepath): 63 | try: 64 | h = hashlib.md5() 65 | h.update(read_file(filepath)) 66 | return h.hexdigest() 67 | except IOError: # no priv to read 68 | return 69 | 70 | def gen_fileinfo(filepath, start=None): 71 | filepath = path.abspath(filepath) 72 | rpath = filepath 73 | if start is not None: 74 | rpath = path.relpath(filepath, start) 75 | st = os.lstat(filepath) 76 | if stat.S_IFMT(st.st_mode) not in (stat.S_IFREG, stat.S_IFLNK, stat.S_IFDIR): 77 | return 78 | fi = { 79 | 'path': rpath, 80 | 'type': stat.S_IFMT(st.st_mode), 'mode': stat.S_IMODE(st.st_mode), 81 | 'user': get_username(st.st_uid), 'group': get_groupname(st.st_gid)} 82 | 83 | if stat.S_ISREG(st.st_mode): 84 | fi['md5'] = gen_md5hash(filepath) 85 | fi['size'] = st.st_size 86 | elif stat.S_ISLNK(st.st_mode): 87 | fi['link'] = os.readlink(filepath) 88 | return fi 89 | 90 | def walkdir(basedir, start=None, partten=None): 91 | basedir = path.abspath(basedir) 92 | filist = [gen_fileinfo(basedir, start)] 93 | for root, dirs, files in os.walk(basedir): 94 | for filename in files + dirs: 95 | filepath = path.join(root, filename) 96 | if partten: 97 | rpath = path.relpath(filepath, root) 98 | if not fnmatch.fnmatch(rpath, partten): 99 | continue 100 | fi = gen_fileinfo(filepath, start) 101 | if 'size' in fi and fi['size'] > MAX_SYNC_SIZE: 102 | logging.error('file %s size %d out of limit' % (localpath, fi['size'])) 103 | continue # pass 104 | if fi: filist.append(fi) 105 | return filist 106 | 107 | def listdir(dirname, partten=None): 108 | filist = [] 109 | for filename in os.listdir(dirname): 110 | if partten and not fnmatch.fnmatch(filename, partten): 111 | continue 112 | fi = gen_fileinfo(path.join(dirname, filename)) 113 | if fi: 114 | filist.append(fi) 115 | return filist 116 | 117 | def stat_dir_user(filist, username=None): 118 | if username: return username 119 | users = collections.Counter() 120 | for fi in filist: 121 | users[fi['user']] += 1 122 | return users.most_common(1)[0][0] if users else None 123 | 124 | def stat_dir_group(filist, groupname=None): 125 | if groupname: return groupname 126 | groups = collections.Counter() 127 | for fi in filist: 128 | groups[fi['group']] += 1 129 | return groups.most_common(1)[0][0] if groups else None 130 | 131 | def stat_dir_mode(filist, filemode=None, dirmode=None): 132 | if filemode and dirmode: return filemode, dirmode 133 | files = collections.Counter() 134 | dirs = collections.Counter() 135 | for fi in filist: 136 | st = fi['type'] 137 | if stat.S_ISREG(st) or stat.S_ISLNK(st): 138 | files[fi['mode']] += 1 139 | elif stat.S_ISDIR(st): 140 | dirs[fi['mode']] += 1 141 | if not filemode: 142 | filemode = files.most_common(1)[0][0] if files else None 143 | if not dirmode: 144 | dirmode = dirs.most_common(1)[0][0] if dirs else None 145 | return filemode, dirmode 146 | 147 | def limit_attr(fi, attrs): 148 | rslt = {} 149 | for k, v in fi.iteritems(): 150 | if k in attrs: 151 | rslt[k] = v 152 | return rslt 153 | 154 | filetype_map = { 155 | stat.S_IFDIR: 'dir', 156 | stat.S_IFREG: 'file', 157 | stat.S_IFLNK: 'link', 158 | } 159 | 160 | def reversed_map(m, v1): 161 | for k, v in m.iteritems(): 162 | if v == v1: 163 | return k 164 | 165 | def transmode(fi, value): 166 | if fi['mode'] == value: 167 | del fi['mode'] 168 | else: 169 | fi['mode'] = oct(fi['mode']) 170 | 171 | def filist_dump(filist, username=None, 
groupname=None, filemode=None, dirmode=None): 172 | username = stat_dir_user(filist, username) 173 | groupname = stat_dir_group(filist, groupname) 174 | filemode, dirmode = stat_dir_mode(filist, filemode, dirmode) 175 | fileattrs = set(['path', 'type',]) 176 | 177 | files = {} 178 | for fi in filist: 179 | if 'md5' in fi: del fi['md5'] 180 | if 'size' in fi: del fi['size'] 181 | 182 | if fi['user'] == username: del fi['user'] 183 | if fi['group'] == groupname: del fi['group'] 184 | 185 | if fi['type'] in (stat.S_IFREG, stat.S_IFLNK): 186 | transmode(fi, filemode) 187 | elif fi['type'] == stat.S_IFDIR: 188 | transmode(fi, dirmode) 189 | else: 190 | raise Exception('unknown file type %s' % oct(fi['type'])) 191 | if set(fi.keys()) == fileattrs: continue 192 | 193 | fi['type'] = filetype_map[fi['type']] 194 | files[fi['path']] = fi 195 | del fi['path'] 196 | 197 | import yaml 198 | return yaml.dump({ 199 | 'common': { 200 | 'username': username, 201 | 'groupname': groupname, 202 | 'filemode': oct(filemode), 203 | 'dirmode': oct(dirmode)}, 204 | 'filelist': files}) 205 | 206 | def filist_load(doc): 207 | import yaml 208 | doc = yaml.load(doc) 209 | common = doc['common'] 210 | username, groupname = common['username'], common['groupname'] 211 | common['filemode'] = int(common['filemode'], 8) 212 | common['dirmode'] = int(common['dirmode'], 8) 213 | 214 | for filepath, fi in doc['filelist'].iteritems(): 215 | fi['path'] = filepath 216 | fi['type'] = reversed_map(filetype_map, fi['type']) 217 | if 'mode' in fi: 218 | fi['mode'] = int(fi['mode'], 8) 219 | return doc 220 | -------------------------------------------------------------------------------- /remote/remote.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | ''' 4 | @date: 2015-08-14 5 | @author: shell.xu 6 | @copyright: 2015, Shell.Xu 7 | @license: BSD-3-clause 8 | ''' 9 | import os, sys, imp, zlib, struct, marshal, logging 10 | 11 | Args = None # replace Parameter here. 
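# local.Remote.send_remote_core() substitutes a dict of runtime options (e.g. loglevel, protocol)
# for the None above before this file is shipped to the remote interpreter, so Args is populated at run time.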
12 | 13 | def add_module(name): 14 | if name not in sys.modules: 15 | sys.modules[name] = imp.new_module(name) 16 | return sys.modules[name] 17 | 18 | class Loader(object): 19 | 20 | def __init__(self, finder, srcfid, pathname, description): 21 | self.finder, self.src = finder, None 22 | self.pathname, self.description = pathname, description 23 | if srcfid is not None: 24 | with ChannelFile(finder.channel, srcfid) as srcfile: 25 | self.src = srcfile.read() 26 | 27 | def exec_code_module(self, mod): 28 | exec compile(self.src, self.pathname, 'exec') in mod.__dict__ 29 | 30 | class SrcLoader(Loader): 31 | 32 | def load_module(self, fullname): 33 | m = add_module(fullname) 34 | m.__file__ = self.pathname 35 | self.exec_code_module(m) 36 | return m 37 | 38 | class PycLoader(Loader): 39 | 40 | def load_module(self, fullname): 41 | import tempfile 42 | with tempfile.NamedTemporaryFile('wb') as tmp: 43 | tmp.write(self.src) 44 | tmp.flush() 45 | return imp.load_compiled(fullname, tmp.name) 46 | 47 | class ExtLoader(Loader): 48 | 49 | def load_module(self, fullname): 50 | import tempfile 51 | with tempfile.NamedTemporaryFile('wb') as tmp: 52 | tmp.write(self.src) 53 | tmp.flush() 54 | return imp.load_dynamic(fullname, tmp.name) 55 | 56 | class PkgLoader(Loader): 57 | 58 | def load_module(self, fullname): 59 | loader = self.finder.find_remote('__init__', [self.pathname,]) 60 | m = add_module(fullname) 61 | m.__file__ = loader.pathname 62 | m.__path__ = [self.pathname,] 63 | m.__package__ = fullname 64 | loader.exec_code_module(m) 65 | return m 66 | 67 | class Finder(object): 68 | 69 | def __init__(self, channel): 70 | self.channel = channel 71 | 72 | def find_module(self, name, path): 73 | try: imp.find_module(name, path) 74 | except ImportError: 75 | r = self.find_remote(name, path) 76 | if r is not None: return r 77 | raise 78 | 79 | def find_remote(self, name, path): 80 | self.channel.send(['find_module', name.split('.')[-1], path]) 81 | r = self.channel.recv() 82 | if r is None: 83 | return 84 | if r[2][2] not in self.type_map: 85 | raise Exception('unknown module type') 86 | return self.type_map[r[2][2]](self, *r) 87 | 88 | type_map = { 89 | imp.PY_SOURCE: SrcLoader, 90 | imp.PY_COMPILED: PycLoader, 91 | imp.C_EXTENSION: ExtLoader, 92 | imp.PKG_DIRECTORY: PkgLoader,} 93 | 94 | class ChannelFile(object): 95 | 96 | def __init__(self, channel, id): 97 | self.channel, self.id = channel, id 98 | 99 | def __enter__(self): 100 | return self 101 | 102 | def __exit__(self, exc_type, exc_value, traceback): 103 | self.close() 104 | 105 | def write(self, s): 106 | self.channel.send(['write', self.id, s]) 107 | 108 | def read(self, size=-1): 109 | self.channel.send(['read', self.id, size]) 110 | return self.channel.recv() 111 | 112 | def seek(self, offset, whence): 113 | self.channel.send(['seek', self.id, offset, whence]) 114 | 115 | def flush(self): 116 | self.channel.send(['flush', self.id]) 117 | 118 | def close(self): 119 | self.channel.send(['close', self.id]) 120 | 121 | class Remote(object): 122 | 123 | def __init__(self, chan): 124 | self.chan = chan 125 | self.g = dict() 126 | 127 | def loop(self): 128 | while True: 129 | o = self.chan.recv() 130 | if o[0] == 'exit': break 131 | if o[0] == 'result': return o[1] 132 | if o[0] == 'apply': 133 | r = eval(o[1], self.g)(*o[2:]) 134 | elif o[0] == 'dh': 135 | r = self.do_dh(o[1], o[2]) 136 | elif o[0] in ('exec', 'eval', 'single'): 137 | r = eval(compile(o[1], '<%s>' % o[0], o[0]), self.g) 138 | self.chan.send(['result', r]) 139 | 140 | def 
do_dh(self, other_key, other_iv): 141 | from remote import dh 142 | from Crypto.Cipher import AES 143 | 144 | # Diffie-Hellman key exchange 145 | pri_key, pri_iv = dh.gen_prikey(), dh.gen_prikey() 146 | key = dh.gen_key(pri_key, other_key) 147 | iv = dh.gen_key(pri_iv, other_iv) 148 | self.chan.send(['result', [dh.gen_pubkey(pri_key), dh.gen_pubkey(pri_iv)]]) 149 | 150 | self.encryptor = AES.new(key, AES.MODE_CBC, IV=iv) 151 | self.decryptor = AES.new(key, AES.MODE_CBC, IV=iv) 152 | origwrite, origread = self.chan.write, self.chan.read 153 | def write(d): 154 | return origwrite(self.encryptor.encrypt(d)) 155 | def read(n): 156 | d = origread(n) 157 | return self.decryptor.decrypt(d) 158 | self.chan.write = write 159 | self.chan.read = read 160 | 161 | def open(self, filepath, mode): 162 | self.chan.send(['open', filepath, mode]) 163 | r = self.chan.recv() 164 | return ChannelFile(self, r) 165 | 166 | def getstd(self, which): 167 | self.chan.send(['std', which]) 168 | id = self.chan.recv() 169 | return ChannelFile(self.chan, id) 170 | 171 | def eval(self, f): 172 | self.chan.send(['eval', f]) 173 | return self.loop() 174 | 175 | def execute(self, f): 176 | self.chan.send(['exec', f]) 177 | return self.loop() 178 | 179 | def single(self, f): 180 | self.chan.send(['single', f]) 181 | return self.loop() 182 | 183 | def apply(self, f, *p): 184 | self.chan.send(['apply', self.sendfunc(f)] + list(p)) 185 | return self.loop() 186 | 187 | def sendfunc(self, f): 188 | import inspect 189 | m = inspect.getmodule(f) 190 | fname = f.__name__ 191 | if m is None: 192 | self.execute('import __main__') 193 | fname = '__main__.' + fname 194 | else: 195 | self.execute('import ' + m.__name__) 196 | fname = m.__name__ + '.' + fname 197 | return fname 198 | 199 | class BinaryEncoding(object): 200 | 201 | def send(self, o): 202 | d = zlib.compress(marshal.dumps(o)) 203 | self.write(struct.pack('>I', len(d)) + d) 204 | 205 | def recv(self): 206 | l = struct.unpack('>I', self.read(4))[0] 207 | return marshal.loads(zlib.decompress(self.read(l))) 208 | 209 | class Base64Encoding(object): 210 | 211 | def send(self, o): 212 | d = base64.b64encode(zlib.compress(marshal.dumps(o), 9)) 213 | self.write(base64.b64encode(struct.pack('>I', len(d))) + d) 214 | 215 | def recv(self): 216 | l = struct.unpack('>I', base64.b64decode(self.read(8)))[0] 217 | o = marshal.loads(zlib.decompress(base64.b64decode(self.read(l)))) 218 | return o 219 | 220 | class StdChannel(object): 221 | 222 | def __init__(self): 223 | self.stdin, self.stdout = sys.stdin, os.fdopen(os.dup(1), 'w') 224 | os.close(1) 225 | os.dup2(2, 1) 226 | 227 | def write(self, d): 228 | self.stdout.write(d) 229 | self.stdout.flush() 230 | 231 | def read(self, n): 232 | return self.stdin.read(n) 233 | 234 | def initlog(): 235 | rootlog = logging.getLogger() 236 | handler = logging.StreamHandler(sys.stdout) 237 | handler.setFormatter( 238 | logging.Formatter( 239 | '%(asctime)s,%(msecs)03d [%(levelname)s] : %(message)s', 240 | '%H:%M:%S')) 241 | rootlog.addHandler(handler) 242 | if 'loglevel' in Args: 243 | rootlog.setLevel(Args['loglevel']) 244 | 245 | def main(): 246 | protocol = BinaryEncoding 247 | if 'protocol' in Args: 248 | protocol = globals().get(Args['protocol']) 249 | channel = type('C', (StdChannel, protocol), {})() 250 | remote = Remote(channel) 251 | 252 | sys.modules['remote.remote'] = __import__(__name__) 253 | sys.meta_path.append(Finder(channel)) 254 | sys.stdout = remote.getstd('stdout') 255 | initlog() 256 | channel.send(['result', None]) 257 | 258 | 
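    # hand over to the message loop: it serves apply/eval/exec/single/dh requests
    # from the local side until an 'exit' message arrives.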
remote.loop() 259 | 260 | if __name__ == '__main__': main() 261 | -------------------------------------------------------------------------------- /remote/local.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | ''' 4 | @date: 2015-08-14 5 | @author: shell.xu 6 | @copyright: 2015, Shell.Xu 7 | @license: BSD-3-clause 8 | ''' 9 | import os 10 | import sys 11 | import imp 12 | import zlib 13 | import base64 14 | import struct 15 | import marshal 16 | import inspect 17 | import logging 18 | from os import path 19 | 20 | def show_msg(action, o): 21 | if o is None: 22 | logging.debug('%s: none', action) 23 | elif isinstance(o, list): 24 | logging.debug('%s: %s', action, str(o)) 25 | elif isinstance(o, (int, long)): 26 | logging.debug('%s int: %d', action, o) 27 | elif isinstance(o, basestring): 28 | logging.debug('%s str: %d', action, len(o)) 29 | else: 30 | logging.debug('%s: unknown', action) 31 | 32 | class BinaryEncoding(object): 33 | 34 | BOOTSTRAP = '''import sys, zlib, struct, marshal; l = struct.unpack('>I', sys.stdin.read(4))[0]; o = marshal.loads(zlib.decompress(sys.stdin.read(l))); exec compile(o, '', 'exec')''' 35 | 36 | def send(self, o): 37 | show_msg('send', o) 38 | d = zlib.compress(marshal.dumps(o), 9) 39 | self.write(struct.pack('>I', len(d)) + d) 40 | 41 | def recv(self): 42 | l = struct.unpack('>I', self.read(4))[0] 43 | o = marshal.loads(zlib.decompress(self.read(l))) 44 | show_msg('recv', o) 45 | return o 46 | 47 | class Base64Encoding(object): 48 | 49 | BOOTSTRAP = '''import sys, zlib, base64, struct, marshal; l = struct.unpack('>I', base64.b64decode(sys.stdin.read(8)))[0]; o = marshal.loads(zlib.decompress(base64.b64decode(sys.stdin.read(l)))); exec compile(o, '', 'exec')''' 50 | 51 | def get_args(self): 52 | return {'protocol': 'Base64Encoding'} 53 | 54 | def send(self, o): 55 | show_msg('send', o) 56 | d = base64.b64encode(zlib.compress(marshal.dumps(o), 9)) 57 | self.write(base64.b64encode(struct.pack('>I', len(d))) + d) 58 | 59 | def recv(self): 60 | l = struct.unpack('>I', base64.b64decode(self.read(8)))[0] 61 | o = marshal.loads(zlib.decompress(base64.b64decode(self.read(l)))) 62 | show_msg('recv', o) 63 | return o 64 | 65 | class ProcessChannel(object): 66 | 67 | def __init__(self, cmd): 68 | import subprocess 69 | self.p = subprocess.Popen( 70 | cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 71 | 72 | def close(self): 73 | self.p.wait() 74 | 75 | def write(self, d): 76 | try: 77 | self.p.stdin.write(d) 78 | self.p.stdin.flush() 79 | except: 80 | print self.p.stderr.read() 81 | raise 82 | 83 | def read(self, n): 84 | try: 85 | d = self.p.stdout.read(n) 86 | if not d: raise EOFError() 87 | return d 88 | except: 89 | print self.p.stderr.read() 90 | raise 91 | 92 | class LocalChannel(ProcessChannel): 93 | 94 | def __init__(self): 95 | ProcessChannel.__init__(self, ['python', '-c', BOOTSTRAP]) 96 | 97 | def __repr__(self): 98 | return '' 99 | 100 | class SshChannel(ProcessChannel): 101 | 102 | def __init__(self, host): 103 | ProcessChannel.__init__(self, ['ssh', host, 'python', '-c', '"%s"' % self.BOOTSTRAP]) 104 | self.host = host 105 | 106 | def __repr__(self): 107 | return self.host 108 | 109 | class SshSudoChannel(ProcessChannel): 110 | 111 | def __init__(self, host, user=None): 112 | if user: 113 | cmd = ['ssh', host, 'sudo', '-u', user, 114 | 'python', '-c', '"%s"' % self.BOOTSTRAP] 115 | else: 116 | cmd = ['ssh', host, 'sudo', 117 | 'python', 
'-c', '"%s"' % self.BOOTSTRAP] 118 | ProcessChannel.__init__(self, cmd) 119 | self.host, self.user = host, user 120 | 121 | def __repr__(self): 122 | return self.host 123 | 124 | class ParamikoChannel(object): 125 | 126 | # had to set auto_hostkey to True, for detail: https://github.com/paramiko/paramiko/issues/67 127 | 128 | def __init__(self, host, cmd, auto_hostkey=True, **kw): 129 | import paramiko 130 | self.ssh = paramiko.SSHClient() 131 | print self.ssh.get_host_keys().items() 132 | self.ssh.load_system_host_keys() 133 | print self.ssh.get_host_keys().items() 134 | if auto_hostkey: 135 | self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) 136 | self.ssh.connect(host, **kw) 137 | self.stdin, self.stdout, self.stderr = self.ssh.exec_command(cmd) 138 | 139 | def close(self): 140 | self.ssh.close() 141 | 142 | def write(self, d): 143 | try: 144 | self.stdin.write(d) 145 | self.stdin.flush() 146 | except: 147 | print self.stderr.read() 148 | raise 149 | 150 | def read(self, n): 151 | try: 152 | return self.stdout.read(n) 153 | except: 154 | print self.stderr.read() 155 | raise 156 | 157 | class PSshChannel(ParamikoChannel): 158 | 159 | def __init__(self, host, auto_hostkey=False, **kw): 160 | cmd = 'python -c "%s"' % self.BOOTSTRAP 161 | ParamikoChannel.__init__(self, host, cmd, auto_hostkey, **kw) 162 | self.host = host 163 | 164 | def __repr__(self): 165 | return self.host 166 | 167 | class PSshSudoChannel(ParamikoChannel): 168 | 169 | def __init__(self, host, user=None, auto_hostkey=False, **kw): 170 | if user: 171 | cmd = 'sudo -u %s python -c "%s"' % (user, self.BOOTSTRAP) 172 | else: 173 | cmd = 'sudo python -c "%s"' % self.BOOTSTRAP 174 | ParamikoChannel.__init__(self, host, cmd, auto_hostkey, **kw) 175 | self.host, self.user = host, user 176 | 177 | def __repr__(self): 178 | return self.host 179 | 180 | class Remote(object): 181 | 182 | def __init__(self, chan, args=None): 183 | self.chan = chan 184 | self.g = {} 185 | self.fmaps = {} 186 | self.mc = set() 187 | self.args = args if args is not None else {} 188 | self.send_remote_core() 189 | 190 | def send_remote_core(self): 191 | kw = self.args.copy() 192 | if hasattr(self.chan, 'get_args'): 193 | kw.update(self.chan.get_args()) 194 | 195 | basedir = path.dirname(__file__) 196 | with open(path.join(basedir, 'remote.py'), 'r') as fi: 197 | d = fi.read() 198 | d = d.replace('None # replace Parameter here.', str(kw)) 199 | 200 | self.chan.send(d) 201 | self.loop() 202 | 203 | def __repr__(self): 204 | return str(self.chan) 205 | 206 | def __enter__(self): 207 | return self 208 | 209 | def __exit__(self, exc_type, exc_value, traceback): 210 | self.close() 211 | 212 | def close(self): 213 | self.chan.send(['exit',]) 214 | self.chan.close() 215 | 216 | def loop(self): 217 | while True: 218 | o = self.chan.recv() 219 | if o[0] == 'result': 220 | return o[1] 221 | if o[0] == 'apply': 222 | r = eval(o[1], self.g)(*o[2:]) 223 | self.chan.send(['result', r]) 224 | elif o[0] in ('exec', 'eval', 'single'): 225 | r = eval(compile(o[1], '<%s>' % o[0], o[0]), self.g) 226 | self.chan.send(['result', r]) 227 | else: 228 | getattr(self, 'on_' + o[0])(*o[1:]) 229 | 230 | def on_open(self, filepath, mode): 231 | f = open(filepath, mode) 232 | self.fmaps[id(f)] = f 233 | self.chan.send(id(f)) 234 | 235 | def on_std(self, which): 236 | if which == 'stdout': 237 | f = sys.stdout 238 | elif which == 'stderr': 239 | f = sys.stderr 240 | elif which == 'stdin': 241 | f = sys.stdin 242 | else: 243 | raise Exception('unknown std: %s' % which) 244 
| self.fmaps[id(f)] = f 245 | self.chan.send(id(f)) 246 | 247 | def on_write(self, id, d): 248 | self.fmaps[id].write(d) 249 | 250 | def on_read(self, id, size): 251 | d = self.fmaps[id].read(size) 252 | self.chan.send(d) 253 | 254 | def on_seek(self, id, offset, whence): 255 | self.fmaps[id].seek(offset, whence) 256 | 257 | def on_flush(self, id): 258 | self.fmaps[id].flush() 259 | 260 | def on_close(self, id): 261 | f = self.fmaps[id] 262 | if f not in (sys.stdin, sys.stdout, sys.stderr): 263 | f.close() 264 | del self.fmaps[id] 265 | 266 | def on_find_module(self, name, path): 267 | try: 268 | r = list(imp.find_module(name, path)) 269 | if r[0] is not None: 270 | self.fmaps[id(r[0])] = r[0] 271 | r[0] = id(r[0]) 272 | self.chan.send(r) 273 | except ImportError: self.chan.send(None) 274 | 275 | def on_except(self, err): 276 | raise Exception(err) 277 | 278 | def enable_aes(self): 279 | import dh 280 | from Crypto.Cipher import AES 281 | 282 | # Diffie-Hellman key exchange 283 | pri_key, pri_iv = dh.gen_prikey(), dh.gen_prikey() 284 | self.chan.send(['dh', dh.gen_pubkey(pri_key), dh.gen_pubkey(pri_iv)]) 285 | other_key, other_iv = self.loop() 286 | key = dh.gen_key(pri_key, other_key) 287 | iv = dh.gen_key(pri_iv, other_iv) 288 | 289 | self.encryptor = AES.new(key, AES.MODE_CBC, IV=iv) 290 | self.decryptor = AES.new(key, AES.MODE_CBC, IV=iv) 291 | origwrite, origread = self.chan.write, self.chan.read 292 | def write(d): 293 | return origwrite(self.encryptor.encrypt(d)) 294 | def read(n): 295 | d = origread(n) 296 | return self.decryptor.decrypt(d) 297 | self.chan.write = write 298 | self.chan.read = read 299 | 300 | # for last result. 301 | return self.loop() 302 | 303 | def eval(self, f): 304 | self.chan.send(['eval', f]) 305 | return self.loop() 306 | 307 | def execute(self, f): 308 | self.chan.send(['exec', f]) 309 | return self.loop() 310 | 311 | def single(self, f): 312 | self.chan.send(['single', f]) 313 | return self.loop() 314 | 315 | def apply(self, f, *p): 316 | self.chan.send(['apply', self.sendfunc(f)] + list(p)) 317 | return self.loop() 318 | 319 | def sendfunc(self, f): 320 | m = inspect.getmodule(f) 321 | fname = f.__name__ 322 | if m.__name__ == '__main__': 323 | self.execute(inspect.getsource(m)) 324 | else: 325 | self.import_module(m.__name__) 326 | fname = m.__name__ + '.' + fname 327 | return fname 328 | 329 | def import_module(self, name): 330 | if name in self.mc: return 331 | self.execute('import ' + name) 332 | self.mc.add(name) 333 | --------------------------------------------------------------------------------