日期:2014-05-16  浏览次数:20391 次

Python实现备份EC2的重要文件和MySQL数据库到S3

今天尝试了使用boto这个工具来用python备份文件到S3,废话不说,上代码:

1. 备份重要文件到S3:


import os
connected = 0
def connect():
    """Open an S3 connection and publish it through module globals.

    Side effects: binds the module global ``conn`` to a live
    ``S3Connection`` and sets ``connected`` to 1, the flag that
    ``put()`` checks before uploading.
    """
    # NOTE(review): hard-coded credentials; prefer an IAM role or
    # environment variables in real deployments.
    access_key = 'YOURKEY'  # fix: original source was missing the closing quote here
    secret_key = 'YOURKEY'
    from boto.s3.connection import S3Connection
    global conn
    conn = S3Connection(access_key, secret_key)
    global connected
    connected = 1

def put(fileName, bucketName):
    """Upload the local file *fileName* to the S3 bucket *bucketName*.

    The object key is the (stripped) local path itself.  Requires a
    prior successful call to ``connect()``; otherwise only prints a
    warning and returns.
    """
    if connected == 0:
        print('Not connected!')
    elif connected == 1:
        from boto.s3.key import Key
        source_path = fileName.strip()
        target_bucket = conn.get_bucket(bucketName.strip())
        entry = Key(target_bucket)
        entry.key = source_path
        entry.set_contents_from_filename(source_path)
        
if __name__ == '__main__':
    connect()
    # The original repeated the same walk-and-upload loop verbatim for
    # two directories; fold the duplication into one data-driven loop.
    # Output and upload order are unchanged.
    sync_jobs = [
        ('/var/www/www.ttgrow.com/ttgrow/photos/storyPhotos',
         'story Photo sync in progress'),
        ('/var/www/www.ttgrow.com/ttgrow/photos/thumbnails',
         'thumbnail sync in progress'),
    ]
    for sourceFolder, banner in sync_jobs:
        print(banner)
        for root, dirs, files in os.walk(sourceFolder):
            for file in files:
                path = os.path.join(root, file)
                print('  ' + str(path))
                put(path, 'ttgrow-photo')
    print('finished')

2. 备份mysql数据库到S3:

import os
connected = 0
def connect():
    """Create the shared S3 connection used by put().

    Binds the module global ``conn`` and flips ``connected`` to 1 so
    later uploads know the connection is ready.
    """
    from boto.s3.connection import S3Connection
    global conn, connected
    # NOTE(review): credentials are hard-coded placeholders.
    conn = S3Connection('YOURKEY', 'YOURKEY')
    connected = 1

def put(fileName, bucketName):
    """Store the file at *fileName* in bucket *bucketName* on S3.

    Uses the stripped local path as the object key.  Prints a warning
    instead of uploading when connect() has not run yet.
    """
    if connected == 1:
        from boto.s3.key import Key
        src = fileName.strip()
        container = conn.get_bucket(bucketName.strip())
        obj = Key(container)
        obj.key = src
        obj.set_contents_from_filename(src)
    elif connected == 0:
        print('Not connected!')
        
if __name__ == '__main__':
    from datetime import datetime
    # (removed the redundant `import os` — the module already imports os
    # at the top of the file)
    # Timestamped dump path, e.g. /tmp/dbbak-2014-5-16-3-42.sql
    # (%d matches the original unpadded str(int) formatting exactly).
    now = datetime.today()
    fileName = '/tmp/dbbak-%d-%d-%d-%d-%d.sql' % (
        now.year, now.month, now.day, now.hour, now.minute)
    # SECURITY NOTE: -pPASSWORD exposes the password in the process
    # list; prefer a ~/.my.cnf credentials file.  The command string is
    # fixed (no untrusted input), so shell interpolation is tolerable.
    status = os.system(
        "mysqldump -h YOUR_RDS_LOCATION -u USRNAME -pPASSWORD DBNAME > " + fileName)
    if status != 0:
        # Previously a failed dump was silently uploaded anyway, which
        # could overwrite a good backup with a truncated file.
        print('mysqldump failed, aborting upload')
    else:
        print('backup DB finished')
        connect()
        put(fileName, 'ttgrow-db')
        print('upload to S3 finished')

再把执行脚本加到定时器就每天可以定时执行了 :)


更多操作见官方文档:

http://aws.amazon.com/python/



版权所有。转载本BLOG内任何文章,请以超链接形式注明出处。