
Backing Up Important EC2 Files and a MySQL Database to S3 with Python

Today I tried out the boto library to back up files to S3 with Python. Without further ado, here is the code:

1. Back up important files to S3:


[python] 
import os 
connected = 0 
def connect(): 
    access_key = 'YOURKEY' 
    secret_key = 'YOURKEY' 
    from boto.s3.connection import S3Connection 
    global conn 
    conn = S3Connection(access_key, secret_key) 
    global connected 
    connected = 1 
 
def put(fileName, bucketName): 
    if connected == 0: 
        print 'Not connected!' 
    elif connected == 1: 
        local_file = fileName.strip() 
        bucket = bucketName.strip() 
        from boto.s3.key import Key 
        b = conn.get_bucket(bucket) 
        k = Key(b) 
        k.key = local_file 
        k.set_contents_from_filename(local_file) 
         
if __name__ == '__main__': 
    connect() 
    sourceFolder = '/var/www/www.ttgrow.com/ttgrow/photos/storyPhotos' 
    print 'story Photo sync in progress' 
    for root, dirs, files in os.walk(sourceFolder): 
        for file in files: 
            print '  '+str(os.path.join(root,file)) 
            put(os.path.join(root,file),'ttgrow-photo') 
    sourceFolder = '/var/www/www.ttgrow.com/ttgrow/photos/thumbnails' 
    print 'thumbnail sync in progress' 
    for root, dirs, files in os.walk(sourceFolder): 
        for file in files: 
            print '  '+str(os.path.join(root,file)) 
            put(os.path.join(root,file),'ttgrow-photo') 
    print 'finished' 
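
As written, the loops re-upload every photo on each run. If the folders grow large, you may want to skip keys that already exist in the bucket. Below is a minimal sketch of such a check, assuming the same global conn from connect() and that an already-uploaded file never changes (boto's Bucket.get_key() returns None when the key is absent):

[python] 
from boto.s3.key import Key 
 
def put_if_missing(fileName, bucketName): 
    # upload only when no object with this key exists yet 
    if connected == 0: 
        print 'Not connected!' 
        return 
    local_file = fileName.strip() 
    b = conn.get_bucket(bucketName.strip()) 
    if b.get_key(local_file) is not None: 
        return  # already backed up, skip it 
    k = Key(b) 
    k.key = local_file 
    k.set_contents_from_filename(local_file) 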

2. Back up the MySQL database to S3:

[python] 
import os 
connected = 0 
def connect(): 
    access_key = 'YOURKEY' 
    secret_key = 'YOURKEY' 
    from boto.s3.connection import S3Connection 
    global conn 
    conn = S3Connection(access_key, secret_key) 
    global connected 
    connected = 1 
 
def put(fileName, bucketName): 
    if connected == 0: 
        print 'Not connected!' 
    elif connected == 1: 
        local_file = fileName.strip() 
        bucket = bucketName.strip() 
        from boto.s3.key import Key 
        b = conn.get_bucket(bucket) 
        k = Key(b) 
        k.key = local_file 
        k.set_contents_from_filename(local_file) 
         
if __name__ == '__main__': 
    from datetime import datetime 
    import os 
    temp = datetime.today() 
    fileName = '/tmp/dbbak-'+str(temp.year)+'-'+str(temp.month)+'-'+str(temp.day)+'-'+str(temp.hour)+'-'+str(temp.minute)+'.sql' 
    os.system("mysqldump -h YOUR_RDS_LOCATION -u USRNAME -pPASSWORD DBNAME > "+fileName) 
    print 'backup DB finished' 
    connect() 
    put(fileName,'ttgrow-db') 
 
    print 'upload to S3 finished' 
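
One dump file per run will accumulate in /tmp. If that is a concern, the local copy can be removed once the upload has finished; a minimal addition at the end of the __main__ block:

[python] 
    # delete the local dump now that it is stored in S3 
    os.remove(fileName) 
    print 'removed local dump '+fileName 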
Finally, add the script to a cron job and it will run automatically every day :)
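
For example, a crontab entry like the one below would run the database backup every night at 03:00 (the script path /home/ec2-user/backup_db.py is only a placeholder; adjust it to wherever you saved the script, and make sure the python on that path has boto installed):

0 3 * * * /usr/bin/python /home/ec2-user/backup_db.py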
