备份策略一
写shell脚本分别置放于各个网站下,根据实际情况配置,然后在需要备份的时候,登录每个vps进行备份,略感麻烦
备份策略二
通过本地脚本,预先配置每个vps的信息,如IP、端口、用户密码、路径等;在本地执行脚本,并获取下载链接,及时下载备份文件;目前采用策略二;
优点:备份方便
风险:若脚本泄露将导致vps失陷;可改用ssh证书登录以降低风险;若如此,策略二的配置信息可单独存放于配置文件,执行脚本时再读取,进一步降低泄露风险。
(1)直接配置到脚本:
注:可对备份文件名进行hash随机化处理,避免备份文件被外部探测、下载
#coding:utf-8
'''
python2.7
'''
import paramiko
import datetime
import time
def backup(website,ssh_ip,ssh_port,ssh_user,ssh_pass,webpath,bkClass):
currenttime = datetime.datetime.now().strftime("%Y_%m_%d")
#存放备份文件的路径
#webpath = '/home/wwwroot/default/'+website+'/'
if bkClass == 'conf':
#需要备份的路径
backup_path = '/usr/local/nginx/conf/'
else:
backup_path = '/usr/local/mysql/var/'
#存放备份的文件名
filename = '{}-{}-{}.tar.gz'.format(website,bkClass,currenttime)
#存放备份的文件地址
filePath = webpath + filename
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(ssh_ip,port=ssh_port,username = ssh_user,password=ssh_pass)
cmd = 'tar -zcvf {} {}'.format(filePath,backup_path)
ssh.exec_command(cmd)
time.sleep(5)
cmd_check = 'du -h {}'.format(filePath)
stdin,stdout,stderr = ssh.exec_command(cmd_check)
#stdin.write("Y")
res = stdout.readlines()
print 'File Size:',res[0].split(' ')[0]
print 'File Path:',res[0].split(' ')[1].strip()
print 'Download Url :','http://'+website+'/'+filename
def start():
    """Run a 'db' and a 'conf' backup for every VPS configured inline below."""
    # Example entry (disabled):
    # {"website":"11.com","ssh_ip":"1.0.0.1","ssh_port":"22","ssh_user":"root","ssh_pass":"123456","webpath":"/home/wwwroot/default/test.com/"}
    weblist = [
        {"website": "22.com", "ssh_ip": "1.2.3.4", "ssh_port": "2222",
         "ssh_user": "root", "ssh_pass": "root",
         "webpath": "/home/wwwroot/default/ali/"},
    ]
    fields = ("website", "ssh_ip", "ssh_port", "ssh_user", "ssh_pass", "webpath")
    for webinfo in weblist:
        # Pull the connection parameters out in a fixed order, then run
        # both backup classes against the same host.
        args = [webinfo[key] for key in fields]
        for bkClass in ('db', 'conf'):
            backup(*(args + [bkClass]))
def main():
    """Script entry point: kick off all configured backups."""
    start()


if __name__ == '__main__':
    main()
(2)配置到ini文件:
#coding:utf-8
'''
python2.7
'''
import paramiko
import datetime
import time
import random
import hashlib
import configparser
def backup(website,ssh_ip,ssh_port,ssh_user,ssh_pass,webpath,bkClass):
currenttime = datetime.datetime.now().strftime("%Y_%m_%d")
#存放备份文件的路径
#webpath = '/home/wwwroot/default/'+website+'/'
if bkClass == 'conf':
#需要备份的路径
backup_path = '/usr/local/nginx/conf/'
else:
backup_path = '/usr/local/mysql/var/'
#存放备份的文件名
filename = '{}-{}-{}.tar.gz'.format(website,bkClass,currenttime)
#存放备份的文件地址
filePath = webpath + filename
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
ssh.connect(ssh_ip,port=ssh_port,username = ssh_user,password=ssh_pass)
cmd = 'tar -zcvf {} {}'.format(filePath,backup_path)
ssh.exec_command(cmd)
time.sleep(5)
cmd_check = 'du -h {}'.format(filePath)
stdin,stdout,stderr = ssh.exec_command(cmd_check)
#stdin.write("Y")
res = stdout.readlines()
print ' [-]File Size:',res[0].split(' ')[0]
print ' [-]File Path:',res[0].split(' ')[1].strip()
print ' [-]Download Url :','http://'+website+'/'+filename
except Exception as e:
print website,e
def start():
config = configparser.ConfigParser()
iniPath = 'server.ini'
config.read(iniPath)
section = config.sections()
for info in section:
print '[+] starting backup :',info
website = info
ssh_ip = config[info]['ssh_ip']
ssh_port = int(config[info]['ssh_port'])
ssh_user = config[info]['ssh_user']
ssh_pass = config[info]['ssh_pass']
webpath = config[info]['webpath']
backup_class = ['db','conf']
for bkClass in backup_class:
backup(website,ssh_ip,ssh_port,ssh_user,ssh_pass,webpath,bkClass)
def main():
    """Script entry point: back up every host listed in server.ini."""
    start()


if __name__ == '__main__':
    main()
ini文件示例,节点为网站域名:
以上,
1、File Size 非实际大小,仅用于确认备份是否已开始;如需确认实际大小,可监控tar压缩进程是否存在——进程消失即压缩结束,此时再读取文件大小即为实际大小;
2、每执行一次备份就连接了一次ssh,可优化仅连接一次;