有时使用nginx做高可用的时候,日志可能会根据不同的应用生成许多的访问日志,所以写了以下的批量日志自动备份脚本。
#!/usr/bin/python2.4
import os
import shutil
import glob
import datetime
import re
import gzip
# Timestamp suffix (YYYYMMDDhhmm), captured once at import time so every
# log rotated during a single run of this script shares the same suffix.
Tm=datetime.datetime.now().strftime("%Y%m%d%H%M")
def Time_part():
    """Rotate the nginx access logs for the current (hourly) period.

    Reads the log directory from the first ':'-separated field on the
    first line of /root/script/nginx_url.ini and renames every *.log
    file in it by appending the module-level timestamp ``Tm``
    (e.g. access.log -> access.log.201601011200).
    """
    FD = open('/root/script/nginx_url.ini', 'r')
    try:
        # First field (before ':') of the ini's first line is the log dir.
        URL = FD.readline().rstrip().split(":")[0]
    finally:
        FD.close()  # always release the config handle, even on a bad line
    # os.path.join works whether or not the ini path ends with '/'.
    for log_path in glob.glob(os.path.join(URL, '*.log')):
        base = os.path.basename(log_path)
        shutil.move(log_path, os.path.join(URL, base + "." + Tm))
def Tar_log():
    """Compress rotated logs and tell nginx to reopen its log files.

    Gzip-compresses every rotated log (*.log.<timestamp>) in the nginx
    log directory read from /root/script/nginx_url.ini, removes the
    uncompressed original, then sends USR1 to the nginx master process
    once so it reopens logs under the original names.
    """
    FD = open('/root/script/nginx_url.ini', 'r')
    try:
        URL = FD.readline().rstrip().split(":")[0]
    finally:
        FD.close()
    for log_path in glob.glob(os.path.join(URL, '*.log.*')):
        # Skip anything already compressed by an earlier run.
        if log_path.endswith('.gz'):
            continue
        src = open(log_path, 'rb')  # binary mode: safe for any log content
        try:
            gz = gzip.open(log_path + '.gz', 'wb', 9)
            try:
                gz.write(src.read())
            finally:
                gz.close()
        finally:
            src.close()
        os.remove(log_path)
    # Signal nginx once, after all files are compressed, so it reopens
    # its log files (the originals were renamed away by Time_part).
    os.system("/bin/kill -USR1 `/bin/cat /usr/local/nginx/logs/nginx.pid`")
def Bak_log():
    """Archive compressed rotated logs to the backup directory.

    The first line of /root/script/nginx_url.ini holds
    'source_dir:dest_dir'; every *.log.*.gz file found in source_dir is
    moved to dest_dir.
    """
    FD = open('/root/script/nginx_url.ini', 'r')
    try:
        parts = FD.readline().strip().split(":")
    finally:
        FD.close()  # close promptly instead of after the move loop
    source_dir = parts[0]
    dest_dir = parts[1]
    for gz_path in glob.glob(os.path.join(source_dir, '*.log.*.gz')):
        base = os.path.basename(gz_path)
        shutil.move(gz_path, os.path.join(dest_dir, base))
if __name__ == '__main__':
    # Rotate, compress, then archive — exactly once per invocation
    # (the original file repeated this guard, running everything twice).
    Time_part()
    Tar_log()
    Bak_log()
配置文件nginx_url.ini :
注释:第一部分是nginx日志路径(建议以 / 结尾,原脚本用字符串拼接路径,缺少结尾的 / 会拼错路径),第二部分是日志备份路径
/usr/local/nginx/log/:/applog/log/