#!/bin/bash
#
# Purge nginx proxy-cache entries for small cached objects.
#
# Scans today's nginx access log for requests that returned status 200,
# were under 100 bytes, and were cache HITs; locates the matching entries
# in the on-disk proxy cache; rewrites each cached KEY into a purge URL
# (host:7086/purge/...) and issues it with curl, logging the responses.
#
# Globals (paths are site-specific):
#   cache_path - nginx proxy cache directory to scan
#   logfile    - nginx access log
#   logdir     - base directory for purge logs

cache_path=/home/nginx/proxy_cache_dir
logfile=/home/nginx/logs/access.log
logdir=/home/nginx/logs
datetime=$(date +%d/%b/%Y)

# Read access-log lines on stdin; print the request URI ($7) of every
# entry with status 200 ($9), body size < 100 bytes ($10), and a cache
# HIT marker in the last field.
filter_hit_urls() {
  awk '$9 == 200 && $10 < 100 && $NF ~ "HIT" {print $7}'
}

# Read full cache-key URLs on stdin; rewrite them into purge URLs by
# inserting the purge port and path prefix after the host.
to_purge_url() {
  sed "s#1img.looklook.cn/#1img.looklook.cn:7086/purge/#"
}

main() {
  # Nothing to do (e.g. log rotated away) — exit quietly rather than
  # letting grep fail on a missing file.
  if [[ ! -f "$logfile" ]]; then
    echo "access log not found: $logfile" >&2
    return 0
  fi
  mkdir -p "$logdir/purgelog"

  # Collect today's HIT URLs, deduplicated (plain sort -u: URLs are
  # strings, numeric sort added nothing).
  grep "$datetime" "$logfile" | filter_hit_urls | sort -u > url.list

  while IFS= read -r uri; do
    # Find the cached objects containing this URI, extract their "KEY: "
    # header, rebuild the full URL, and keep only lines that end in an
    # alphanumeric character (drops truncated/garbage matches). Pattern
    # is quoted so the shell cannot glob-expand it.
    grep -a -r -- "$uri" "$cache_path" | strings | grep "KEY: " \
      | awk -F'KEY: ' '{print "http://"$2;}' \
      | grep '[a-zA-Z0-9]$' \
      | to_purge_url > cache.list

    # BUG FIX: the original wrote to cache.list but then iterated over
    # cache_list.txt, so freshly generated purge URLs were never used.
    while IFS= read -r url; do
      curl "$url" | tee -a "$logdir/purgelog/$(date +%y%m%d).log"
    done < cache.list
  done < url.list
}

main "$@"
#!/bin/bash
#
# Incremental nginx cache purge based on new access-log lines.
#
# Diffs the live access log against the copy saved on the previous run,
# extracts newly-appeared entries that were 200 / < 100 bytes / cache
# HITs, turns each request path into a purge URL on port 7086, issues it
# with curl (logging responses), then snapshots the log for next time.
#
# Exit status: 3 on first run (baseline copy created, nothing purged).

access_log=/home/nginx/logs/access.log
old_log=/home/nginx/logs/old.log
purge_log_dir=/home/nginx/logs/purgelog

# Read `diff new old` output on stdin and print one purge URL per
# qualifying NEW log line. With diff's "< " marker as $1, the log fields
# shift by one: status is $10, size is $11, request path is $8.
# BUG FIX: the original matched both "<" and ">" diff lines, so entries
# present only in the OLD log (already purged on a previous run) were
# purged again; we now require the "<" marker explicitly.
extract_purge_urls() {
  awk '$1 == "<" && $10 == 200 && $11 < 100 && $NF ~ "HIT" {
    print "http://1img.looklook.cn:7086/purge"$8
  }'
}

main() {
  if [[ ! -f "$access_log" ]]; then
    echo "access log not found: $access_log" >&2
    return 0
  fi

  # First run: establish the baseline snapshot and stop (original
  # behavior: exit code 3, no purging).
  if [[ ! -f "$old_log" ]]; then
    cp -- "$access_log" "$old_log"
    return 3
  fi

  mkdir -p "$purge_log_dir"

  # Purge each new qualifying URL once. The curl loop replaces the
  # original awk system() call, which spawned a shell per URL and relied
  # on that shell re-expanding a $(date ...) hidden inside a
  # single-quoted awk string.
  diff "$access_log" "$old_log" | extract_purge_urls | sort -u \
    | while IFS= read -r url; do
        curl "$url" | tee -a "$purge_log_dir/$(date +%y%m%d).log"
      done

  # Snapshot the current log for the next run (cp instead of cat > ).
  cp -f -- "$access_log" "$old_log"
}

main "$@"