1. Periodically delete logs under a given directory
#!/bin/bash
LOG_PATH=/home/app/sxpservice/logs
# Tar the logs from 2 days ago
cd $LOG_PATH || exit 1
DATE=`date -d "2 day ago" +"%Y%m%d"`
LOG_FILE=${DATE}
if [ -f ${DATE}.tar.gz ]; then
    echo "The ${DATE}.tar.gz already exists!"
else
    tar zcf ${DATE}.tar.gz $LOG_FILE && rm -rf $LOG_FILE
fi
# Delete the tarball from 30 days ago
#DEL_FILE=$(date -d "30 day ago" +"%Y%m%d").tar.gz
#if [ -f $DEL_FILE ]; then
#    rm -f $DEL_FILE && echo "$DEL_FILE is deleted."
#fi
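The "periodically" part of the heading is handled by cron rather than by the script itself. A minimal scheduling sketch, assuming the script above is saved as /home/app/scripts/clean_logs.sh (a placeholder path, not from the original):

# Add with "crontab -e" for the app user: run the cleanup every day at 01:00
0 1 * * * /bin/bash /home/app/scripts/clean_logs.sh >> /home/app/scripts/clean_logs.cron.log 2>&1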
2. Periodically copy the backed-up logs to another machine
#!/bin/bash
export PATH="/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin"
LOGDIR=/home/app/sxpservice/logs
# Archive and ship the log directory from 7 days ago
LOG=`date -d "7 day ago" +"%Y%m%d"`
MONTH=`date -d "7 day ago" +"%m"`
YEAR=`date -d "7 day ago" +"%Y"`
BACKUPSERVER=10.1.47.107
HOSTNAME=`hostname`
# Last two octets of eth0's IPv4 address, used to tag this host's backup directory
IP=`ip address show dev eth0 | grep -w inet | awk -F. '{print $3"."$4}' | awk -F/ '{print $1}'`
LOCAL=${HOSTNAME}_$IP
REMOTEDIR=/log/LogBackup/${LOCAL}/${YEAR}/${MONTH}
function compress_log(){
    cd $LOGDIR || exit 1
    if [ -d ${LOG} ]; then
        /bin/tar zcf ${LOG}.tar.gz ${LOG} && sleep 3 && rm -rf ${LOG}
        current_time=`date "+%Y-%m-%d %H:%M:%S"`
        echo "${current_time}: ${LOG} archived and deleted successfully" >> Backup.log
    else
        current_time=`date "+%Y-%m-%d %H:%M:%S"`
        echo "${current_time}: ${LOG} does not exist" >> Backup.log
        echo "====================================" >> Backup.log
    fi
}
function backup_log(){
    # Create the remote directory first; abort if the backup server is unreachable
    ssh app@${BACKUPSERVER} "mkdir -p $REMOTEDIR" || exit 1
    if [ -f ${LOG}.tar.gz ]; then
        # -l 600000 caps scp bandwidth at 600000 Kbit/s
        scp -l 600000 ${LOG}.tar.gz app@${BACKUPSERVER}:${REMOTEDIR} && rm -f ${LOG}.tar.gz
        current_time=`date "+%Y-%m-%d %H:%M:%S"`
        echo "${current_time}: ${LOG} backed up successfully" >> Backup.log
        echo "====================================" >> Backup.log
    fi
}
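As written, the two functions are defined but never invoked, and the scp/ssh steps need passwordless key authentication to run unattended from cron. A minimal sketch of the wiring and scheduling, assuming the script is saved as /home/app/scripts/backup_logs.sh (the path, key type, and schedule are placeholders, not from the original):

# Append to the end of the script so the two steps actually run:
compress_log
backup_log

# One-time setup as the app user so scp/ssh reach the backup server without a password:
ssh-keygen -t rsa
ssh-copy-id app@10.1.47.107

# Then add a line like this with "crontab -e" to run the backup every day at 02:00:
0 2 * * * /bin/bash /home/app/scripts/backup_logs.sh >> /home/app/scripts/backup_logs.cron.log 2>&1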