#!/bin/bash
#
# This script gets logs from our production webserver and archives them
# in a permanent location each day.

# Treat use of an unset variable as an error.
set -u

# The name of the domain, and the logs we want.
# LOGS is a space-separated list; word-splitting in the for-loop below
# is intentional.
DOMAIN="www.cancerprev.org"
LOGS="access ssl_access error ssl_error"

# SSH private key for authenticating connection to remote host.
SSH_ID="/etc/local-ssh/id_dsa"

# Where to get the log files from.
SOURCE_USER="canprev"
SOURCE_HOST="leary.csoft.net"
SOURCE_DIR="www/logs"
SOURCE_SUFFIX=".0.gz"

# Where to put them. Logs are grouped into one directory per year,
# and each file is stamped with today's date.
DEST_DIR="/var/log/local-vhost/$DOMAIN/$(date +%Y)"
DEST_SUFFIX="-$(date +%Y-%m-%d).gz"
DEST_PERMS=444

# Make sure the destination directory exists.
if [ ! -d "$DEST_DIR" ]; then
  mkdir --parents "$DEST_DIR" || exit 1
fi

# Make sure the destination directory is writable.
if [ ! -w "$DEST_DIR" ]; then
  echo "I do not have access to write to $DEST_DIR." >&2
  exit 1
fi

# Get the logs.
for log in $LOGS; do
  # Only get the file if it doesn't already exist.
  if [ ! -f "$DEST_DIR/$log$DEST_SUFFIX" ]; then
    # Copy the file from the remote host (-p preserve times, -B batch
    # mode so we never prompt for a password, -q quiet).
    # If the copy fails, remove any possibly incomplete file and exit
    # with error. Checking the command status directly replaces the
    # previous ${PIPESTATUS[0]} check, which was incorrect here because
    # scp is not part of a pipeline and PIPESTATUS is clobbered by any
    # intervening command.
    if ! scp -p -B -q -i "$SSH_ID" \
        "$SOURCE_USER@$SOURCE_HOST:$SOURCE_DIR/$log$SOURCE_SUFFIX" \
        "$DEST_DIR/$log$DEST_SUFFIX"; then
      rm -f "$DEST_DIR/$log$DEST_SUFFIX"
      exit 1
    fi

    # Make sure the new file has proper (read-only) permissions.
    chmod "$DEST_PERMS" "$DEST_DIR/$log$DEST_SUFFIX"
  fi
done