Leser: 1
|< 1 2 >| | 11 Einträge, 2 Seiten |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138
#!/bin/sh
# Batch-process uploaded WoW combat logs: unpack archives, keep only the
# current raid's lines, name the result by date/instance/sequence, feed it
# to stasis, and archive the originals.  Intended to run from cron.
# (Original shebang said /usr/bin/perl, but this is a Bourne-shell script.)

TMP=/tmp

#### Directories & Stasis Location ####
#### Edit these to suit your needs
# RAID = where you want your raid files
RAID=~/raidstats/logs/raids/
LOGS=~/raidstats/logs-in
OLD=~/raidstats/logs-old
REPO=~/raidstats/logs/
# Directory in which stasis lives
STASIS=~/raidstats/stasiscl

mkdir -p "$RAID" "$LOGS" "$OLD" "$REPO"

# Lockfile: bail out if a previous run is still going.  The trap guarantees
# the lock is dropped on *every* exit path (the original leaked it whenever
# the script died between 'touch' and its explicit 'rm' calls).
LOCK=/tmp/log.processing
if [ -f "$LOCK" ]; then exit; fi
touch "$LOCK"
trap 'rm -f "$LOCK"' EXIT

# The commented out line is what I use, because I process these on a
# different host than people ftp them to.  Left in for example only.
cd "$LOGS" || exit 1
# /usr/local/bin/wget --quiet --delete-remote --ftp-user=wildly_logs --ftp-password=XXXX ftp://www.XXXX.org/*

# Nothing to do?
if [ "$(ls "$LOGS" | wc -l)" -eq 0 ]; then
    exit
fi
ls "$LOGS"

# Unpack any uploaded archives, stashing the originals in $OLD.
# Glob + '-f' guard replaces the original's 'ls | wc -l' existence tests,
# which break on odd filenames and spawn extra processes.
for ZIP in *.zip *.ZIP; do
    [ -f "$ZIP" ] || continue
    /usr/local/bin/unzip -qq "$ZIP"
    mv "$ZIP" "$OLD"
done
for GZ in *.gz; do
    [ -f "$GZ" ] || continue
    /usr/bin/gunzip -q -c "$GZ" > "$GZ.txt"
    mv "$GZ" "$OLD"
done

# Mac cleanup: zip files made on OS X carry this metadata directory.
rm -rf "$LOGS/__MACOSX"

for LOG in *; do
    [ -f "$LOG" ] || continue
    echo "Processing $LOG..."
    cp "$LOG" "$TMP"
    mv "$LOG" "$OLD"

    # Date stuff: combat-log lines start with M/D; sample line 10 to skip
    # any header noise.  ODATE is e.g. "9/10".
    ODATE=$(head -10 "$TMP/$LOG" | tail -1 | awk '{ print $1 }')
    DAY=${ODATE##*/}
    MON=${ODATE%%/*}
    YEAR=$(date +%y)
    # A December log processed in January belongs to last year.
    # NOTE(review): 'date -v-1y' is BSD syntax; on GNU coreutils use
    # 'date -d "1 year ago" +%y' instead.
    if [ "$MON" -eq 12 ] && [ "$(date +%m)" -eq 1 ]; then
        YEAR=$(date -v-1y +%y)
    fi
    # Zero-pad day/month so generated filenames sort correctly.
    [ ${#DAY} -eq 1 ] && DAY=0$DAY
    [ ${#MON} -eq 1 ] && MON=0$MON
    DATE=${MON}-${DAY}-${YEAR}

    # Already processed this exact upload?  Stop; the EXIT trap drops the lock.
    if [ -f "$REPO/$LOG" ]; then
        exit
    fi

    # Keep only the lines from the log's own date (drops stale leftovers).
    grep "^${ODATE}" "$TMP/$LOG" > "$TMP/$LOG.out"
    rm "$TMP/$LOG"

    # What raid?  External helper inspects the log contents.
    INSTANCE=$(~/bin/getraid.pl "$TMP/$LOG.out")

    # Other logs for the same date+instance?  Bump the sequence number.
    # BUG FIX: the original took 'ls $REPO | tail -1' — the last file of the
    # WHOLE repo — which yields a wrong sequence whenever another raid's
    # files sort later.  Restrict the listing to this date+instance.
    # (Caveat: lexical sort means ".10" sorts before ".2"; unchanged from
    # the original's ordering behavior.)
    LAST=1
    if [ -f "$REPO/${DATE}-${INSTANCE}.1.txt.gz" ]; then
        LAST=$(ls "$REPO" | grep "^${DATE}-${INSTANCE}\." | tail -1)
        LAST=${LAST#*.}          # strip "MM-DD-YY-Instance." prefix
        LAST=${LAST%.txt.gz}     # strip ".txt.gz" suffix -> bare sequence
        LAST=$((LAST + 1))
    fi

    # Move into place
    cp "$TMP/$LOG.out" "$REPO/${DATE}-${INSTANCE}.${LAST}.txt"
    rm "$TMP/$LOG.out"

    # Prep directory
    mkdir -p "$RAID/$MON-$YEAR"

    # Stasis: generate the web report, timing the run.
    START=$(date +%s)
    cd "$STASIS" || exit 1
    ./stasis add -dir "$RAID/$MON-$YEAR" -file "$REPO/${DATE}-${INSTANCE}.${LAST}.txt" -attempt -trash -overall -collapse
    echo "Run time: $(( $(date +%s) - START )) seconds."

    # Tidy up
    echo "Added $REPO/${DATE}-${INSTANCE}.${LAST}.txt"
    gzip "$REPO/${DATE}-${INSTANCE}.${LAST}.txt"
    cd "$LOGS" || exit 1
done
Knevil+2008-09-10 10:21:34--naja ich warte erstmal bis jemand ne andre möglichkeit dafür gefunden hat, das ist mir doch alles ein bissi zu kompliziert ;)
Quote: This is a pair of scripts which I use to automatically handle log uploads. My goal was to be able to allow people to upload logs into an FTP directory, and run a script out of cron that would periodically check that directory and do intelligent things. I wanted the logs to be renamed, organized, and so forth.
It assumes four directories -- one for incoming logs, one for processed logs, one for raids, and one for saved files (which is a backup, so if something screws up you can go get the originals).
|< 1 2 >| | 11 Einträge, 2 Seiten |