Backup script in Perl using rsync, File::Find and Archive::Tar

The following is a backup script that pulls backups from remote servers using rsync.

The script can do the following:

1) reads the config file (see the layout sketch below) and checks that the backup storage location exists and is writable

2) rsyncs each remote directory to the backup server

3) tars the backup into the archive directory of the backup server

4) removes all files older than 60 days (currently disabled)

5) sends a mail in case of any errors
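
The sites to back up live in a small %Config hash loaded from config_backup.pl (the full file is shown at the end of this post). A one-entry sketch of the layout the script expects:

# 'website' => { 'last octet of the server IP' => [ 'project dir name', 'remote parent dir' ] }
our %Config = (
    'site1.domain.com' => { '23' => [ 'site1-domain', '/var/www/html/' ] },
);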




-------------------------------------------------------
 this is your /script/backup_script.pl
-------------------------------------------------------
#!/usr/bin/perl 

use strict;
use warnings;
use Data::Dumper;
use Net::SMTP;
use File::Find;
use Archive::Tar;
######################################



our %Config;                             # filled in by the config file below
our $RCPT = 'admin@yourdomain.com';      # alert mail recipient used by sendemail() -- placeholder, change to your address

require('/<PATHTOCONFIGFILE>/config_backup.pl');



my $F1    = '/var/www/projects/';
my $F2    = '/backup/apps/DocRoot/';            # local storage for the rsync copies
my $ARCH  = '/backup/apps/DocRoot/archive/';    # tarballs go here
my $today = `date +%F`;         chomp($today);
my $now   = `date +%d%b-%H:%M`; chomp($now);
my $log   = "/backup/docroot-backup1.log";
my $KEY   = "/opt/.securekey/id_rsa";           # ssh key used for rsync
my $commonip = '10.0.1.';                       # common prefix of the remote servers' IPs
my $hostname = `hostname -i`;   chomp($hostname);


unless ( -e $F2 ) {
  sendemail("ALERT - DocRoot Backup - Storage Path ABSENT","Storage location $F2 was not found in the system at $now. \nTerminating DocRoot backup process at stage1. \nServer is $hostname");
  logit(" Backup Alert - $F2 STOR ABSENT");
  exit;
}

unless ( -w $F2 ) {
  sendemail("ALERT - DocRoot backup Alert - Write Error on storage","Apparently I CANNOT write to $F2 at $now. \nTerminating backup process at stage1.\nServer is $hostname ");
  logit(" Backup Alert - storage is probably readonly. Cannot write to $F2 ");
  exit;
}
logit("---------------------------- START TIME $now ---------------------------------------- ");

foreach my $web ( keys %Config )  {
  logit(" Start server $web");

  foreach my $server( keys %{$Config{$web}} )  {
    print  "/usr/bin/rsync -e \"ssh -i $KEY  \"  --delete -au root\@$commonip$server:$Config{$web}{$server}[1]$Config{$web}{$server}[0]  $F2$Config{$web}{$server}[0].$server.proj\n" ;
    system("/usr/bin/rsync -e \"ssh -i $KEY \"  --delete -au  root\@$commonip$server:$Config{$web}{$server}[1]$Config{$web}{$server}[0]  $F2$Config{$web}{$server}[0].$server.proj") ;
    logit("$F2$Config{$web}{$server}[0].$server.proj Rsync completed ... ");


  }
  logit(" End  server ") ;

}


foreach my $web ( keys %Config )  {

  foreach my $server  ( keys %{$Config{$web}} )  {

    if ( -e  "$F2$Config{$web}{$server}[0].$server.proj" ) { 

      if ( chdir("$F2$Config{$web}{$server}[0].$server.proj") ) {                
      print "  tar --use=lbzip2 -c -f $ARCH$Config{$web}{$server}[0].$server.proj-$today.tbz2 . 2>/dev/null \n";
        system("tar --use=lbzip2 -c -f $ARCH$Config{$web}{$server}[0].$server.proj-$today.tbz2 . 2>/dev/null");

        if ( $? == 0 ) {  

          logit("Archive of $F2$Config{$web}{$server}[0].$server.proj  to $ARCH$Config{$web}{$server}[0].$server.proj-$today.tbz2  success");
        }else{
          logit("Archive of $F2$Config{$web}{$server}.$server.proj  failed") ;
          sendemail("ALERT - DocRoot backup Alert - Archive Fail", "$now :[STAGE - 2, Server = $hostname ] -- Archive of  $F2$Config{$web}{$server}[0]\.$server\.proj  failed.\nPlease check manually.\nPossible failure due to bad ecit status. Continuing with remaining process." ); 

        }

      } else { 

        logit("NO Dir $F2$Config{$web}{$server}[0].$server.proj ") ; 

      } 

    }else { 

      sendemail("ALERT - DocRoot backup Alert - Archiving Failed","Archiving of $F2$Config{$web}{$server}[0]\.$server\.proj failed  $now  \nTerminating backup process at stage2-Archiving.\nServer is $hostname "); 
      logit("-- Compression Failed....Not Found $F2/$Config{$web}{$server}[0]\.$server\.proj ");


    } 


  }

}

logit("---------------------------- END TIME $now ---------------------------------------- ");


# purge backups older than AGE in days
my @file_list;
my @find_dirs = ($ARCH); # directories to search
my $now_time = time(); # get current time
my $days = 60; # how many days old
my $seconds_per_day = 60*60*24; # seconds in a day
my $AGE = $days*$seconds_per_day; # age in seconds

find( sub {
    my $file = $File::Find::name;
    if ( -f $file ) {
        push(@file_list, $file);
    }
}, @find_dirs);

for my $file (@file_list) {
  my @stats = stat($file);
  if ( $now_time - $stats[9] > $AGE ) {   # $stats[9] is the file's mtime
    # unlink $file;      # deletion is disabled for now; we only report
    print "unlink $file\n";
  }
}
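
# Note: the same age check can also be written with Perl's -M file test (age in days
# since last modification). An equivalent sketch using the same $days and @find_dirs
# as above, with the actual unlink still commented out:
#
# find( sub {
#     my $file = $File::Find::name;
#     return unless -f $file;
#     if ( -M $file > $days ) {
#         print "unlink $file\n";
#         # unlink $file;   # enable once you trust the selection
#     }
# }, @find_dirs );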





sub sendemail {
# usage: sendemail("subject","message");

  my $from      = 'backupserver@yourdomain.com';
  my $sub       = $_[0];
  my $content   = $_[1];
  my $to        = $RCPT;
  my $relayhost = '10.0.0.22';   # your SMTP relay; use 'localhost' if you run a local MTA

  my $smtp = Net::SMTP->new($relayhost, Debug => 1)
      or do { logit("Could not connect to SMTP relay $relayhost"); return; };

  $smtp->mail($from);
  $smtp->to($to);
  $smtp->data();
  $smtp->datasend("Subject: $sub");
  $smtp->datasend("\n");
  $smtp->datasend("\n");
  $smtp->datasend("$content");
  $smtp->dataend();
  $smtp->quit();
}


sub logit {
  my $s = shift;
  my $logtimestamp =`date +%d%b-%H:%M:%S`  ;
  chomp($logtimestamp);

  print "$s\n";
  my $fh;
  open($fh, '>>', "$log") or die "$log: $!";
  print $fh "$logtimestamp : $s\n";
  close($fh);
}

------------------------------------------------------------------------------------------------------------------
this is the config file -- /script/config_backup.pl
------------------------------------------------------------------------------------------------------------------
#!/usr/bin/perl

# website  =>  { last octet of the server IP => [ project dir name, remote parent dir ] }
%Config = (
    'site1.domain.com'     => { '23' => [ 'site1-domain',        '/var/www/html/' ] },
    'website2.domain.com'  => { '24' => [ 'website2-domain.com', '/var/www/html/' ] },
    'ontravel1.domain.com' => { '25' => [ 'ontravel1',           '/var/www/html/' ] },
);

1;   # require() expects the file to return a true value
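
A quick way to sanity-check new entries is a small helper that loads the config and prints the rsync source/destination pairs the backup script will build from it. A minimal sketch, assuming the same $commonip and $F2 values as backup_script.pl:

#!/usr/bin/perl
use strict;
use warnings;

our %Config;
require('/<PATHTOCONFIGFILE>/config_backup.pl');

my $commonip = '10.0.1.';
my $F2       = '/backup/apps/DocRoot/';

foreach my $web ( keys %Config ) {
  foreach my $server ( keys %{ $Config{$web} } ) {
    my ($dir, $remote_parent) = @{ $Config{$web}{$server} };
    print "$web: root\@$commonip$server:$remote_parent$dir  ->  $F2$dir.$server.proj\n";
  }
}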





**** use at your own risk 







extract data from an HTML table using HTML::TableExtract

In this tutorial you will learn:

1) how to extract data from an HTML table
2) regexes
3) appending a hash to a hash (see the sketch after this list)
4) for loops
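
Point 3 is the pattern used near the end of the script below: pushing a hash reference onto an array that lives inside a nested hash, relying on Perl's autovivification to create the intermediate levels. A minimal sketch with made-up keys:

#!/usr/bin/perl
use strict;
use warnings;
use Data::Dumper;

my %serverdata;

# autovivification creates $serverdata{'172.16.1.51'}{'web1'} = [] on first use
push @{ $serverdata{'172.16.1.51'}{'web1'} },
     { servername => 'web1', application => 'apache,php' };

print Dumper(\%serverdata);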

----------------------------------------------------------------------------------------------
HTML file 
----------------------------------------------------------------------------------------------
<table class="table1" border="1" cellspacing="1" cellpadding="1" style="width: 1144px; height: 883px;">

<tr>
<td> <b>Dom0<br /></b>
</td><td> <b>DomU <br /></b>
</td><td> <b>ipaddress<br /></b>
</td><td> <b>server name <br /></b>
</td><td> <b>application <br /></b>
</td><td> <b>website <br /></b>
</td><td> <b>assigned to </b><br />
</td></tr>
<tr>
<td> S1 (1.51)<br />
</td><td> yourwebsitenews-db 10GB ram 100 GB HDD &nbsp;&nbsp;
</td><td> 1.0.0.101<br />
</td><td> yourwebsitenews-db
</td><td> win2k8 ,sql
</td><td> MSSQL Server
</td><td> yourwebsitenews
</td></tr>
<tr>
<td> <br />
</td><td> yourwebsitetvweb3&nbsp; 4GB ram 48 GB HDD<br />
</td><td> 9.9.02.132<br />
</td><td> yourwebsitetvweb3<br />
</td><td> win2k8,IIS<br />
</td><td> yourwebsite TV <br />
</td><td> yourwebsitetv<br />
</td></tr>
<tr>
<td> <br />
</td><td> yourwebsitetvweb4&nbsp; 4GB ram 30 GB HDD<br />
</td><td> 9.9.202.146<br />
</td><td> yourwebsitetvweb4<br />
</td><td> win2k8,IIS<br />
</td><td> yourwebsite TV <br />
</td><td> yourwebsitetv<br />
</td></tr>
<tr>
<td> <br />
</td><td> <br />
</td><td> <br />
</td></tr>
<tr>
<td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td></tr>
<tr>
<td> S2 (1.52) <br />
</td><td> web1 5.5GB ram 50GB hdd<br />
</td><td> 1.0.0.106<br />
</td><td> web1 <br />
</td><td> apache,php,memcache<br />
</td><td> someweb,cndas,lms.someweb.com<br />
</td><td> IWPL
</td></tr>
<tr>
<td> <br />
</td><td> web4 5.5GB ram 50GB hdd<br />
</td><td> 1.0.0.109<br />
</td><td> web4<br />
</td><td> apache,php,memcache<br />
</td><td> b.in,gs.in.com
</td><td> IWPL
</td></tr>
<tr>
<td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td></tr>
<tr>
<td> S3 (1.53) <br />
</td><td> DB1 11GB ram 100GB hdd <br />
</td><td> 1.0.0.111<br />
</td><td> db1<br />
</td><td> mysql <br />
</td><td> all commons (auth  ),someweb<br />
</td><td> IL
</td></tr>
<tr>
<td> <br />
<tr>
<td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td></tr>
<tr>
<td> S4 (1.54) <br />
</td><td> DB3 10GB ram 200GB hdd<br />
</td><td> 1.0.0.116<br />
</td><td> db3<br />
</td><td> mysql <br />
</td><td> ors_db,ls_ting
</td><td> IWPL
</td></tr>
<tr>
<td> <br />
</td><td> yourwebsitetvmemcach-1 10 Gb ram 24 Gb hdd<br />
</td><td> 9.9.202.134<br />
</td><td> yourwebsitetvmem-1<br />
</td><td> yourwebsite Tv Memcached Server-1<br />
</td><td> yourwebsite Tv Memcached Server-1<br />
</td><td> yourwebsitetv
</td></tr>
<tr>
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td></tr>
<tr>
<td> S5 (1.55)<br />
</td><td> 1 systems ops 8GB 100GB <br />
</td><td> 1.0.0.121<br />
</td><td> ops<br />
</td><td> mysql,php,apache<br /><br />
</td><td> odw <br /> opsview <br /> reports <br /> runtime <br />
</td><td> IWPL
</td></tr>
<tr>
<td> <br />
<td> <br />
<td> <br />
</td><td> cache 4GB ram 129GB hdd<br />
</td><td> 9.9.202.209<br />
</td><td> linux-yourwebsite<br />
</td><td> php,apache<br />
</td><td> yourwebsite <br />
</td><td> yourwebsitetv
</td></tr>
<tr>
<td> <br />
</td><td> cache 6GB ram 20GB hdd<br />
</td><td> 1.0.0.125<br />
</td><td> cache-9<br />
</td><td> backup server
</td><td> backup server
</td><td> IL
</td></tr>
<tr>
<td> <br />
</td><td> web16- 5 GB RAM 46GB HDD
</td><td> 1.0.0.103
</td><td> web16
</td><td> php,apache
</td><td> voiofa.com,solr instance
</td><td> Il
</td></tr>
<tr>
<td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td><td> <br />
</td></tr>
<tr>
<td> S6 (1.56) <br />
</td><td> web9 5GBram <br />
</td><td> 1.0.0.126<br />
</td><td> web9 <br />
</td><td> apache,php,memcache<br />
</td><td> Il
</td></tr>
<tr>
<td> <br />
</td><td> web10 <br />
</td><td> 1.0.0.127<br />
</td><td> web10<br />
</td><td> apache,php,memcache<br />
</td><td> Il
</td></tr>
<tr>
<td> <br />
</td><td> r1s6v1web13&nbsp;5-GB ram - 50-GB hdd<br />
</td><td> 1.0.0.129<br />
</td><td> web13<br />
</td><td> apache,php,memcache
</td><td> critry,m.try,admin.ccry.com
</td><td> IL
</td></tr></table>


==================================================================
PERL script 
==================================================================
#!/usr/bin/perl 

use strict;
use warnings; 
use Data::Dumper;
use LWP::Simple;
use HTML::TableExtract;


# host the HTML table above on a local site and point $url at it
my $url = 'http://localhost/table.html';   # placeholder URL -- change to wherever you host the table

my @data = get_data($url);

shift(@data);      # drop the header row
my %serverdata;

my $DOM0;          # carries the current Dom0 across rows where the first column is blank

my $re1='((?:[a-z][a-z]*[0-9]+[a-z0-9]*))';  # Alphanum 1
my $re2='(\\s+)';  # White Space 1
my $re3='\\((.*)\\)';  # Round Braces 1


foreach my $y (@data) {
  next unless ($y->[2]);                    # skip rows with no ipaddress cell
  $DOM0 = ($y->[0]) ? $y->[0] : $DOM0;      # reuse the previous Dom0 when the cell is blank

  my $re = $re1.$re2.$re3;
  $DOM0 =~ s/$re/172.16.$3/isg;             # e.g. "S1 (1.51)"  ->  "172.16.1.51"

  # columns: Dom0, DomU, ipaddress, server name, application, website, assigned to
  push @{ $serverdata{$DOM0}{$y->[2]} },
       { serverdetails => $y->[1],
         servername    => $y->[3],
         application   => $y->[4],
         websites      => $y->[5] };
}

foreach my $keysi ( keys %serverdata ) {
  print "================== $keysi ===============\n";
  print Dumper($serverdata{$keysi});
  print "==================  ===============\n";
}



sub get_data {
  my $sub_url = $_[0];
  my $html    = get($sub_url);

  my @arry;
  my $te = HTML::TableExtract->new( depth => 0, count => 0, attribs => { class => "table1" } );
  $te->parse($html);
  foreach my $ts ($te->tables) {
    foreach my $row ($ts->rows) {
      # trim whitespace/newlines from every cell; use map (not grep) so cells
      # that need no trimming are not dropped from the row
      @{$row} = map { my $c = defined $_ ? $_ : ''; $c =~ s/\n+/ /g; $c =~ s/^\s+|\s+$//g; $c } @{$row};
      push( @arry, [ @{$row} ] );
    }
  }
  return @arry;
}


AWS -- EC2 ELB -- shell script to change the IP in the backend varnish config in case the ELB IPs keep changing

In AWS the ELB (load balancer) IP is dynamic, so we need to keep it updated in the varnish config file. This script does the trick.

------------------------------------------------------------
#!/bin/bash


# check if varnish is running
# if running, reload; if not, restart
varnishstatus=$(/usr/bin/pgrep varnishd |wc -l )

if [ $varnishstatus -gt 0  ]
then
vstatus='reload'
else 
vstatus='restart'
fi
# resolve the ELB IPs and compare with the last known set

temdigcount=/tmp/temdigcount 
tmpbackendconfig=/tmp/tmpbackendconfig


backendconfig='/etc/varnish/health.backend.vcl'
LBS_aws='/etc/varnish/LBS_aws'      # last known set of ELB IPs
LB2='<your-elb-dns-name>'           # placeholder: DNS name of your ELB
touch $LBS_aws


# resolve the ELB name, one IP per array element
OIFS=$IFS
IFS=$'\n'
array=($(dig +short $LB2 | sort))
IFS=$OIFS




echo ${array[@]} > $temdigcount

DIFF1=`diff $temdigcount $LBS_aws|wc -l `

echo $DIFF1

if [ $DIFF1 -eq 0 ]
then
echo -e "no diff in ip \n"
exit 
fi 


set_count=0

cat /dev/null > $tmpbackendconfig


for i in ${array[@]}
do
let set_count++
 SERVERIP=$i
cat <<EOF >> $tmpbackendconfig
backend health$set_count {
  .host = "$SERVERIP";
  .port = "http";
  .first_byte_timeout = 300s;
  .probe = {
         .request = "GET / HTTP/1.1" "Host: yoursite.com" "Connection: close";
         .interval = 10s;
         .timeout = 10s;
         .window = 5;
         .threshold = 3;
    }
  }
EOF
done




echo -e "\n Bakcend changed, updating VCL and Reloading VARNISH\n\n"
 /bin/cp -f $tmpbackendconfig  $backendconfig
/etc/init.d/varnish reload
 [ -f $FILE ] && \
 cp ${FILE} $FILE_${NOW} && \
 varnishadm -T localhost:1234 vcl.load vcl_${NOW} $FILE && \
 varnishadm -T localhost:1234 vcl.use  vcl_${NOW}  && \
 varnishadm -T localhost:1234 vcl.list |tail -n 3




