send email using MIME::Lite::TT::HTML

#!/usr/bin/perl

use strict;
use warnings;
use MIME::Lite::TT::HTML;

email( 'agnello', 'agnello.dsouza@gmail.com' );

sub email {
    my ( $recruiter_name, $to ) = @_;

    # Template Toolkit options and parameters
    my %options = ( INCLUDE_PATH => '/scripts' );
    my %params  = ( recruiter_name => $recruiter_name );

    my $msg = MIME::Lite::TT::HTML->new(
        From     => 'agnello.dsouza@gmail.com',
        To       => $to,
        Subject  => 'Some subject',
        Template => {
            html => 'resume.tt',
            text => 'text.resume.tt',
        },
        TmplOptions => \%options,
        TmplParams  => \%params,
    );

    # Set our content type properly
    $msg->attr( "content-type" => "multipart/mixed" );

    # Attach a PDF to the message (Path must point at the file, not the directory)
    $msg->attach(
        Type        => 'application/pdf',
        Path        => '/scripts/Agnello_dsouza.pdf',
        Filename    => 'Agnello_dsouza.pdf',
        Disposition => 'attachment',
    );

    # $msg->send('smtp', "smtp.gmail.com", SSL=>1, AuthUser=>'agnello.dsouza.linux@gmail.com', AuthPass=>'xxxxxxxxxx', Debug=>1);
    $msg->send();
}
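
For reference, resume.tt and text.resume.tt live in the INCLUDE_PATH above (/scripts). A minimal resume.tt could look like this (hypothetical content; recruiter_name is the only parameter the script passes in):

<html>
  <body>
    <p>Dear [% recruiter_name %],</p>
    <p>Please find my resume attached.</p>
  </body>
</html>

text.resume.tt would carry the same message as plain text, which MIME::Lite::TT::HTML uses for the multipart/alternative fallback.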

HACK: script to get all LinkedIn contact email IDs -- part 1

#!/usr/bin/perl

use strict;
use warnings;
use WWW::Mechanize;
use HTTP::Cookies;
use JSON;
use Data::Dumper;

my $username  = 'agnello.dsouza';
my $password  = 'xxxxxxxxxxxxxx';
my $maiurl    = 'https://www.linkedin.com';    # base URL (left undeclared in the original)
my $loginurl  = $maiurl . '/uas/login?goback=&trk=hb_signin';
my $json_file = '/tmp/json.data';

my $mech = login_linkedin();

# First run: fetch the full contact list and cache it to disk.
# my $jdata = get_jdata($mech, $all_data);
# write_to_file($jdata, $json_file);

my $jdata = read_from_file($json_file);

my @contacts = gen_url($jdata);

foreach (@contacts) {
    my $fnd_details_json = get_jdata( $mech, $_ );
    my $email = $fnd_details_json->{'contact_data'}->{'emails_extended'}->[0]->{'email'};
    next unless $email;
    print "$email\n";
}

sub gen_url {
    my $jdata = shift;
    my @url;
    for my $i ( @{ $jdata->{'contacts'} } ) {
        my $uri = $maiurl
            . '/contacts/api/contacts/'
            . $i->{'id'}
            . '/?fields=name,emails_extended,birthday,phone_numbers,sites,addresses,company,title,location,ims,profiles,twitter,wechat,display_sources';
        push( @url, $uri );
    }
    return @url;
}

sub get_jdata {
    my ( $mech, $url ) = @_;
    $mech->get($url);
    return decode_json( $mech->content() );
}

sub login_linkedin {
    my $mech = WWW::Mechanize->new();
    $mech->cookie_jar( HTTP::Cookies->new() );

    $mech->get($loginurl);
    $mech->form_id('login');
    $mech->field( 'session_key'      => $username );
    $mech->field( 'session_password' => $password );
    $mech->click_button( value => 'Sign In' );

    return $mech;
}

sub read_from_file {
    my $file = shift;
    my $json;
    {
        local $/;    # enable 'slurp' mode
        open my $fh, "<", $file or die "cannot open $file: $!";
        $json = <$fh>;
        close $fh;
    }
    return decode_json($json);
}

sub write_to_file {
    my ( $data, $file ) = @_;
    open my $fh, ">", $file or die "cannot open $file: $!";
    print $fh encode_json($data);
    close $fh;
}
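
For context, the data shapes the script expects, inferred from the accessors above (values are hypothetical):

# cached contact list in /tmp/json.data:
# { 'contacts' => [ { 'id' => 12345, ... }, ... ] }
#
# per-contact response fetched in the loop:
# { 'contact_data' => { 'emails_extended' => [ { 'email' => 'someone@example.com' }, ... ], ... } }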


perl script to download a file from a website - integration with Jenkins - using Mechanize

#!/usr/bin/perl

use strict;
use warnings;
use WWW::Mechanize;
use HTTP::Cookies;
use JSON;
use Getopt::Long;
use POSIX qw/ strftime /;

$| = 1;

my $username   = 'something@something.com';
my $password   = 'xxxxxx';
my $loginurl   = 'https://loginurl';
my $insighturl = 'https://loginurl';
my %region_Squeryurl = (
    'apac' => '/cloud/paths/download/prod/slow-query/9538?s=1767176',
    'na'   => '/cloud/paths/download/prod/slow-query/8972?s=1767151',
    'emea' => '/cloud/paths/download/prod/slow-query/3664?s=670131',
);

my $region;
GetOptions( 'region|r=s' => \$region );
die "usage: $0 -r [apac|na|emea]\n" unless $region && $region_Squeryurl{$region};

my $date = strftime( "%Y%m%d", localtime() );
# my $log_dir = "$ENV{JENKINS_HOME}/docroot/getlog/";
my $log_dir = "/mnt/getlog/";

my $fullpath = "$log_dir$region/$date/";

unless ( -d $fullpath ) {
    print "creating $fullpath\n";
    system("sudo mkdir -p $fullpath");
    system("sudo chown -R www-data:www-data $fullpath");
}

my $mech = WWW::Mechanize->new();
$mech->cookie_jar( HTTP::Cookies->new() );

download_file( $username, $password, $loginurl, $insighturl, $region_Squeryurl{$region}, $date, $fullpath );

sub download_file {
    my ( $username, $password, $loginurl, $insighturl, $region_Squeryurl, $date, $fullpath ) = @_;

    # Log in, then request the slow-query export; the first response returns
    # a polling URL, which we hit every two seconds until 'delay' drops to 0.
    $mech->get($loginurl);
    $mech->form_id('user-login');
    $mech->field( 'name' => $username );
    $mech->field( 'pass' => $password );
    $mech->click_button( value => 'Sign in' );

    $mech->get("$insighturl$region_Squeryurl");
    my $jdata_1 = decode_json( $mech->content() );

    $jdata_1 = $jdata_1->[1]->{'url'};
    $jdata_1 =~ s/'//g;
    print "$insighturl$jdata_1\n";

    my $jdata;
    while (1) {
        sleep(2);
        $mech->get("$insighturl$jdata_1");
        $jdata = decode_json( $mech->content() );
        print "get url $insighturl$jdata_1\n";
        if ( $jdata->[3]->{'delay'} =~ /^0$/ ) {
            print "mnt url --> " . $jdata->[3]->{'url'} . "\n";
            print "delay ---> " . $jdata->[3]->{'delay'} . "\n";
            last;
        }
    }

    my $D_url = $jdata->[3]->{'url'};

    $mech->get($D_url);
    $mech->save_content("/tmp/mysql-slow-log-$date.$$.log");
    system("sudo mv /tmp/mysql-slow-log-$date.$$.log ${fullpath}mysql-slow-log-$date.$$.log");
    print $mech->content_type() . "\n";

    if ( $mech->success() ) {
        print "created successfully ${fullpath}mysql-slow-log-$date.$$.log\n";
        my $msql_slow_log_q = "${fullpath}mysql-slow-log-$date.$$.log";
        system("/bin/bash $ENV{WORKSPACE}/scripts/pt-query-digest.sh $region $msql_slow_log_q");
    }
}
# print $mech->status();
# print $mech->content();
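
From Jenkins the script is run once per region, e.g. (script name assumed):

perl download_slow_query_log.pl -r apac

On success it hands the downloaded slow log to scripts/pt-query-digest.sh, presumably a wrapper around Percona's pt-query-digest that does the actual analysis.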


logstash - ELK

input {
  file {
    path => [
      "/var/log/apache2/example.com_access_log",
      "/var/log/apache2/domain.com_access_log",
      "/var/log/apache2/somesite.com_access_log",
      "/var/log/apache2/access.log"
    ]
    type => "apache-access-log"
  }
}


filter {
  if [type] == "apache-access-log" {
    grok {
      match => { "message" => "%{COMBINEDAPACHELOG}" }
    }
  }


  date {
    match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss Z" ]
  }
}
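
With %{COMBINEDAPACHELOG}, grok splits each access-log line into fields such as clientip, verb, request, response, bytes, referrer and agent, and the date filter then sets @timestamp from the request's own timestamp rather than the moment Logstash read the line.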






logstash.conf 

output {
  stdout {
    codec => rubydebug
  }
  elasticsearch {
    bind_host => "127.0.0.1"
    cluster   => "elasticsearch"
    host      => "127.0.0.1"
  }
}
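
To try the pipeline, point Logstash at the config (binary path depends on the install):

bin/logstash -f logstash.conf

The rubydebug codec prints each parsed event to stdout as a hash, which makes it easy to verify the grok matches before events reach Elasticsearch.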


web scraping part 1

This script downloads the email addresses of all recruiters.

=============================================
using simple regex. 
=============================================
#!/usr/bin/perl

use strict;
use warnings;
use LWP::Simple;

# Recruiter listing page (assumed; $url was left undeclared in the original)
my $url = 'https://www.cybercoders.com/recruiter/';

my $html = get($url);

my @html = split( /\n/, $html );

for my $i (@html) {
    next unless $i =~ /<a href="\/recruiter\/(\S.*\/)">/;
    print "${url}${1}\n";
    my $indiv_recruiter = get("$url$1");
    my @indiv_recruiter = split( /\n/, $indiv_recruiter );
    for my $j (@indiv_recruiter) {
        next unless $j =~ /<a class="recruiter-email-link" href="mailto:(\S.*?\@CyberCoders\.com)">/;
        print "$1\n";
    }
}

=========================================================
Using HTML::TreeBuilder::XPath
=========================================================

#!/usr/bin/perl

use strict;
use warnings;
use LWP::Simple;
use HTML::TreeBuilder;
use HTML::TreeBuilder::XPath;

# Base URL (assumed; $url was left undeclared in the original)
my $url = 'https://www.cybercoders.com/recruiter/';

my $html = get($url);

my $tree = HTML::TreeBuilder::XPath->new_from_content($html);

my @customers = $tree->findnodes('//div[@class="recruiter-item"]');

for my $custm (@customers) {
    my @all_a_tag     = $custm->look_down( '_tag' => 'a' );
    my $recruiter_pth = $all_a_tag[0]->attr('href');
    $recruiter_pth =~ s/\/recruiter\///g;
    get_email($recruiter_pth);
}

sub get_email {
    my ($r_pth) = @_;
    print "get $url$r_pth\n";
    my $html   = get("$url$r_pth");
    my $tree2  = HTML::TreeBuilder->new_from_content($html);
    my @mailto = $tree2->look_down( _tag => 'a', class => 'recruiter-email-link' );
    print $mailto[0]->attr('href') . "\n";
}
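
The printed href still carries its mailto: prefix; stripping it down to the bare address is a one-liner, e.g.:

(my $email = $mailto[0]->attr('href')) =~ s/^mailto://;
print "$email\n";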

Cloud developer Chef


> The sender of this email is registered with Naukri.com as dynproindia. To respond back directly to the Employer, please click on Reply button, or send an email to savitha@dynproindia.com
> Experience required for the Job: 5 - 15 years
> Annual Salary of the Job: 0.0 - 30.0 Lacs
>
>
>
> Dear Candidate,
>>
>> Hi All
>>
>> About Dynpro
>> * DynPro specializes in helping companies build infrastructures that can weather the most fierce storms by focusing on strong fundamentals: appropriate and effective systems, the right people, and proven business processes. Our worldwide force of technology and business experts can identify the right solutions to meet any challenge. Our relationships with the world's best service and product providers ensures that you get the most practical, value-driven solutions available anywhere. Location: USA,UK, Kolkata,Bangalore,Noida
>>
>> Skill sets -Cloud,Chef,AWS
>>
>> Experience: 5+ Yrs
>>
>> Location: Bangalore
>>
>> NP: Who can join Immediate or within 15 to 20 days of time
>> JD
>>
>> Following is the detailed Job description:
>> 1. Hands on experience (preferably 2 years) on Chef and Ruby scripting.
>> 2. Aware of cloud technologies such as AWS, Azure.
>> 3. Experience on working on DevOps methodology.
>> 4. Good to have - shell, python scripting hands on.
>> 5. Technically strong with a min of 5 years of experience.
>> 6. Self-motivated, team player and ability to work in high pressure scenarios.
>> 7. Capable of taking instructions and action them without too much hand holding.
>> 8. Show technical innovation and strive for continuous improvement on the platform.
>>
>> Key skills required for the job are:
>>
>> Devops-Chef(Mandatory)
>> Amazon Web Services - AWS(Mandatory)
>> ServiceNow - SaaS
>>
>>
>>
>> Mode Of Employment: Contract to Hire
>>
>> If your profile is relevant to the JD then kindly revert me back with your updated resume and with below Details
>>
>> Total Exp :
>> Rel Exp :
>> Current CTC :
>> Expected CTC :
>> Notice Period :
>> Work Location :
>> Pan no:
>> Passport no:
>> Educational Background with University name and Year of Passing and with %
>>
>> Thanks & Regards,
>>
>> Savita
>> Dynpro India Pvt Ltd.
>> Bangalore
>> Email: savitha@dynproindia.com
>> URL: www.dynproindia.com ; www.dynpro.com
>
>

Devops professionals with chef automation


>>
>> Experienced DevOps Consultant working in an Agile environment (Scrum) and specialized in design, implementation, administration, configuration, upgrade and maintenance of Chef, ClearCase, ClearQuest, TFS, JIRA, Splunk, VMware ESX, NetApp Storage, FlexLM License Server, Perforce, Tableau, and BladeLogic. Strong hands-on experience in Chef, Docker, AWS, OpenStack, MySQL.
>> Good experience in automation using Unix shell scripting, PowerShell, PowerCLI and Perl. Experienced in delivering virtualised environments for multiple applications like TFS, Splunk, BladeLogic. Good experience in build and release activities. Excellent interpersonal skills and ability to communicate effectively with all levels of management. A quick learner with the confidence to master new applications and technologies in order to deliver results.
>>
>> * Help companies setup Enterprise Chef Environment and conducted demos/PoCs.
>> * Enable MacBook provisioning using Chef.
>> * Write Chef cookbooks for various requirements from clients.
>> * Write Ohai plugins, chef handlers.
>> * Work on Chef cookbook testing using foodcritic, chefspec, rubocop, Test Kitchen, Vagrant.
>> * Write cookbooks to create Docker containers using Chef.
>> * Design and implement complex and efficient Splunk dashboards using Splunk DB Connect and Sideview Utils.
>> * Involve in the administration of AWS, Openstack and Cloudstack setups.
>> * Setup of MySQL database, importing data from external sources into MySQL
>>

Senior Puppet Engineer


>
> Job Description:
>
> A candidate with good DevOps exposure and expertise on managing Puppet infrastructure on various cloud platforms (preferably AWS). He/She should have an excellent working knowledge of Linux with Puppet Master/Client setup.
>
> Desired Skills & Experience
> • Strong hands-on experience on various Linux platforms. Good knowledge of Linux shell scripting for automation of regular activities.
> • Extensive experience in DevOps and Puppet Master/Client setup.
> • Proficient in deploying Puppet, Puppet Dashboard, and Puppet DB for configuration management to the existing infrastructure.
> • Experience in configuring parameters in Puppet for different deployments.
> • Experience in enterprise software monitoring tools like Nagios, Cacti, OpsView, ManageEngine.
> • Experience working with scripting/programming languages (shell, PHP, Ruby, Python and/or Perl).
> • Knowledge in configuring Puppet Foreman.
> • Knowledge in Puppet Hiera.
> • Understanding of cloud computing (e.g. AWS, Azure, and Google) will be an advantage.
> • Puppet Certified Professional will be an advantage.
> • Monitoring: Nagios, Cacti, ManageEngine

devops

>> 1. Very strong Linux/Unix administration background with expertise in scripting.
>> 2. Responsible for design, manage and maintain tools to automate operational processes using Chef, Puppet, Ansible, SaltStack etc.
>> 3. Implement systems that are highly available, scalable and self-healing.
>> 4. Understand, implement and automate security controls and governance processes.
>> 5. Build and manage development and testing environments, assisting developers in debugging application issues.
>> 6. Ability to use a wide variety of open source technologies and tools.
>> 7. Contribute to the creation of system support documents such as runbooks.
>> Mandatory skill sets:
>> 8. Experience with networking, properly architecting high-availability systems, and security in an on-premises and cloud context.

amazon training

Overview

We are developing a large-scale website and are looking to automate the environment setup and parts of the software engineering process: build, package, deploy, and test within AWS.

  • For this test assignment, we are asking you to provide a development plan document for completing each of the tasks detailed below.
  • We will use your document to evaluate your knowledge of AWS and other software packages for build, package, deploy, and test
  • We are expecting that a great candidate will spend less than 8 hours and produce a clear and well researched dev plan.
  • If we like your plan, and your interview goes well, we will then hire you for a full time role, and your first tasks will be to execute your implementations!

Task 1 - Fully Automated environment setup

Create the CloudFormation template and Chef scripting required to bring up a dev, test, staging, or production website

  • Must be fully automated; no manual intervention required whatsoever
  • The website's AWS environment consists of RDS, CloudFront, SQS, SNS, SES, and Elastic Beanstalk, as well as other third-party non-Amazon software.
  • Must notify when the environment is completely set up and ready
  • DOD (Definition of Done) - this task is complete when I can specify a few parameters (examples, name of environment, size of farm), click a button, and receive an email notification containing the details of the environment when the environment is up and running.
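
A sketch of how that one-button setup could be driven (all names here are hypothetical; assumes a configured AWS CLI and an SNS topic with an email subscription already in place):

#!/usr/bin/perl
use strict;
use warnings;

# Launch an environment from a CloudFormation template; CloudFormation
# publishes stack events to the SNS topic, whose email subscription
# delivers the "environment is up" notification.
my ( $env_name, $farm_size ) = @ARGV;
die "usage: $0 <env-name> <farm-size>\n" unless $env_name && $farm_size;

system( 'aws', 'cloudformation', 'create-stack',
    '--stack-name',        $env_name,
    '--template-body',     'file://website-env.template',                    # template name assumed
    '--parameters',        "ParameterKey=FarmSize,ParameterValue=$farm_size",
    '--notification-arns', 'arn:aws:sns:us-east-1:123456789012:env-ready',   # topic assumed
) == 0 or die "create-stack failed: $?";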

Task 2 - Continuous 24/7 Testing Automation

  • create and administer a 24/7 test suite that runs against a selected environment
  • setup fully automated testing pipeline that detects SVN commits, builds and packages the website, then deploys it into the staging environment, without disrupting the automated testing
  • requires using existing monitoring tools
  • DOD - this task is complete when the following 2 capabilities are enabled
  • an svn commit command triggers a build, package, deploy into a configured environment, and then an email notification is received containing details of the deployment
  • a web page is available at all times that shows the results of all tests run in the last 8 hours against the configured environment
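
One way to wire the commit trigger (sketch; the Jenkins host, job name and token are assumptions) is an SVN post-commit hook that pokes Jenkins' remote build URL:

#!/usr/bin/perl
# hooks/post-commit REPOS REV -- kick the build/package/deploy job on every commit
use strict;
use warnings;
use LWP::Simple qw(get);

my ( $repos, $rev ) = @ARGV;
# Jenkins remote-trigger URL; host, job name and token are placeholders
get("http://jenkins.example.com/job/build-package-deploy/build?token=SECRET&cause=r$rev");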

Instructions

When complete and ready to submit:

  • Please create and provide two (2) distinct plans. Both plans should be contained in a single Google document or folder, saved with the file name ACAE$60K1003_Lastname_Firstname (substituting your name for Lastname_Firstname), share it publicly, and proceed to this link for completing your submission.

Each plan should contain the following sections

Plan Summary

  • Define in one paragraph or 5 bullet points the most important parts of your plan.
  • Have you selected great services that will do the work for you without having to write much code?  Is there a design pattern you will use that will make the automation maintainable and more robust?  This is the section to highlight what makes your plan special.

Software List

  • Identify all software you will use, including development tools
  • You must reuse Amazon tools and best practices when they apply
  • When Amazon tools and best practices don't apply, open source standards should be used.
  • Writing code should only be done as a last resort.  Configuring "off the shelf" software is preferable by far to writing code or frameworks.

Task Breakdown

  • The plan should cover the task breakdown to the level where the maximum task effort is 3 man-hours.
  • use a Google spreadsheet and link to it from the Google doc
  • make sure it is shared with the same people
  • provide the task breakdown in a form that you feel is as simple and easy to understand as possible

Total Effort

  • Each plan should include the total effort in man-hours. Assume you are the only one working on each project in your estimates.

Calendar Duration

  • Each plan should include the calendar duration-to-complete, measured in weeks and days.  Again, assume that you are the only one working on each project.

Finally, if we like your plan, you will be invited to the final phase of the application process.  [NOTE: You may be asked to sign legal documents related to security and proprietary information before you move forward to the next step of the process.  This is sometimes necessary if, in the next application phase, you will have access to such information for completing this phase.]  The next phase for this position is an online video interview with the hiring manager.  Instructions for scheduling this interview will be included in the invitation, should you successfully complete this testing phase.  Once that interview is complete, only minor (and brief) administrative steps remain.

You have only 5 days from today to complete this testing phase.

