# Perl script to check for duplicate files in the directories given on the command line.
use strict;
use warnings;
use File::Find;
use Digest::MD5;
use Data::Dumper;

my @dup = fnd_dup(@ARGV);
#print Dumper ([@dup]);
foreach my $cur_dup (@dup) {
    foreach my $cur_fil (@$cur_dup) {
        print "my duplicate file is $cur_fil\n";
    }
}
sub fnd_dup {
    my (@dir_list) = @_;
    return unless @dir_list;

    # Group files by size first: only files of the same size can be duplicates.
    my %file;
    find( sub { -f && push @{ $file{ (stat($_))[7] } }, $File::Find::name }, @dir_list );
    #print Dumper ([\%file]);
    #'2040' => [
    #            '/opt/dkim-milter-2.8.3/obj.Linux.2.6.18-92.el5.x86_64/libsm/fpurge.o',
    #            '/opt/dkim-milter-2.8.3/obj.Linux.2.6.18-128.el5.x86_64/libsm/fpurge.o'
    #          ]
    my @resulth;
    foreach my $size ( keys %file ) {
        # A size bucket with only one file cannot contain duplicates.
        next if $#{ $file{$size} } < 1;

        # MD5-hash every file of this size; files sharing a digest are duplicates.
        # %md is scoped per size bucket so earlier buckets are not pushed again.
        my %md;
        foreach my $curr_file ( @{ $file{$size} } ) {
            open( my $fh, '<', $curr_file ) or next;
            binmode($fh);
            push @{ $md{ Digest::MD5->new->addfile($fh)->hexdigest } }, $curr_file;
            close($fh);
        }
        foreach my $hash ( keys %md ) {
            if ( $#{ $md{$hash} } >= 1 ) {
                push @resulth, [ @{ $md{$hash} } ];
            }
        }
    }
    return @resulth;
}
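
A quick usage sketch for reference (the file name find_dups.pl is just an example, not part of the original script):

perl find_dups.pl /opt

Assuming the two fpurge.o copies from the Dumper comment above really have the same content, the output would look like:

my duplicate file is /opt/dkim-milter-2.8.3/obj.Linux.2.6.18-92.el5.x86_64/libsm/fpurge.o
my duplicate file is /opt/dkim-milter-2.8.3/obj.Linux.2.6.18-128.el5.x86_64/libsm/fpurge.o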