#!/usr/bin/perl
# $Id: fileops.pm,v 1.1 2011/06/03 09:28:47 yason Exp $
package fileops;
use log;
my $FILE_LOCK = "/var/run/ztransfer.lock";
sub cleanup()
{
# cleanup procedure
# remove all .rar files in /tmp
# THIS CODE SHOULD BE FIXED
`rm -f /tmp/*.rar`;
}
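# A possible fix for cleanup() above (flagged with "THIS CODE SHOULD BE
# FIXED"): expand the pattern and remove the files in-process with Perl's
# built-in glob() and unlink() instead of shelling out to rm(1). This is
# only a sketch; the name cleanup_unlink() is invented here and nothing
# calls it yet.
sub cleanup_unlink()
{
	# remove all .rar files in /tmp, logging any file we fail to delete
	foreach my $rar (glob("/tmp/*.rar"))
	{
		unlink($rar) or log::error("Can not remove $rar: $!");
	}
}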
sub checkdir($)
{
my $file = shift;
chomp( my $dir = `dirname '$file'` );
if(! -e $dir)
{
$_ = system("mkdir -p '$dir'");
if($_ == 0)
{
log::main("Created directory $dir");
}
else
{
# the directory could not be created
log::error("Can not create directory $dir");
}
}
}
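# An alternative sketch of checkdir() that stays inside Perl instead of
# shelling out to dirname(1) and "mkdir -p". File::Basename and File::Path
# are core modules; make_path() creates intermediate directories and, with
# the documented "error" option, reports failures without dying. The sub
# name checkdir_pureperl() is invented here for illustration and is not
# called by the rest of the module.
use File::Basename qw(dirname);
use File::Path qw(make_path);
sub checkdir_pureperl($)
{
	my $file = shift;
	my $dir = dirname($file);
	return if -e $dir;
	# collect errors instead of letting make_path abort the process
	my $err;
	make_path($dir, { error => \$err });
	if(@$err)
	{
		log::error("Can not create directory $dir");
	}
	else
	{
		log::main("Created directory $dir");
	}
}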
sub take_lock()
{
open( FILE, ">$FILE_LOCK") or log::critical("Can not create lock file $FILE_LOCK");
# write PID of current process to file
print FILE $$;
close(FILE);
}
sub release_lock()
{
# remove the lock file; unlink() is the idiomatic replacement for
# shelling out to rm(1) here
unlink($FILE_LOCK) or log::error("Can not remove lock file $FILE_LOCK");
}
sub check_lock()
{
# This file-lock technique is not ideal: there is no fully reliable way
# to tell whether another copy of this script is already running.
# The implementation writes the PID of the current process into the lock
# file and, on startup, checks whether that PID still appears in the
# process list (`ps ax`), using the standard egrep(1) utility.
my $PID;
if(! -e $FILE_LOCK)
{
# continue running main transfer code
return();
}
else
{
open(FILE, $FILE_LOCK) or log::critical("Can not open existing lock file $FILE_LOCK");
$PID = <FILE>;
close(FILE);
$_ = `ps ax | egrep '\\b$PID\\b'`;
if($_ ne '')
{
# another copy is running
log::critical("Another copy is running! Exiting..");
}
else
{
# the previous run ended abnormally and did not remove its lock file
# return to the caller, which is expected to call take_lock() next
return();
}
}
}
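# The comments above point out the weakness of the PID-file approach. One
# common alternative is to hold an flock(2) lock on the lock file for the
# lifetime of the process: the kernel drops the lock automatically when the
# process exits, even after a crash, so no stale-lock check is needed. This
# is only a sketch (the sub name take_lock_flock() is invented here) and is
# not called anywhere in this module.
sub take_lock_flock()
{
	use Fcntl qw(:flock);

	# open (or create) the lock file; append mode avoids clobbering it
	# before we know we own the lock
	open(LOCKFH, ">>$FILE_LOCK")
		or log::critical("Can not open lock file $FILE_LOCK");
	# non-blocking exclusive lock: fails at once if another copy holds it
	flock(LOCKFH, LOCK_EX | LOCK_NB)
		or log::critical("Another copy is running! Exiting..");
	# keep LOCKFH open for the rest of the process so the lock stays held
}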
return(1);