-------------------------------------------------------------------------------
General filename handling

  foreach (@ARGV) {                          # $_ is the filename
    # split up directory and filename
    (my $pathname = $_) =~ s/[^\/]+$//;      # directory (including '/')
    (my $filename = $_) =~ s/^.*\///;        # filename

    # split up basename and suffix
    my ($basename,$suffix) = split(/\.([^.]+)$/, $filename);
    $suffix = '' unless $suffix;             # no '.' and thus no suffix
  }
-------------------------------------------------------------------------------
Open ANY compressed file

  # Hash of Hash for each compression suffix
  my $compressors = {
      gz   => { cmd => 'zcat',  args => '' },
      bz2  => { cmd => 'bzcat', args => '' },
      xz   => { cmd => 'xzcat', args => '' },
      lzma => { cmd => 'lzma',  args => '-dc' },
    };

  # map a filename extension to a de-compression command
  sub compression_open_string {
    my $filename = shift;
    my ($ext) = $filename =~ /\.([^.]+)$/;

    # return a normal open, if no valid compression suffix identified
    return "<$filename"
      unless $ext && defined $compressors->{$ext};

    # work out the command to run, if not already defined
    unless ( defined $compressors->{$ext}->{cmdline} ) {
      $compressors->{$ext}->{cmdline} = `type -p $compressors->{$ext}->{cmd}`;
      chomp $compressors->{$ext}->{cmdline};
      die("Didn't find \"$ext\" decompression command for $filename\n")
        unless $compressors->{$ext}->{cmdline};
      $compressors->{$ext}->{cmdline} .= " ".$compressors->{$ext}->{args}
        if $compressors->{$ext}->{args};
    }
    # return the command to open this compressed filename
    return "$compressors->{$ext}->{cmdline} \"$filename\" |";
  }

  $log = shift;
  my $open_string = &compression_open_string($log);
  open ( LOG, $open_string )
    || die "Unable to ", $open_string =~ /^</ ? "open" : "decompress",
           " \"$log\": $!\n";
-------------------------------------------------------------------------------
Unbuffered (hot) file handles

  use IO::Handle;
  FH->autoflush(1);
  $fh->autoflush(1);

  The IO::Socket module now automatically makes the network socket hot.

  However say you want to send 57 byte records; you want to buffer them,
  but without splitting the record boundaries (input or output).  The
  value 8151 is the largest number less than 8K that is a multiple of 57.

    use IO::Handle '_IOFBF';    # '_IOFBF' means 'Fully Buffered'
    FH->setvbuf($buffer_var, _IOFBF, 8151);
-------------------------------------------------------------------------------
Check if a file handle is open..

  perl 5..
    if (defined( fileno(FH) )) { close(FH); }
  perl 4..
    if (defined($fd = fileno(FH))) { close(FH); }
-------------------------------------------------------------------------------
Removing Comments cleanly when reading data files

  open( FILE, $file )
    || die("$prog: Unable to open $file: $!\n");
  while( <FILE> ) {
    s/#.*$//;        # ignore comments
    s/\s+$//;        # remove end of line spaces
    next if /^$/;    # skip blank lines
    ...
  }
-------------------------------------------------------------------------------
Reading a whole file in various ways

  Into a single string
     $file = `cat foo`;
  faster
     { local $/ = undef;       # slurp mode: read the whole file in one go
       open(FILEH,'foo');
       $whole_file = <FILEH>;
       close FILEH;
     }

  Into an array of lines (without the returns)
     chop(@file = `cat foo`);
  or without a sub-process "cat"  (faster)
     open(FILEH,'foo');
     chop(@file = <FILEH>);
     close(FILEH);

  Remember the -p and -n perl options for standard "cat"-like file
  processing.

  Uniq without sorting
     perl -ne 'print unless $a{$_}++'

  If you want to split the file string into an array of lines, and later
  rejoin them, use one of the following...

  Remove End-Of-Line Delimiter
     @lines = split /\n/, $file;       # newline as the delimiter
     $file = join "", map { "$_\n" } @lines;
     $file = join("\n", @lines) . "\n";

  Keeping End-Of-Line  (simpler rejoin)
     @lines = $file =~ /(.*\n)/g;
     @lines = split /(?<=\n)/, $file;  # for right-to-left schwartzian transforms
     $file = join "", @lines;

  Read Key-Value pairs into hash ('=' or space delimited)
     open(F,"user_image.txt");
     %hash = map { split /=|\s+/; } <F>;
     close(F);
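  The whole-file slurp above can also be written using a lexical file
  handle and the 3-argument open of perl 5.6+.  A minimal sketch of the
  same idiom (the filename 'foo' is just a stand-in)...

     { open( my $fh, '<', 'foo' ) or die "open foo: $!\n";
       local $/ = undef;           # slurp mode
       my $whole_file = <$fh>;
       close $fh;
     }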
"\n"; Keeping End-Of-Line (simplier rejoin) @lines = $file =~ /(.*\n)/g; @lines = split /(?<=\n)/, $file; # for right-to-left schwartzian transforms $file = join "", @lines; Read Key-Value pairs into hash ('=' or space delimited) open(F,"user_image.txt"); %hash = map { split /=|\s+/; } ; close(F); ------------------------------------------------------------------------------- Keeping line count $. correct -or- Reading the top of a list of files. Example in sed sed -n 's/^THE_URL://p; 20q' one_file Want to do it with perl over a lot of files.... (Attempt 1) perl -ne '$.==20 and close ARGV; s/^THE_URL:// && print' file... The ARGV filehandle is closed at line 20 to goto the next FILE in @ARGV BUT it must also be closed that the end of each input file so that $. is reset for the next file in the sequence!!!! (how?) This does a 'close' on ARGV to reset $. at the end of each files See http://perldoc.perl.org/functions/eof.html # reset line numbering on each input file while (<>) { next if /^\s*#/; # skip comments print "$.\t$_"; } continue { close ARGV if eof; # reset $. at end of each file } ------------------------------------------------------------------------------- tail -f reading in perl for(;;) { for ($curpos = tell(GWFILE); $_ = ; $curpos = tell(GWFILE)) { search for some stuff and put it into files } sleep for a while seek(GWFILE, $curpos, 0); } The trick is use seek() to clear the EOF condition and to tell STDIO to look at the file on the disk again. ------------------------------------------------------------------------------- Reading a directory opendir(D,"directory"); while (defined($name = readdir(D))) { ... } closedir(D); The "defined" is needed otherwise it will quit on a filename of "0". ------------------------------------------------------------------------------- Reading the time stamps of a file NOTE: the -M -C -A time produced are in days since the start of the current perl process. This is find for many purposes but not for this. require "ctime.pl"; print "Access time: ", &ctime( (stat( shift ))[8] ); | # Modification = 9 ---------------'| # Change time = 10 ----------------' ------------------------------------------------------------------------------- Temporary files... With an actual filename (for commands) Must be in a eval to prevent the END{} and cleanup_tmps{} from being defined UNLESS you always want them defined in your script! use File::Temp qw( tempfile ); my $tmpdir = $ENV{'TMPDIR'} || "/tmp"; ($data_tmp, $data_dump) = tempfile( "$PROGNAME.XXXXXX", DIR => $tmpdir, UNLINK => 1 ); # redirect a specific handle to the open file open(DATA_DUMP, ">&".fileno($data_tmp)) or die("Failed to open tempfile for write : $!\n"); When $data_tmp becomes undefined, the temporary file will be unlinked. Without a filename in directory (auto clean up). use File::Temp qw( tempfile ); my $tmp1 = tempfile('/tmp/story_XXXXXX'); my $tmp2 = tempfile('/tmp/story_XXXXXX'); Passing open temporary files (without filenames) to sub-commands (See below for details) use Fcntl; use IO::File; use File::Temp qw( tempfile ); my $tmp = bless tempfile('/tmp/story_XXXXXX'), 'IO::File'; $tmp->fcntl(F_SETFD, 0 ); # turn off the close-on-exec $tmp->seek(0,0); $tmp->truncate(0); print $tmp1 $DATA_FOR_TEMPORARY_FILE; $tmp->flush; $tmp->seek(0,0); open( CMD, "cat -n /dev/fd/". $tmp->fileno. " |" ); $OUTPUT = ; close CMD; ------------------------------------------------------------------------------- Reading the DATA section, multiple times Warning this only works in a script. 
-------------------------------------------------------------------------------
Reading the DATA section, multiple times

  Warning: this only works in a script, otherwise you get an
  "Illegal seek" system error.

  =======8<--------
  $data_start = tell(DATA);      # note start of data
  while (<DATA>) {
    ...
  }
  seek DATA, $data_start, 0;     # rewind data section
  while (<DATA>) {
    ...
  }
  __DATA__
  data to be read multiple times
  =======8<--------
-------------------------------------------------------------------------------
In-place Editing

  Search and replace
    perl -i -pe 's/ADD_LINE_BREAK/\n$&/' *.txt

  Delete a line in a file
  NOTE: using "next", or "next LINE", will still print the line and loop
    perl -i -pe '$_="",next if /Delete Me/' *.txt

  Delete everything after a particular line when editing multiple files
    perl -i -pe 'close ARGV if /Junk-after-this-line/' *.txt
    perl -i -pe '$_="\n", close ARGV if /Junk-including-this-line/' *.txt

  Remove blank lines
    perl -ne 'print unless /^$/'
    perl -lne 'print if length'
    perl -ne 'print unless /^\s*$/'   # delete white space lines
    perl -ne 'print if /\S/'          # delete unless non-space

  Remove extra blank lines (keep one blank line)
    cat -s
    perl -00pe0
    perl -ne 'if (/\S/) { print; $i=0 } else { print unless $i; $i=1; }'
    perl -ne 'print if /\S/../^\s*$/'

  In-Place Editing in a perl script

  You can enable in-place editing by setting $^I, but it only works when
  you read files using <> from the filenames in @ARGV.  Perl then renames
  the original file and opens a new file for output on ARGVOUT with the
  original name, and does a select ARGVOUT, so you don't have to specify a
  handle in the print statements.

  So here is how you do it (removing uppercase from a file)...
  =======8<--------
  $^I = ".bak";                    # Enable inplace editing
  our @ARGV = ("file_to_edit");    # the file to do in-place editing on
  while( <> ) {
    tr/A-Z/a-z/;
    print;
  }
  =======8<--------

  Remove specific line (in-place edit)
    perl -i -pe '$_="" if $. == 44' .ssh/known_hosts
-------------------------------------------------------------------------------
Run a shell script on remote servers...

  Note: no user input, and extra datafiles are not passed.
  The shell script has no extra quoting or syntax problems.

  =======8<--------
  @SERVERS = @ARGV;
  $data_start = tell(DATA);     # start of shell script after __DATA__ tag
  open(INCOPY, "<&STDIN");      # save the original STDIN
  open(STDIN, "<&DATA");
  foreach $host (@SERVERS) {
    seek(STDIN, $data_start, 0);
    system( qw( ssh -x -o CheckHostIP=no -o BatchMode=yes ),
            $host, 'sh');
    if ( $? != 0 ) {
      printf STDERR "Failed/Aborted on $host\n";
      exit(10);
    }
  }
  open(STDIN, "<&INCOPY");      # restore the original STDIN
  __DATA__
  #!/bin/sh
  echo "running script on `hostname`"
  =======8<--------
-------------------------------------------------------------------------------
Making filenames safe

  Extra characters in the filename can tell perl how it should open the
  file, and that can cause problems.  The solution is to add a relative or
  absolute path to the front of the filename, and a trailing null byte at
  the end.  The latter tells perl that that is its real end.

    sub safe_filename {
      local($_) = shift;
      m#^/# ? "$_\0" : "./$_\0";
    }

    open(FILE, "< " . &safe_filename($fn))
      || die "couldn't open $fn: $!";

  Perl 5.6 now provides a 3 argument open to fix this problem.  In this
  form perl does not use any "open method" indicators in the filename
  string.  Also you can exec programs directly, without use of the shell.
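  A minimal sketch of both points (the grep is only an illustration)...

    # 3-argument open: $fn is taken literally, whatever it contains
    open( my $fh, '<', $fn )
      || die "couldn't open $fn: $!";

    # LIST form of system/exec: arguments never pass through a shell
    system( '/bin/grep', 'pattern', $fn );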
-------------------------------------------------------------------------------
Recursive Empty Directory Removal

  On newer UNIX machines (linux) use gnu-find..
    find . -depth -type d -empty -printf "rmdir %p\n" -delete

  Classical Method...  rmdir will do the needed tests!
    use File::Find;
    finddepth(sub{rmdir},'.');

  # Or more generally, to allow modification of exactly what it does.
    use File::Find;
    find( { bydepth => 1,
            wanted  => sub {
                #return unless -d;
                #return if $_ eq '.' || $_ eq '..';
                rmdir($_) and print "$File::Find::name\n";
              },
          }, @ARGV);

  Doing the directory walk yourself (incomplete)
  Without changing directory.

    sub emptydirs {
      my $path = shift;
      opendir ( my($dir), $path )
        or die "Can't open $path : $!\n";
      # get directory content but skip . and .. (to avoid circular looping)
      my @content = grep {$_ !~ /^\.\.?$/} readdir $dir;
      closedir $dir;
      # recurse through sub-directories
      foreach my $subpath (grep { -d "$path/$_"} @content) {
        emptydirs($path.'/'.$subpath);
      }
      # Empty directory
      # Error: actually we need to re-test if the directory is NOW empty,
      # as the recursion above may have removed its sub-directories!
      if (!@content) {
        print "rmdir '$path'\n";
        #rmdir $path;
        return;
      }
    }
    emptydirs(shift);

  Also look at File::Find::Parallel on CPAN.
-------------------------------------------------------------------------------
Recursive Directory Removal

  Remove the whole directory (without using "rm -rf")

    use File::Path;
    remove_tree('test');
    # Keep top level dir
    remove_tree('test', keep_root=>1);

  OR

    use File::Find;
    find( { bydepth => 1,
            wanted  => sub {
                (-d $_) ? rmdir($_) : unlink($_);
              },
          }, 'test');
    rmdir 'test';

  NOTE: on VMS machines you need to replace the unlink with
     ( 1 while unlink($_) )
  That is because VMS keeps multiple versions of each file.  BUT do not do
  this on Cygwin, as Windows will delay the deletion of files that are
  currently open, and that will cause an infinite loop!
-------------------------------------------------------------------------------
Sysopen command in perl4

  If you are stuck with perl4 (like on a SGI) and need to use the special
  open flags you can use this...

    require 'syscall.ph';
    require 'fcntl.ph';

    sub sysopen {
      local($path, $flags, $mode) = @_;
      local($fd, $fh);
      if (($fd = syscall(&SYS_open, $path, $flags, $mode)) == -1) {
        return undef;
      }
      $fh = 'sysfh' . ++$sysopen'fh;
      open($fh, "+>&$fd") || return undef;   # XXX: wrong mode
      return $fh;
    }

    $tty = &sysopen("/dev/tty1M19", &O_RDWR | &O_NDELAY | &O_EXCL, 0444);
    die "sysopen /dev/tty: $!" unless defined $tty;
    printf "tty handle is %s, fdesc is %d\n", $tty, fileno($tty);
    print $tty "Bob please write me if you see this.  Bill\n";
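  Under perl5 the builtin sysopen() does the same job directly, with the
  flag constants from the Fcntl module.  A minimal sketch...

    use Fcntl;
    sysopen( TTY, "/dev/tty", O_RDWR | O_NONBLOCK )
      or die "sysopen /dev/tty: $!";
    print TTY "are you there?\n";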
-------------------------------------------------------------------------------
Safe IO to commands (avoiding the shell)

  Unsafe -- $outputfile could hold tainted or uncontrolled data

    open (SORT,"|/usr/bin/sort -o $outputfile")
      or die "Open Pipe Failure: $!\n\t";
    foreach $line (@lines) {
      print SORT $line,"\n";
    }
    close SORT
      or die $! ? "System error on sort output pipe, $! -- ABORTING\n\t"
                : "Sort returned non-zero status $? -- ABORTING\n\t";

  PIPED OPEN

  An open piped into an exec call will avoid the shell parser.

  OUTPUT...
    open (SORT,"|-")
      or exec "/usr/bin/sort", "-o", $outputfile
      or exit(1);
    foreach $line (@lines) {
      print SORT $line,"\n";
    }
    close SORT
      or die $! ? "System error on sort output pipe, $! -- ABORTING\n\t"
                : "Sort returned non-zero status $? -- ABORTING\n\t";

  INPUT...
    open(GREP,"-|")
      or exec "/usr/bin/grep", $userpattern, $filename
      or exit(1);
    while (<GREP>) {
      print "match: $_";
    }
    close GREP
      or die $! ? "System error on input grep pipe, $! -- ABORTING\n\t"
                : "Grep returned non-zero status $? -- ABORTING\n\t";

  Note that if you don't do the exit(1), you are still in a 'forked'
  situation!  As such the rest of the program will probably be executed
  twice!  To fully check the situation, you need to check both fork and
  exec failures, and somehow report an exec failure back to the parent.
  That last is not an easy matter at all.

  INPUT MARK 2...
    $command   = 'grep';
    @opts      = qw( -n );
    $search    = 'anthony';
    @filenames = qw( /etc/passwd );

    $pid = open(CMD, "-|");
    die("Open fork failed: $!") unless defined $pid;
    if ( !$pid ) {   # child process
      exec $command, @opts, $search, @filenames;
      die("Command Exec failure: $!\n\t");
    }
    while (<CMD>) {
      print "match: $_";
    }
    close CMD
      or die $! ? "System error on command pipe, $! -- ABORTING\n\t"
                : "Command returned non-zero status $? -- ABORTING\n\t";

  Of course doing both input and output to the same command is more
  difficult -- see the open2 perl module (below).

  System Command

  The safe form for a standard system call
     system "real_prog", "argument1", "argument2";

  with setting a fake $0
     $real_name = "real_prog";
     system $real_name "fake_name", "argument1", "argument2";
  or
     system { "real_prog" } "fake_name", "argument1", "argument2";

  Example: start a `login' shell...
     $csh = "/bin/csh";
     system $csh "-csh", "-f";
  or
     system { "/bin/csh" } "-csh", "-f";

  Replace the currently running shell with a login shell
  (using perl, exec a new login shell named appropriately)
     exec perl -e '$shell="csh"; exec($shell "-csh")'

  System call IO redirection, without shell

  Example: redirect STDOUT to "/dev/null" for JUST the system call.
     open(SAVE, ">&STDOUT");
     open(STDOUT, ">/dev/null");
     system( "command", "arg1", "arg2", ... );
     open(STDOUT, ">&SAVE");
     close(SAVE);
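  With perl 5.8 and later the fork/exec dance of "INPUT MARK 2" above can
  be written as a single LIST-form piped open -- no shell is ever
  involved.  A minimal sketch of the same grep...

    open( my $grep, '-|', '/usr/bin/grep', $userpattern, $filename )
      or die "Can't run grep: $!\n";
    while ( <$grep> ) {
      print "match: $_";
    }
    close $grep
      or die $! ? "System error on grep pipe, $! -- ABORTING\n\t"
                : "Grep returned non-zero status $? -- ABORTING\n\t";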
-------------------------------------------------------------------------------
Passing an open file descriptor to children

  We have an open file handle and we want to connect this handle to a child
  process, so that the child reads/writes that handle itself.  The parent
  does not want to 'feed the child' through this open descriptor.

  Note: perl by default sets close-on-exec for all descriptors except
  STDIN, STDOUT and STDERR.

  Method 1:- Use the shell to set up the command and return results.

  The BIG problem with this is that when file handles are opened in perl,
  perl will automatically set the 'close-on-exec' flag on that file
  descriptor.  As such the child will NOT see the actual file descriptors.
  Perl controls this with the $^F variable, which is used AT THE TIME OF
  THE OPEN.

    $^F = 10;  # HEAVY PERL MAGIC -- turn off 'close-on-exec' when file is opened!
    $data_fh = FileHandle->new;
    $data_fh->open( 'data_file' );
    $tmp_fh = FileHandle->new;
    $tmp_fh->open( '>/tmp/t' );
    # finished -- restore 'close-on-exec' and run command
    $^F = 2;

    $command = "filter 2>&1 <&".fileno( $data_fh ). " >&".fileno( $tmp_fh );
    $errors = `$command`;

  Method 2:- The 'close-on-exec' flag can also be handled using an fcntl
  call

    # Opens as above
    use Fcntl;
    $data_fh->fcntl( F_SETFD, 0 )
      or die("fcntl data F_SETFD failed: $!");
    $tmp_fh->fcntl( F_SETFD, 0 )
      or die("fcntl tmp F_SETFD failed: $!");

    $command = "filter 2>&1 <&".fileno( $data_fh ). " >&".fileno( $tmp_fh );
    $errors = `$command`;

  For old perl handles use...
    fcntl( HANDLE, F_SETFD, 0 )
      or die("fcntl HANDLE F_SETFD failed: $!");

  Method 3:- Use a piped open.  However we need to set up the file
  descriptors after the 'open-fork', but before the actual exec call.

    # Opens as above
    $err_fh = FileHandle->new;
    open( $err_fh, "-|" ) or do {
      open( OLDERR, ">&STDERR" );  # save the old STDERR in case of trouble
      open( STDERR, ">&STDOUT" );  # errors to the open-pipe
      open( STDIN,  "<&".fileno($data_fh) );  # input from datafile
      open( STDOUT, ">&".fileno($tmp_fh) );   # output to the tmp file
      close ( $data_fh );          # child does not need these
      close ( $tmp_fh );
      exec( 'filter' )
        or do { print OLDERR "Filter exec failure : $!\n"; exit 10; };
    };
    $_ = join('', <$err_fh> );     # exec success?
    close $err_fh;

  Method 4:- Use the IPC::Open2 or IPC::Open3 module.

  This is difficult to work out, as while Open3 will accept file
  descriptors of the form ">&HANDLE", it does NOT seem to accept perl5
  FileHandles.
-------------------------------------------------------------------------------
Reading BOTH to and from a program

  WARNING: any such process is dangerous, as you can become deadlocked.
  It is better to separate the send and receive into parallel streams.
  See also .../shell/co-processes.hints

  Method 1:

  Below is a raw perl script that does this.  It creates two pipes for
  communication between the perl script and the external program, then
  forks and execs the external program with the pipes attached to its
  STDIN/STDOUT.

  You may need to set the read up to be non-blocking, or your script may
  hang indefinitely.  Look into using sysread and select if you have
  problems.

  =======8<--------
  #### create pipes to handle communication
  pipe (FROM_PERL, TO_PROG);
  pipe (FROM_PROG, TO_PERL);

  ##### create a child process
  $pid = fork;
  die "Couldn't fork: $!" unless defined $pid;

  ##### child process becomes the program
  if ($pid == 0) {
    ##### close unused parts of pipes
    close FROM_PROG;
    close TO_PROG;
    ##### attach standard input/output/error to the pipes
    open (STDIN,  '<&FROM_PERL') || die ("open: $!");
    open (STDOUT, '>&TO_PERL')   || die ("open: $!");
    open (STDERR, '>&STDOUT')    || die ("open: $!");
    ##### unbuffer the outputs
    select STDERR; $| = 1;
    select STDOUT; $| = 1;
    ##### execute the program
    exec $filter;
    ##### shouldn't get here!!!
    die;
  }

  ##### parent closes the child's ends of the pipes
  close FROM_PERL;
  close TO_PERL;

  print TO_PROG "data";
  $results = <FROM_PROG>;
  print $results;

  waitpid($pid,0);
  =======8<--------

  At this point writes to TO_PROG go to the program's stdin, and reads
  from FROM_PROG come from its stdout.

  Method 2:- A socketpair is a little simpler, in that only one handle is
  needed for both input and output.

  =======8<--------
  use Socket;
  use IO::Handle;

  socketpair(CHILD, PARENT, AF_UNIX, SOCK_STREAM, PF_UNSPEC)
    or die "socketpair: $!";
  CHILD->autoflush(1);
  PARENT->autoflush(1);

  $pid = fork;
  die "Couldn't fork: $!" unless defined $pid;

  if ($pid == 0) {
    close CHILD;                 # the child talks via the PARENT handle
    open (STDIN,  '<&PARENT') || die ("open: $!");
    open (STDOUT, '>&PARENT') || die ("open: $!");
    open (STDERR, '>&PARENT') || die ("open: $!");
    exec $program_name, split(' ', $program_args);
    die;
  }

  close PARENT;                  # the parent talks via the CHILD handle
  print CHILD "data";
  $results = <CHILD>;
  print $results;

  waitpid($pid,0);
  =======8<--------

  Method 3:- Doing the same thing via the chat2 module
  (NO LONGER AVAILABLE -- use the Perl Expect module)

  example -- changing the password as root
  =======8<--------
  ($user,$pass) = ("bin","bad boy");

  require 'chat2.pl';
  &chat'open_proc("passwd $user")
    || die "Cannot open_proc: $!";
  eval {
    ## presume we don't need old password, cuz we run as root
    die "bad response: $_ (expected changing...:)"
      if $_ = &chat'expect( 10,       # <-- timeout
                '^Changing.*\n', 0,   '.*\n', '$1',
                TIMEOUT, TIMEOUT,   EOF, EOF);
    die "bad response: $_ (expected ...ssword:)"
      if $_ = &chat'expect( 10, '.*ssword:.*', 0,
                TIMEOUT, TIMEOUT,   EOF, EOF);
    &chat'print("$pass\n");
    die "bad response: $_ (expected ...ssword:)"
      if $_ = &chat'expect( 10, '.*ssword:.*', 0,
                TIMEOUT, TIMEOUT,   EOF, EOF);
    &chat'print("$pass\n");
    die "bad response: $_ (expected nothing)"
      if $_ = &chat'expect( 10, '\s*\S(.*)', '$&',
                TIMEOUT, TIMEOUT,   EOF, 0);
  };
  $die = $@;
  &chat'close();
  if ($die) { die $die; }
  else      { print "password changed"; }
  =======8<--------
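  A rough equivalent of the above using the Expect module.  This is an
  untested sketch -- the 'assword:' prompt strings are assumptions, and
  vary between systems...

  =======8<--------
  use Expect;
  ($user,$pass) = ("bin","bad boy");

  my $exp = Expect->spawn( 'passwd', $user )
    or die "Cannot spawn passwd: $!\n";
  $exp->expect( 10, 'assword:' )     # first password prompt (assumed)
    or die "No password prompt seen\n";
  $exp->send( "$pass\n" );
  $exp->expect( 10, 'assword:' )     # confirmation prompt (assumed)
    or die "No confirmation prompt seen\n";
  $exp->send( "$pass\n" );
  $exp->soft_close();
  =======8<--------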
  Method 4:- Using the IPC::Open2 module.
  Note: it does not seem to understand perl "filehandle" objects.

  Convert a password into an md5 encryption via grub
  =======8<--------
  #!/usr/bin/perl
  my $initpwd = "xyzzy";
  my $passwd;

  use IPC::Open2;
  local( *Response, *Command );
  my $pid = open2( \*Response, \*Command,
                   qw( /sbin/grub --batch --device-map=/dev/null ) );

  print Command "md5crypt\n";
  print Command "$initpwd\n";
  print Command "quit\n";
  close Command;

  while ( <Response> ) {
    print;
    chomp;
    $passwd = $' if /Encrypted: /;
  }
  die("Grub Encryption failed") unless $passwd;
  close Response;

  # How to get the return code??
  waitpid($pid, 0);

  print "result: $passwd\n";
  =======8<--------

  You should be able to also connect existing IO handles to the command
  using IPC::Open2, but it is unclear how this is done.

  Method 5:- Use a filtered, non-blocking, event driven IO::Event handling
  method.  With this you can set up a program that will know how to handle
  (filter or process) the data of multiple IO streams, as those streams
  require handling.

  See my module IO::Events or ~/store/perl/io_events
  Also see my script "fcp_add" which talks to a network server using this
  module.  It can make process interaction much more automated, acting on
  results as they arrive.
-------------------------------------------------------------------------------
Critical Periods for File Update (interrupt handling)

  # Interrupt handler
  #
  # We don't want to be left with a truncated data file in our database.
  # So turn off the interrupts, but record them, for later handling.
  #
  $interrupted = 0;
  sub interrupt { $interrupted = 1; }   # We were interrupted?

  sub set_sig_handler {    # turn handler on/off
    my( $turn_on ) = @_;
    if ( $interrupted ) {
      print STDERR "INTERRUPT CAUGHT IN CRITICAL PERIOD JUST ENDED -- EXITING\n";
      exit 10;
    }
    $SIG{'HUP'} = $SIG{'QUIT'} = $SIG{'INT'} = $SIG{'TERM'} =
        $turn_on ? 'interrupt' : 'DEFAULT';
  }

  #
  # Use a re-usable TMP file handle
  #
  $tmp = "/tmp/$prog.$$";
  open( TMP, "+>$tmp" )
    || die("$prog: Unable to create tmp file: $!\n");
  unlink($tmp);    # delete the temporary file, so it auto-deletes on exit
  ...
  #
  # Open the file to be modified
  #
  open( ACCT, "+<$entry" )
    || die("$prog: Unable to open \"$entry\" read/write: $!\n");

  # ensure our previously opened TMP file is clear
  seek(TMP, 0, 0);      # rewind the temporary file
  truncate(TMP, 0);     # empty it!

  #
  # Transfer file to TMP making changes
  #
  while( <ACCT> ) {
    chop;
    ...
    print TMP $_, "\n";
  }

  #
  # Finalize - enter CRITICAL PERIOD
  #
  # copy the updated file back into place
  set_sig_handler(1);   # enter critical period

  seek(TMP, 0, 0);      # rewind the temporary file
  truncate(ACCT, 0);    # zero the acctdb data file
  seek(ACCT, 0, 0);     # rewind to start
  print ACCT $_ while read(TMP, $_, 1024);   # fast copy back
  close(ACCT);          # finished

  set_sig_handler(0);   # leave critical period
  ...
  # NOTE that TMP is never closed!  It will auto-delete on program exit,
  # but not under NFS.
-------------------------------------------------------------------------------
Reading a keypress

  Perl5 can call &Curses::noecho() and &Curses::echo() to turn echoing off
  and on; other items in this package can also turn the terminal's raw
  mode on and off.  This should hopefully work for both BSD and SysV unix
  systems.
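  A simpler alternative is the Term::ReadKey module from CPAN.  A minimal
  sketch...

    use Term::ReadKey;
    ReadMode 'cbreak';        # single keypress mode (no line buffering)
    my $key = ReadKey(0);     # 0 => block until a key is pressed
    ReadMode 'normal';        # ALWAYS restore the terminal mode!
    print "You pressed '$key'\n";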
-------------------------------------------------------------------------------
Input Reading Timeout

  Alarm Method

  The following fails if the <STDIN> read is continued (restarted) after
  the interrupt handler returns.  Some systems do not continue the <STDIN>
  read after an interrupt.

    sub handler1 {
      local($sig) = @_;
      $SIG{'ALRM'} = 'DEFAULT';
      ($sig eq 'ALRM') && &the_rest_of_the_script;
    }
    sub the_rest_of_the_script {
      print $answer;
    }

    $answer = "You waited too long\n";
    $SIG{'ALRM'} = 'handler1';
    print "Type something here quickly (ended by RETURN): ";
    alarm(5);
    $answer = <STDIN>;
    &the_rest_of_the_script;

  This solves the return-from-interrupt continuing the read, BUT it causes
  a heavy cpu load during the timed period!

    require "sys/fcntl.ph";
    sub out_of_time { $timed_out = 1; }

    $| = 1;
    print "Prompt: ";
    $timed_out = 0;
    $SIG{'ALRM'} = 'out_of_time';
    fcntl(STDIN, &F_SETFL, 4);    # Sets STDIN to non-blocking.
    alarm(5);
    while (!($line = <STDIN>) && !$timed_out) {}   # busy wait!
    alarm(0);                     # Turn it off, in case it's not gone off yet.
    fcntl(STDIN, &F_SETFL, 0);    # STDIN back to blocking.
    $SIG{'ALRM'} = 'DEFAULT';

    if ($timed_out) {
      print "\nRan out of time\n";
    } else {
      print "You entered: $line";
    }

  Select Method

    sub ReadLineTimeout {
      local (*HANDLE, $timeout) = @_;
      local ($bits);
      vec ($bits, fileno(HANDLE), 1) = 1;
      select($bits, undef, undef, $timeout) ? <HANDLE> : undef;
    }

    $| = 1;   # so prompt shows up.
    print "Type something -now-: ";
    $result = &ReadLineTimeout(STDIN, 5);
    print "\n";
    if( defined $result ) {
      chop $result;
      print "You typed '$result'\n";
    } else {
      print "Time's up!\n";
    }
-------------------------------------------------------------------------------
Auto recording STDOUT and STDERR to a log file

  $logfile = 'logfile';

  local (*SAVE_STDOUT, *SAVE_STDERR);
  open (SAVE_STDOUT, ">&STDOUT");   # Save the original handles
  open (SAVE_STDERR, ">&STDERR");

  # Redirect STDOUT to my pipe
  open (STDOUT, "|tee $logfile");   # No use checking for errors since either
                                    # the pipe won't even be opened and the
                                    # script will fail, or the pipe will be
                                    # running and it might fail later.
  open (STDERR, ">&STDOUT");        # Make STDERR stuff go there also
  select (STDOUT); $| = 1;          # For my use, I wanted it un-buffered.

  ...do your stuff...

  close (STDERR);
  close (STDOUT);
  open (STDERR, ">&SAVE_STDERR");   # return to the original handles
  open (STDOUT, ">&SAVE_STDOUT");

                                        -- Douglas Valkenaar

  See the perl routines ~/store/perl/logging.pl
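  With perl 5.8 and later the tee pipe can also be opened in LIST form, so
  the shell never interprets $logfile.  A minimal sketch of the same
  redirect...

    open (STDOUT, '|-', 'tee', $logfile)
      or die "Can't pipe to tee: $!\n";
-------------------------------------------------------------------------------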