#use Data::Dumper

use strict;
use warnings;

# Input file name from the command line.
# BUG FIX: original used @ARGV[0] (a one-element slice); the scalar is $ARGV[0].
my $file = $ARGV[0];
defined $file or die "Usage: $0 <tab-delimited input file>\n";

# Slurp the whole input file so we can rewrite it below.
open my $in_fh, '<', $file or die "Cannot open $file for reading: $!";
my @array = <$in_fh>;
# BUG FIX: original die message on close said "Could not open".
close $in_fh or die "Could not close $file after reading: $!";

# splice(@array, 3) removes and returns everything from the fourth line on;
# writing that back truncates the three-line preamble from the file in place.
open my $out_fh, '>', $file or die "Cannot open $file for write access: $!";
print {$out_fh} splice(@array, 3);
close $out_fh or die "Could not close $file after writing: $!";

# Re-open the trimmed file so the first line read is now the header row.
open my $data_fh, '<', $file or die "Cannot open $file for read access: $!";

# Create new file for writing.
open my $OFILE, '>', 'Output.txt' or die "Cannot create file for output: $!";

# List of wanted columns, and respective output headers for those columns.
my @wanted_fields = ('ACC#', 'NAME', 'MOD_TYPE', 'Gene Symbol');
my @output_fields = qw/Acc Name Type Symbol/;

# Retrieve the field names from the first line, as it is the header.
my $header = <$data_fh>;
defined $header or die "$file is empty after trimming\n";
my @fields = split /\t/, $header;
chomp @fields;    # strip the newline clinging to the last field

# Print the headers to the output file.
print {$OFILE} join("\t", @output_fields), "\n";

# BUG FIX: the original had no loop here, so `split /\t/` operated on an
# undefined $_ and only the header row was ever written. Read each data
# line, map its columns to the header names, and emit the wanted columns
# in the pre-specified order.
while (my $line = <$data_fh>) {
    chomp $line;
    # Turn the line into a hash keyed by the header fields.
    my %row;
    @row{@fields} = split /\t/, $line;
    # Use map to pull the wanted columns out of the row hash.
    my @wanted_data = map { $row{$_} } @wanted_fields;
    print {$OFILE} join("\t", @wanted_data), "\n";
}

close $data_fh or die "Error closing $file: $!";
close $OFILE   or die "Error closing Output.txt: $!";

I have this script, which I built with help from another post I made in this forum, and I have another question about tweaking it. Right now it does a good job of pulling the columns I need and putting them in a pre-specified order in a new file.

What I want to add is a check for whether the output file already exists: if it does not, the script should run as above and create the file; if it does exist, the script should do the same thing but without writing a new header row.

How do I append the columns from the new file to the proper columns in the existing file, given the column order I set up when the existing file was created?

EDIT: I think I did it properly. I just duplicated the other loop and left out the "print header" bit. It seems to work.