--- /dev/null
+#! /usr/bin/perl
+
+# get_ensembl_results retrieves files of search results from ensembl,
+# and is released under the terms of the GPL version 2, or any later
+# version, at your option. See the file README and COPYING for more
+# information.
+
+# Copyright 2008 by Don Armstrong <don@donarmstrong.com>.
+
+
+use warnings;
+use strict;
+
+
+use Getopt::Long;
+use Pod::Usage;
+
+=head1 NAME
+
+ get_ensembl_results [options]
+
+=head1 SYNOPSIS
+
+
+ Options:
+ --dir, -D directory to stick results into [default .]
+ --name, -n file naming scheme [default ${search}_results.$format]
+ --terms, -t file of search terms [default -]
+ --debug, -d debugging level [default 0]
+ --help, -h display this help
+ --man, -m display manual
+
+=head1 OPTIONS
+
+=over
+
+=item B<--debug, -d>
+
+Debug verbosity. (Default 0)
+
+=item B<--help, -h>
+
+Display brief usage information.
+
+=item B<--man, -m>
+
+Display this manual.
+
+=back
+
+=head1 EXAMPLES
+
+ get_ensembl_results -D ./ensembl_results/ -n '${search}_name.html' < search_parameters
+
+Will pretty much do what you want
+
+=cut
+
+
+
+# Package globals: $DEBUG (verbosity) and $REVISION (from the VCS keyword).
+# "our" replaces the obsolete "use vars" pragma; behavior is identical.
+our ($DEBUG, $REVISION);
+
+BEGIN{
+     # Extract the numeric revision from the expanded LastChangedRevision keyword.
+     ($REVISION) = q$LastChangedRevision: 1$ =~ /LastChangedRevision:\s+([^\s]+)/;
+     $DEBUG = 0 unless defined $DEBUG;
+}
+
+use IO::File;
+use URI;
+use WWW::Mechanize;
+use Time::HiRes qw(usleep);
+
+# XXX parse config file
+
+# Default configuration; the command-line options parsed below override these.
+# "name" is a template interpolated per search term (see the main loop).
+my %options = (debug => 0,
+               help => 0,
+               man => 0,
+               dir => '.',
+               name => '${search}_results_ensembl',
+               terms => '-',
+               ensembl_site => 'http://www.ensembl.org',
+               ensembl_search_url => '/Homo_sapiens/searchview?species=Homo_sapiens&idx=&',
+               );
+
+# NOTE(review): the return value of GetOptions is not checked, so unknown
+# options warn but do not abort -- confirm this is intended.
+GetOptions(\%options,'name|n=s',
+           'terms|t=s','dir|D=s','debug|d+','help|h|?','man|m');
+
+# Show brief usage for --help, the full manual for --man.
+pod2usage() if $options{help};
+pod2usage({verbose=>2}) if $options{man};
+
+$DEBUG = $options{debug};
+
+# The output directory must already exist; we only create per-search
+# subdirectories inside it, never the top-level directory itself.
+if (not -d $options{dir}) {
+     die "$options{dir} does not exist or is not a directory";
+}
+
+# Open the file of search terms; "-" means read terms from STDIN.
+my $terms;
+if ($options{terms} eq '-') {
+     $terms = \*STDIN;
+}
+else {
+     # Direct method-call syntax instead of the indirect "new IO::File" form,
+     # which Perl best practice deprecates (it can be misparsed).
+     $terms = IO::File->new($options{terms}, 'r') or die "Unable to open file $options{terms}: $!";
+}
+
+#For every term
+# For every search term: query ensembl, follow the "Gene" results section,
+# walk its paging links, and save each geneview page into a per-term directory.
+while (<$terms>) {
+     # One search term per line; strip newline and any trailing CR.
+     chomp;
+     s/\r$//g;
+     my $search = $_;
+     # NOTE(review): string eval is used as a cheap template engine so the
+     # --name option can interpolate ${search}; the option value must come
+     # from a trusted user, as arbitrary code in it would be executed.
+     my $dir_name = eval qq("$options{name}") or die $@;
+     if (not -d "$options{dir}/$dir_name") {
+          mkdir("$options{dir}/$dir_name") or die "Unable to make directory $options{dir}/$dir_name $!";
+     }
+     # Build the search URL, letting URI handle query-string escaping of the term.
+     my $uri = URI->new($options{ensembl_site}.$options{ensembl_search_url});
+     $uri->query_form($uri->query_form(),
+                      q => $search,
+                     );
+     my $url = $uri->as_string;
+     my $mech = WWW::Mechanize->new(agent=>"DA_get_ensembl_results/$REVISION");
+     $mech->get($url);
+     # The results page links to a "Gene" section; if the search produced no
+     # such link, skip this term instead of crashing on an undef link below.
+     my @links = grep {defined $_} $mech->find_link(text_regex=>qr/Gene/);
+     if (not @links) {
+          warn "No Gene results link found for search '$search'; skipping";
+          next;
+     }
+     $mech->follow_link(text_regex=>qr/Gene/);
+     # Walk through all of the result pages: paging links carry a _s= offset.
+     push @links,$mech->find_all_links(url_regex => qr/_s=\d{2,}$/);
+     for my $link (@links) {
+          my $link_url = $link->url_abs->as_string;
+          print STDERR "getting $link_url\n";
+          $mech->get($link_url);
+          my @gene_links = $mech->find_all_links(url_regex => qr/geneview/);
+          for my $gene_link (@gene_links) {
+               my $gene_url = $gene_link->url_abs->as_string;
+               print STDERR "saving $gene_url\n";
+               $mech->get($gene_url);
+               # Flatten the URL into a filesystem-safe name (word chars only).
+               my $cleaned_url = $gene_url;
+               $cleaned_url =~ s{http://}{}g;
+               $cleaned_url =~ s/[^\w]//g;
+               eval {
+                    $mech->save_content($options{dir}.'/'.$dir_name.'/'.$cleaned_url);
+                    # Bug fix: report the gene page actually saved (was the
+                    # search URL $url), and correct the "retreived" typo.
+                    print "retrieved $gene_url\n";
+               };
+               if ($@) {
+                    warn $@;
+               }
+          }
+     }
+}
+
+
+
+
+
+
+__END__