3 # get_harvester_results retrieves files of search results from ncbi,
4 # and is released under the terms of the GPL version 2, or any later
5 # version, at your option. See the file README and COPYING for more
8 # Copyright 2004 by Don Armstrong <don@donarmstrong.com>.
10 # $Id: ss,v 1.1 2004/06/29 05:26:35 don Exp $
23 get_harvester_results [options]
29 --dir, -D directory to stick results into [default .]
30 --name, -n file naming scheme [default ${search}_results.$format]
31 --terms, -t file of search terms [default -]
32 --debug, -d debugging level [default 0]
33 --help, -h display this help
34 --man, -m display manual
42 Debug verbosity. (Default 0)
46 Display brief usage information.
56 get_harvester_results -D ./harvester_results/ -n '${search}_name.html' < search_parameters
58 Will pretty much do what you want
# Package globals: $DEBUG is the verbosity level, $REVISION the script
# revision reported in the User-Agent string.
# NOTE(review): `use vars` is obsolete; modern code would use `our`.
64 use vars qw($DEBUG $REVISION);
# Pull the numeric revision out of the Subversion LastChangedRevision
# keyword; q$...$ keeps the keyword from being mangled by interpolation.
67 ($REVISION) = q$LastChangedRevision: 1$ =~ /LastChangedRevision:\s+([^\s]+)/;
# Default to quiet operation unless a debug level was already set.
68 $DEBUG = 0 unless defined $DEBUG;
# usleep() is used later to pace requests with sub-second granularity.
75 use Time::HiRes qw(usleep);
77 # XXX parse config file
# Option defaults. NOTE(review): this excerpt is non-contiguous; entries
# between these keys (e.g. dir, terms, orgn defaults) are not visible here.
79 my %options = (debug => 0,
# '${search}' is interpolated later via string eval to build the output
# directory name for each search term.
85 name => '${search}_results_harvester',
88 harvester_site => 'http://harvester.fzk.de',
# Populate %options from the command line; -d may be repeated to raise
# the debug level (the '+' spec increments).
91 GetOptions(\%options,'format|f=s','database|b=s','name|n=s',
92 'terms|t=s','dir|D=s','debug|d+','help|h|?','man|m');
94 pod2usage() if $options{help};
95 pod2usage({verbose=>2}) if $options{man};
97 $DEBUG = $options{debug};
# The output directory must already exist; we only create per-search
# subdirectories beneath it.
99 if (not -d $options{dir}) {
100 die "$options{dir} does not exist or is not a directory";
# Search CGI path template; the zoom_query value here is a placeholder
# that is replaced per search term via query_form() below.
# NOTE(review): assumes $options{orgn} has a default set in the lines not
# visible in this excerpt — confirm against the full file.
104 $options{harvester_search_url} = '/cgi-bin/'.$options{orgn}.'/search.cgi?zoom_query=golgi&zoom_per_page=100&zoom_and=1&zoom_sort=0';
106 #open search terms file
# '-' means read search terms from STDIN (branch body not visible in this
# excerpt); otherwise open the named file read-only.
108 if ($options{terms} eq '-') {
# NOTE(review): indirect-object syntax; IO::File->new(...) is the safer
# spelling. Also, `or` binds after the whole expression — verify the die
# actually fires on open failure in the full file.
112 $terms = new IO::File $options{terms}, 'r' or die "Unable to open file $options{terms}: $!";
118 # Get uids to retrieve
# Build the search URL for the current term ($search is read per-term from
# the terms file in lines not visible in this excerpt).
121 my $uri = URI->new($options{harvester_site}.$options{harvester_search_url});
# Clear the placeholder zoom_query from the template URL, then set the
# real query parameters (continuation lines of both calls are missing
# from this excerpt).
122 $uri->query_form(zoom_query =>[],
124 $uri->query_form(zoom_query => $search,
126 my $url = $uri->as_string;
# Work queue feeding result-page URLs to the downloader thread.
127 my $queue = Thread::Queue->new();
# String-eval interpolates '${search}' inside the --name template.
# NOTE(review): string eval on a user-supplied template is an injection
# risk; also `or die` misfires if the name legitimately evaluates false.
128 my $dir_name = eval qq("$options{name}") or die $@;
129 if (not -d "$options{dir}/$dir_name") {
130 mkdir("$options{dir}/$dir_name") or die "Unable to make directory $options{dir}/$dir_name $!";
# Spawn the downloader thread; it drains $queue until it sees undef.
132 my $wget_thread = threads->new(\&get_url,"$options{dir}/$dir_name",$queue);
133 push @threads,$wget_thread;
135 my $mech = WWW::Mechanize->new(agent => "DA_get_harvester_results/$REVISION");
137 #HTTP::Request->new('GET', $url);
# Pagination loop (the `do {` opener and the $mech->get call are in lines
# not visible here): collect result links from each page, follow "Next ".
141 my @links = $mech->links;
143 for my $link (@links) {
# Remember the "Next " pager link so we can follow it after this page.
144 if ($link->text() =~ /Next /) {
# Result-page links are enqueued for the downloader thread.
# NOTE(review): unescaped dots in the regex and the hard-coded
# /human/ path segment look fragile — confirm against live site.
147 elsif ($link->url =~ m#http://harvester.fzk.de/harvester/human/[^\/]+/[^.]+.htm#) {
148 $queue->enqueue($link->url());
151 $mech->follow_link(url=>$next_link->url) if defined $next_link;
152 } while ($next_link);
# undef is the sentinel telling the downloader thread to finish up.
153 $queue->enqueue(undef);
# Wait for downloader threads (join call is in lines not visible here).
155 for my $thread (@threads) {
# Body of get_url($dir, $queue): runs in the downloader thread, draining
# result-page URLs from $queue and fetching them in batches of 30.
# (The `sub get_url {` header and @current_urls declaration are in lines
# not visible in this excerpt.)
160 my ($dir,$queue) = @_;
# dequeue blocks until an item arrives; the undef sentinel ends the loop.
164 while (my $url = $queue->dequeue) {
165 push @current_urls,$url;
# Flush a batch once 30 URLs have accumulated (the batch reset after
# this call is presumably in a line not visible here — confirm).
166 if (@current_urls >= 30) {
167 wget_urls($dir,@current_urls);
# Flush any remaining partial batch after the sentinel is seen.
171 wget_urls($dir,@current_urls) if @current_urls;
# Body of wget_urls($dir, @urls): fetch each URL with WWW::Mechanize and
# save the content under $dir, pacing requests to be polite to the server.
# (The `sub wget_urls {` header is in a line not visible in this excerpt.)
174 my ($dir,@urls) = @_;
176 # replacing wget with WWW::Mechanize
177 my $mech = WWW::Mechanize->new(agent => "DA_get_harvester_results/$REVISION");
178 for my $url (@urls) {
179 # sleep for around 2 seconds
# usleep takes microseconds; (0.5+rand)*2 gives a 1-3 s randomized delay.
180 usleep((0.5+rand)*2*1000000);
# Derive a filesystem-safe filename from the URL: drop the scheme, then
# strip every non-word character (slashes, dots, etc. all collapse away).
182 my $cleaned_url = $url;
183 $cleaned_url =~ s{http://}{}g;
184 $cleaned_url =~ s/[^\w]//g;
# Save the page fetched by the $mech->get call (in a line not visible
# in this excerpt) under the sanitized name.
186 $mech->save_content($dir.'/'.$cleaned_url);
# NOTE(review): "retreived" is a typo in a runtime message; left as-is
# here since this edit changes comments only.
187 print "retreived $url\n";
193 #system(q(wget),'-nd','-nH','-w','2','--random-wait','-P',$dir,@urls) == 0 or warn "$!";