1 Bug 15772119 - SUNBT7144354 xscreensaver 5.15 fails to load image files |
|
2 |
|
3 xscreensaver-getimage-file fails to load image files because |
|
4 the LWP::Simple perl module is missing in Solaris. I have reverted |
|
5 xscreensaver-getimage-file to the older version 1.27 for the time being. |
|
6 RFE 15772127 has already been filed to add the LWP perl modules to Solaris. |
|
7 |
|
8 Fixed upstream in a different form in a later release. |
|
9 |
|
10 --- |
|
11 driver/xscreensaver-getimage-file | 428 ++++--------------------------------- |
|
12 1 files changed, 45 insertions(+), 383 deletions(-) |
|
13 |
|
14 diff --git a/driver/xscreensaver-getimage-file b/driver/xscreensaver-getimage-file |
|
15 --- a/driver/xscreensaver-getimage-file |
|
16 +++ b/driver/xscreensaver-getimage-file |
|
17 @@ -1,5 +1,5 @@ |
|
18 -#!/usr/bin/perl -w |
|
19 -# Copyright © 2001-2011 Jamie Zawinski <[email protected]>. |
|
20 +#!/usr/perl5/bin/perl -w |
|
21 +# Copyright © 2001-2009 Jamie Zawinski <[email protected]>. |
|
22 # |
|
23 # Permission to use, copy, modify, distribute, and sell this software and its |
|
24 # documentation for any purpose is hereby granted without fee, provided that |
|
25 @@ -13,18 +13,12 @@ |
|
26 # prints its name. The file will be an image file whose dimensions are |
|
27 # larger than a certain minimum size. |
|
28 # |
|
29 -# If the directory is a URL, it is assumed to be an RSS or Atom feed. |
|
30 -# The images from that feed will be downloaded, cached, and selected from |
|
31 -# at random. The feed will be re-polled periodically, as needed. |
|
32 -# |
|
33 # The various xscreensaver hacks that manipulate images ("jigsaw", etc.) get |
|
34 # the image to manipulate by running the "xscreensaver-getimage" program. |
|
35 # |
|
36 # Under X11, the "xscreensaver-getimage" program invokes this script, |
|
37 # depending on the value of the "chooseRandomImages" and "imageDirectory" |
|
38 # settings in the ~/.xscreensaver file (or .../app-defaults/XScreenSaver). |
|
39 -# The screen savers invoke "xscreensaver-getimage" via utils/grabclient.c, |
|
40 -# which then invokes this script. |
|
41 # |
|
42 # Under Cocoa, this script lives inside the .saver bundle, and is invoked |
|
43 # directly from utils/grabclient.c. |
|
44 @@ -49,12 +43,8 @@ use bytes; # Larry can take Unicode and shove it up his ass sideways. |
|
45 # Perl 5.8.0 causes us to start getting incomprehensible |
|
46 # errors about UTF-8 all over the place without this. |
|
47 |
|
48 -use Digest::MD5 qw(md5_base64); |
|
49 -use LWP::Simple qw($ua); |
|
50 - |
|
51 - |
|
52 my $progname = $0; $progname =~ s@.*/@@g; |
|
53 -my $version = q{ $Revision: 1.30 $ }; $version =~ s/^[^0-9]+([0-9.]+).*$/$1/; |
|
54 +my $version = q{ $Revision: 1.27 $ }; $version =~ s/^[^0-9]+([0-9.]+).*$/$1/; |
|
55 |
|
56 my $verbose = 0; |
|
57 |
|
58 @@ -74,10 +64,6 @@ my $cache_p = 1; |
|
59 # |
|
60 my $cache_max_age = 60 * 60 * 3; # 3 hours |
|
61 |
|
62 -# Re-poll RSS/Atom feeds when local copy is older than this many seconds. |
|
63 -# |
|
64 -my $feed_max_age = $cache_max_age; |
|
65 - |
|
66 |
|
67 # This matches files that we are allowed to use as images (case-insensitive.) |
|
68 # Anything not matching this is ignored. This is so you can point your |
|
69 @@ -118,19 +104,18 @@ my $stat_count = 0; # number of files/dirs stat'ed |
|
70 my $skip_count_unstat = 0; # number of files skipped without stat'ing |
|
71 my $skip_count_stat = 0; # number of files skipped after stat |
|
72 |
|
73 -sub find_all_files($); |
|
74 -sub find_all_files($) { |
|
75 +sub find_all_files { |
|
76 my ($dir) = @_; |
|
77 |
|
78 print STDERR "$progname: + reading dir $dir/...\n" if ($verbose > 1); |
|
79 |
|
80 - my $dd; |
|
81 - if (! opendir ($dd, $dir)) { |
|
82 + local *DIR; |
|
83 + if (! opendir (DIR, $dir)) { |
|
84 print STDERR "$progname: couldn't open $dir: $!\n" if ($verbose); |
|
85 return; |
|
86 } |
|
87 - my @files = readdir ($dd); |
|
88 - closedir ($dd); |
|
89 + my @files = readdir (DIR); |
|
90 + closedir (DIR); |
|
91 |
|
92 my @dirs = (); |
|
93 |
|
94 @@ -205,7 +190,7 @@ sub find_all_files($) { |
|
95 } |
|
96 |
|
97 |
|
98 -sub spotlight_all_files($) { |
|
99 +sub spotlight_all_files { |
|
100 my ($dir) = @_; |
|
101 |
|
102 my @terms = (); |
|
103 @@ -235,7 +220,7 @@ sub spotlight_all_files($) { |
|
104 # running at once, one will wait for the other, instead of both of |
|
105 # them spanking the same file system at the same time. |
|
106 # |
|
107 -my $cache_fd = undef; |
|
108 +local *CACHE_FILE; |
|
109 my $cache_file_name = undef; |
|
110 my $read_cache_p = 0; |
|
111 |
|
112 @@ -257,18 +242,18 @@ sub read_cache($) { |
|
113 if ($verbose > 1); |
|
114 |
|
115 my $file = $cache_file_name; |
|
116 - open ($cache_fd, '+>>', $file) || error ("unable to write $file: $!"); |
|
117 - flock ($cache_fd, LOCK_EX) || error ("unable to lock $file: $!"); |
|
118 - seek ($cache_fd, 0, 0) || error ("unable to rewind $file: $!"); |
|
119 + open (CACHE_FILE, "+>>$file") || error ("unable to write $file: $!"); |
|
120 + flock (CACHE_FILE, LOCK_EX) || error ("unable to lock $file: $!"); |
|
121 + seek (CACHE_FILE, 0, 0) || error ("unable to rewind $file: $!"); |
|
122 |
|
123 - my $mtime = (stat($cache_fd))[9]; |
|
124 + my $mtime = (stat(CACHE_FILE))[9]; |
|
125 |
|
126 if ($mtime + $cache_max_age < time) { |
|
127 print STDERR "$progname: cache is too old\n" if ($verbose); |
|
128 return (); |
|
129 } |
|
130 |
|
131 - my $odir = <$cache_fd>; |
|
132 + my $odir = <CACHE_FILE>; |
|
133 $odir =~ s/[\r\n]+$//s if defined ($odir); |
|
134 if (!defined ($odir) || ($dir ne $odir)) { |
|
135 print STDERR "$progname: cache is for $odir, not $dir\n" |
|
136 @@ -277,7 +262,7 @@ sub read_cache($) { |
|
137 } |
|
138 |
|
139 my @files = (); |
|
140 - while (<$cache_fd>) { |
|
141 + while (<CACHE_FILE>) { |
|
142 s/[\r\n]+$//s; |
|
143 push @files, "$odir/$_"; |
|
144 } |
|
145 @@ -300,17 +285,18 @@ sub write_cache($) { |
|
146 |
|
147 if (! $read_cache_p) { |
|
148 |
|
149 - truncate ($cache_fd, 0) || |
|
150 + truncate (CACHE_FILE, 0) || |
|
151 error ("unable to truncate $cache_file_name: $!"); |
|
152 - seek ($cache_fd, 0, 0) || |
|
153 + seek (CACHE_FILE, 0, 0) || |
|
154 error ("unable to rewind $cache_file_name: $!"); |
|
155 |
|
156 if ($#all_files >= 0) { |
|
157 - print $cache_fd "$dir\n"; |
|
158 + print CACHE_FILE "$dir\n"; |
|
159 + my $re = qr/$dir/; |
|
160 foreach (@all_files) { |
|
161 my $f = $_; # stupid Perl. do this to avoid modifying @all_files! |
|
162 - $f =~ s@^\Q$dir\L/@@so || die; # remove $dir from front |
|
163 - print $cache_fd "$f\n"; |
|
164 + $f =~ s@^$re/@@so || die; |
|
165 + print CACHE_FILE "$f\n"; |
|
166 } |
|
167 } |
|
168 |
|
169 @@ -318,319 +304,17 @@ sub write_cache($) { |
|
170 if ($verbose); |
|
171 } |
|
172 |
|
173 - flock ($cache_fd, LOCK_UN) || |
|
174 + flock (CACHE_FILE, LOCK_UN) || |
|
175 error ("unable to unlock $cache_file_name: $!"); |
|
176 - close ($cache_fd); |
|
177 - $cache_fd = undef; |
|
178 -} |
|
179 - |
|
180 - |
|
181 -# Returns a list of the image enclosures in the RSS or Atom feed. |
|
182 -# Elements of the list are references, [ "url", "guid" ]. |
|
183 -# |
|
184 -sub parse_feed($) { |
|
185 - my ($url) = @_; |
|
186 - |
|
187 - $ua->agent ("$progname/$version"); |
|
188 - $ua->timeout (10); # bail sooner than the default of 3 minutes |
|
189 - |
|
190 - my $body = (LWP::Simple::get($url) || ''); |
|
191 - |
|
192 - error ("not an RSS or Atom feed: $url") |
|
193 - unless ($body =~ m@^<\?xml\s@si); |
|
194 - |
|
195 - $body =~ s@(<ENTRY|<ITEM)@\001$1@gsi; |
|
196 - my @items = split(/\001/, $body); |
|
197 - shift @items; |
|
198 - |
|
199 - my @imgs = (); |
|
200 - my %ids; |
|
201 - |
|
202 - foreach my $item (@items) { |
|
203 - my $iurl = undef; |
|
204 - my $id = undef; |
|
205 - |
|
206 - # First look for <link rel="enclosure" href="..."> |
|
207 - # |
|
208 - if (! $iurl) { |
|
209 - $item =~ s!(<LINK[^<>]*>)!{ |
|
210 - my $link = $1; |
|
211 - my ($rel) = ($link =~ m/\bREL\s*=\s*[\"\']?([^<>\'\"]+)/si); |
|
212 - my ($type) = ($link =~ m/\bTYPE\s*=\s*[\"\']?([^<>\'\"]+)/si); |
|
213 - my ($href) = ($link =~ m/\bHREF\s*=\s*[\"\']([^<>\'\"]+)/si); |
|
214 - |
|
215 - if ($rel && lc($rel) eq 'enclosure') { |
|
216 - if ($type) { |
|
217 - $href = undef unless ($type =~ m@^image/@si); # omit videos |
|
218 - } |
|
219 - $iurl = $href if ($href); |
|
220 - } |
|
221 - $link; |
|
222 - }!gsexi; |
|
223 - } |
|
224 - |
|
225 - # Then look for <media:content url="..."> |
|
226 - # |
|
227 - if (! $iurl) { |
|
228 - $item =~ s!(<MEDIA:CONTENT[^<>]*>)!{ |
|
229 - my $link = $1; |
|
230 - my ($href) = ($link =~ m/\bURL\s*=\s*[\"\']([^<>\'\"]+)/si); |
|
231 - $iurl = $href if $href; |
|
232 - $link; |
|
233 - }!gsexi; |
|
234 - } |
|
235 - |
|
236 - # Then look for <description>... with an <img href="..."> inside. |
|
237 - # |
|
238 - if (! $iurl) { |
|
239 - $item =~ s!(<description[^<>]*>.*?</description>)!{ |
|
240 - my $desc = $1; |
|
241 - $desc =~ s/&lt;/</gs; |
|
242 - $desc =~ s/&gt;/>/gs; |
|
243 - $desc =~ s/&quot;/\"/gs; |
|
244 - $desc =~ s/&apos;/\'/gs; |
|
245 - $desc =~ s/&amp;/&/gs; |
|
246 - my ($href) = ($desc =~ m@<IMG[^<>]*\bSRC=[\"\']?([^\"\'<>]+)@si); |
|
247 - $iurl = $href if ($href); |
|
248 - $desc; |
|
249 - }!gsexi; |
|
250 - } |
|
251 - |
|
252 - # Could also do <content:encoded>, but the above probably covers all |
|
253 - # of the real-world possibilities. |
|
254 - |
|
255 - |
|
256 - # Find a unique ID for this image, to defeat image farms. |
|
257 - # First look for <id>...</id> |
|
258 - ($id) = ($item =~ m!<ID\b[^<>]*>\s*([^<>]+?)\s*</ID>!si) unless $id; |
|
259 - |
|
260 - # Then look for <guid isPermaLink=...> ... </guid> |
|
261 - ($id) = ($item =~ m!<GUID\b[^<>]*>\s*([^<>]+?)\s*</GUID>!si) unless $id; |
|
262 - |
|
263 - # Then look for <link> ... </link> |
|
264 - ($id) = ($item =~ m!<LINK\b[^<>]*>\s*([^<>]+?)\s*</LINK>!si) unless $id; |
|
265 - |
|
266 - |
|
267 - if ($iurl) { |
|
268 - $id = $iurl unless $id; |
|
269 - my $o = $ids{$id}; |
|
270 - if (! $o) { |
|
271 - $ids{$id} = $iurl; |
|
272 - my @P = ($iurl, $id); |
|
273 - push @imgs, \@P; |
|
274 - } elsif ($iurl ne $o) { |
|
275 - print STDERR "$progname: WARNING: dup ID \"$id\"" . |
|
276 - " for \"$o\" and \"$iurl\"\n"; |
|
277 - } |
|
278 - } |
|
279 - } |
|
280 - |
|
281 - return @imgs; |
|
282 -} |
|
283 - |
|
284 - |
|
285 -# Like md5_base64 but uses filename-safe characters. |
|
286 -# |
|
287 -sub md5_file($) { |
|
288 - my ($s) = @_; |
|
289 - $s = md5_base64($s); |
|
290 - $s =~ s@[/]@_@gs; |
|
291 - $s =~ s@[+]@-@gs; |
|
292 - return $s; |
|
293 -} |
|
294 - |
|
295 - |
|
296 -# Given the URL of an image, download it into the given directory |
|
297 -# and return the file name. |
|
298 -# |
|
299 -sub download_image($$$) { |
|
300 - my ($url, $uid, $dir) = @_; |
|
301 - |
|
302 - my ($ext) = ($url =~ m@\.([a-z\d]+)$@si); |
|
303 - my $file = md5_file ($uid); |
|
304 - $file .= '.' . lc($ext) if $ext; |
|
305 - |
|
306 - # Don't bother doing If-Modified-Since to see if the URL has changed. |
|
307 - # If we have already downloaded it, assume it's good. |
|
308 - if (-f "$dir/$file") { |
|
309 - print STDERR "$progname: exists: $dir/$file for $uid / $url\n" |
|
310 - if ($verbose > 1); |
|
311 - return $file; |
|
312 - } |
|
313 - |
|
314 - # Special-case kludge for Flickr: |
|
315 - # Their RSS feeds sometimes include only the small versions of the images. |
|
316 - # So if the URL ends in "s" (75x75), "t" (100x100) or "m" (240x240),then |
|
317 - # munge it to be "b" (1024x1024). |
|
318 - # |
|
319 - $url =~ s@_[stm](\.[a-z]+)$@_b$1@si |
|
320 - if ($url =~ m@^https?://[^/?#&]*?flickr\.com/@si); |
|
321 - |
|
322 - print STDERR "$progname: downloading: $dir/$file for $uid / $url\n" |
|
323 - if ($verbose > 1); |
|
324 - $ua->agent ("$progname/$version"); |
|
325 - my $status = LWP::Simple::mirror ($url, "$dir/$file"); |
|
326 - if (!LWP::Simple::is_success ($status)) { |
|
327 - print STDERR "$progname: error $status: $url\n"; # keep going |
|
328 - } |
|
329 - |
|
330 - return $file; |
|
331 -} |
|
332 - |
|
333 - |
|
334 -sub mirror_feed($) { |
|
335 - my ($url) = @_; |
|
336 - |
|
337 - if ($url !~ m/^https?:/si) { # not a URL: local directory. |
|
338 - return (undef, $url); |
|
339 - } |
|
340 - |
|
341 - my $dir = "$ENV{HOME}/Library/Caches"; # MacOS location |
|
342 - if (-d $dir) { |
|
343 - $dir = "$dir/org.jwz.xscreensaver.feeds"; |
|
344 - } elsif (-d "$ENV{HOME}/tmp") { |
|
345 - $dir = "$ENV{HOME}/tmp/.xscreensaver-feeds"; |
|
346 - } else { |
|
347 - $dir = "$ENV{HOME}/.xscreensaver-feeds"; |
|
348 - } |
|
349 - |
|
350 - if (! -d $dir) { |
|
351 - mkdir ($dir) || error ("mkdir $dir: $!"); |
|
352 - print STDERR "$progname: mkdir $dir/\n" if ($verbose); |
|
353 - } |
|
354 - |
|
355 - # MD5 for directory name to use for cache of a feed URL. |
|
356 - $dir .= '/' . md5_file ($url); |
|
357 - |
|
358 - if (! -d $dir) { |
|
359 - mkdir ($dir) || error ("mkdir $dir: $!"); |
|
360 - print STDERR "$progname: mkdir $dir/ for $url\n" if ($verbose); |
|
361 - } |
|
362 - |
|
363 - # At this point, we have the directory corresponding to this URL. |
|
364 - # Now check to see if the files in it are up to date, and download |
|
365 - # them if not. |
|
366 - |
|
367 - my $stamp = '.timestamp'; |
|
368 - my $lock = "$dir/$stamp"; |
|
369 - |
|
370 - print STDERR "$progname: awaiting lock: $lock\n" |
|
371 - if ($verbose > 1); |
|
372 - |
|
373 - my $mtime = ((stat($lock))[9]) || 0; |
|
374 - |
|
375 - my $lock_fd; |
|
376 - open ($lock_fd, '+>>', $lock) || error ("unable to write $lock: $!"); |
|
377 - flock ($lock_fd, LOCK_EX) || error ("unable to lock $lock: $!"); |
|
378 - seek ($lock_fd, 0, 0) || error ("unable to rewind $lock: $!"); |
|
379 - |
|
380 - my $poll_p = ($mtime + $feed_max_age < time); |
|
381 - |
|
382 - $poll_p = 1 unless ($cache_p); # poll again now with --no-cache cmd line arg. |
|
383 - |
|
384 - # Even if the cache is young, let's make sure there are at least |
|
385 - # a few files in it, and re-check if not. |
|
386 - # |
|
387 - if (! $poll_p) { |
|
388 - my $count = 0; |
|
389 - opendir (my $dirh, $dir) || error ("$dir: $!"); |
|
390 - foreach my $f (readdir ($dirh)) { |
|
391 - next if ($f =~ m/^\./s); |
|
392 - $count++; |
|
393 - last; |
|
394 - } |
|
395 - closedir $dirh; |
|
396 - |
|
397 - if ($count <= 0) { |
|
398 - print STDERR "$progname: no files in cache of $url\n" if ($verbose); |
|
399 - $poll_p = 1; |
|
400 - } |
|
401 - } |
|
402 - |
|
403 - if ($poll_p) { |
|
404 - |
|
405 - print STDERR "$progname: loading $url\n" if ($verbose); |
|
406 - |
|
407 - my %files; |
|
408 - opendir (my $dirh, $dir) || error ("$dir: $!"); |
|
409 - foreach my $f (readdir ($dirh)) { |
|
410 - next if ($f eq '.' || $f eq '..'); |
|
411 - $files{$f} = 0; # 0 means "file exists, should be deleted" |
|
412 - } |
|
413 - closedir $dirh; |
|
414 - |
|
415 - $files{$stamp} = 1; |
|
416 - |
|
417 - # Download each image currently in the feed. |
|
418 - # |
|
419 - my $count = 0; |
|
420 - my @urls = parse_feed ($url); |
|
421 - foreach my $p (@urls) { |
|
422 - my ($furl, $id) = @$p; |
|
423 - my $f = download_image ($furl, $id, $dir); |
|
424 - next unless $f; |
|
425 - $files{$f} = 1; # Got it, don't delete |
|
426 - $count++; |
|
427 - } |
|
428 - |
|
429 - print STDERR "$progname: empty feed: $url\n" if ($count <= 0); |
|
430 - |
|
431 - # Now delete any files that are no longer in the feed. |
|
432 - # But if there was nothing in the feed (network failure?) |
|
433 - # then don't blow away the old files. |
|
434 - # |
|
435 - my $kept = 0; |
|
436 - foreach my $f (keys(%files)) { |
|
437 - if ($count <= 0) { |
|
438 - $kept++; |
|
439 - } elsif ($files{$f}) { |
|
440 - $kept++; |
|
441 - } else { |
|
442 - if (unlink ("$dir/$f")) { |
|
443 - print STDERR "$progname: rm $dir/$f\n" if ($verbose > 1); |
|
444 - } else { |
|
445 - print STDERR "$progname: rm $dir/$f: $!\n"; # don't bail |
|
446 - } |
|
447 - } |
|
448 - } |
|
449 - |
|
450 - # Both feed and cache are empty. No files at all. |
|
451 - error ("empty feed: $url") if ($kept <= 1); |
|
452 - |
|
453 - $mtime = time(); # update the timestamp |
|
454 - |
|
455 - } else { |
|
456 - |
|
457 - # Not yet time to re-check the URL. |
|
458 - print STDERR "$progname: using cache: $url\n" if ($verbose); |
|
459 - |
|
460 - } |
|
461 - |
|
462 - # Unlock and update the write date on the .timestamp file. |
|
463 - # |
|
464 - truncate ($lock_fd, 0) || error ("unable to truncate $lock: $!"); |
|
465 - seek ($lock_fd, 0, 0) || error ("unable to rewind $lock: $!"); |
|
466 - utime ($mtime, $mtime, $lock_fd) || error ("unable to touch $lock: $!"); |
|
467 - flock ($lock_fd, LOCK_UN) || error ("unable to unlock $lock: $!"); |
|
468 - close ($lock_fd); |
|
469 - $lock_fd = undef; |
|
470 - print STDERR "$progname: unlocked $lock\n" if ($verbose > 1); |
|
471 - |
|
472 - # Don't bother using the imageDirectory cache. We know that this directory |
|
473 - # is flat, and we can assume that an RSS feed doesn't contain 100,000 images |
|
474 - # like ~/Pictures/ might. |
|
475 - # |
|
476 - $cache_p = 0; |
|
477 - |
|
478 - # Return the URL and directory name of the files of that URL's local cache. |
|
479 - # |
|
480 - return ($url, $dir); |
|
481 + close (CACHE_FILE); |
|
482 } |
|
483 |
|
484 |
|
485 sub find_random_file($) { |
|
486 my ($dir) = @_; |
|
487 |
|
488 + $dir =~ s@/+$@@g; |
|
489 + |
|
490 if ($use_spotlight_p == -1) { |
|
491 $use_spotlight_p = 0; |
|
492 if (-x '/usr/bin/mdfind') { |
|
493 @@ -638,14 +322,6 @@ sub find_random_file($) { |
|
494 } |
|
495 } |
|
496 |
|
497 - my $url; |
|
498 - ($url, $dir) = mirror_feed ($dir); |
|
499 - |
|
500 - if ($url) { |
|
501 - $use_spotlight_p = 0; |
|
502 - print STDERR "$progname: $dir is cache for $url\n" if ($verbose > 1); |
|
503 - } |
|
504 - |
|
505 @all_files = read_cache ($dir); |
|
506 |
|
507 if ($#all_files >= 0) { |
|
508 @@ -673,7 +349,7 @@ sub find_random_file($) { |
|
509 |
|
510 write_cache ($dir); |
|
511 |
|
512 -# @all_files = sort(@all_files); |
|
513 + @all_files = sort(@all_files); |
|
514 |
|
515 if ($#all_files < 0) { |
|
516 print STDERR "$progname: no files in $dir\n"; |
|
517 @@ -686,9 +362,6 @@ sub find_random_file($) { |
|
518 my $n = int (rand ($#all_files + 1)); |
|
519 my $file = $all_files[$n]; |
|
520 if (large_enough_p ($file)) { |
|
521 - if (! $url) { |
|
522 - $file =~ s@^\Q$dir\L/@@so || die; # remove $dir from front |
|
523 - } |
|
524 return $file; |
|
525 } |
|
526 } |
|
527 @@ -699,7 +372,7 @@ sub find_random_file($) { |
|
528 } |
|
529 |
|
530 |
|
531 -sub large_enough_p($) { |
|
532 +sub large_enough_p { |
|
533 my ($file) = @_; |
|
534 |
|
535 my ($w, $h) = image_file_size ($file); |
|
536 @@ -726,7 +399,7 @@ sub large_enough_p($) { |
|
537 |
|
538 # Given the raw body of a GIF document, returns the dimensions of the image. |
|
539 # |
|
540 -sub gif_size($) { |
|
541 +sub gif_size { |
|
542 my ($body) = @_; |
|
543 my $type = substr($body, 0, 6); |
|
544 my $s; |
|
545 @@ -738,7 +411,7 @@ sub gif_size($) { |
|
546 |
|
547 # Given the raw body of a JPEG document, returns the dimensions of the image. |
|
548 # |
|
549 -sub jpeg_size($) { |
|
550 +sub jpeg_size { |
|
551 my ($body) = @_; |
|
552 my $i = 0; |
|
553 my $L = length($body); |
|
554 @@ -789,7 +462,7 @@ sub jpeg_size($) { |
|
555 |
|
556 # Given the raw body of a PNG document, returns the dimensions of the image. |
|
557 # |
|
558 -sub png_size($) { |
|
559 +sub png_size { |
|
560 my ($body) = @_; |
|
561 return () unless ($body =~ m/^\211PNG\r/s); |
|
562 my ($bits) = ($body =~ m/^.{12}(.{12})/s); |
|
563 @@ -803,7 +476,7 @@ sub png_size($) { |
|
564 # Given the raw body of a GIF, JPEG, or PNG document, returns the dimensions |
|
565 # of the image. |
|
566 # |
|
567 -sub image_size($) { |
|
568 +sub image_size { |
|
569 my ($body) = @_; |
|
570 return () if (length($body) < 10); |
|
571 my ($w, $h) = gif_size ($body); |
|
572 @@ -816,17 +489,17 @@ sub image_size($) { |
|
573 |
|
574 # Returns the dimensions of the image file. |
|
575 # |
|
576 -sub image_file_size($) { |
|
577 +sub image_file_size { |
|
578 my ($file) = @_; |
|
579 - my $in; |
|
580 - if (! open ($in, '<', $file)) { |
|
581 + local *IN; |
|
582 + if (! open (IN, "<$file")) { |
|
583 print STDERR "$progname: $file: $!\n" if ($verbose); |
|
584 return undef; |
|
585 } |
|
586 - binmode ($in); # Larry can take Unicode and shove it up his ass sideways. |
|
587 + binmode (IN); # Larry can take Unicode and shove it up his ass sideways. |
|
588 my $body = ''; |
|
589 - sysread ($in, $body, 1024 * 50); # The first 50k should be enough. |
|
590 - close $in; # (It's not for certain huge jpegs... |
|
591 + sysread (IN, $body, 1024 * 50); # The first 50k should be enough. |
|
592 + close IN; # (It's not for certain huge jpegs... |
|
593 return image_size ($body); # but we know they're huge!) |
|
594 } |
|
595 |
|
596 @@ -837,19 +510,15 @@ sub error($) { |
|
597 exit 1; |
|
598 } |
|
599 |
|
600 -sub usage() { |
|
601 +sub usage { |
|
602 print STDERR "usage: $progname [--verbose] directory\n" . |
|
603 " Prints the name of a randomly-selected image file. The directory\n" . |
|
604 " is searched recursively. Images smaller than " . |
|
605 - "${min_image_width}x${min_image_height} are excluded.\n" . |
|
606 - "\n" . |
|
607 - " The directory may also be the URL of an RSS/Atom feed. Enclosed\n" . |
|
608 - " images will be downloaded cached locally.\n" . |
|
609 - "\n"; |
|
610 + "${min_image_width}x${min_image_height} are excluded.\n"; |
|
611 exit 1; |
|
612 } |
|
613 |
|
614 -sub main() { |
|
615 +sub main { |
|
616 my $dir = undef; |
|
617 |
|
618 while ($_ = $ARGV[0]) { |
|
619 @@ -868,18 +537,11 @@ sub main() { |
|
620 |
|
621 usage unless (defined($dir)); |
|
622 |
|
623 - $dir =~ s@^feed:@http:@si; |
|
624 - |
|
625 - if ($dir =~ m/^https?:/si) { |
|
626 - # ok |
|
627 - } else { |
|
628 - $dir =~ s@^~/@$ENV{HOME}/@s; # allow literal "~/" |
|
629 - $dir =~ s@/+$@@s; # omit trailing / |
|
630 + $dir =~ s@^~/@$ENV{HOME}/@s; # allow literal "~/" |
|
631 |
|
632 - if (! -d $dir) { |
|
633 - print STDERR "$progname: $dir: not a directory or URL\n"; |
|
634 - usage; |
|
635 - } |
|
636 + if (! -d $dir) { |
|
637 + print STDERR "$progname: $dir: not a directory\n"; |
|
638 + usage; |
|
639 } |
|
640 |
|
641 my $file = find_random_file ($dir); |
|
642 |
|