#!/usr/bin/php
<?php
//HSp33der Web Crawler 5.3
//<-Harvie 2oo7
/*
 * Description:
 * This script crawls the web and prints found URLs to STDOUT.
 *
 * Installation:
 * - Debian:
 * # apt-get install php5-cli
 * $ chmod +x crawler.php
 * $ ./crawler.php (or $ php crawler.php)
 * - Windows:
 * c:/>path/to/php5/php.exe crawler.php
 * (you can add php.exe to PATH or associate .php files with it)
 *
 * Notes:
 * At the moment there are a few bugs in PHP5,
 * so this crawler may freeze after a few minutes.
 * Check this for more info: http://bugs.php.net/bug.php?id=43098
 */
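
//Tip: found URLs are appended to $file as-is, so the list may contain
//duplicates; e.g. $ sort -u urls.txt prints a deduplicated list.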

///SETTINGS
//Basic
$seed = 'http://www.secunia.com/'; //Start crawling from this page
$file = 'urls.txt'; //File to save crawled URLs (run it through sort -u to make the list unique)
$delete_url_db = true; //Delete the file with saved URLs before crawling
//Advanced
$max_size = 10000; //How many bytes to download from each page
$max_urls = 30; //Size of URL buffer (prebuffered URLs to crawl; when full, no more URLs are queued)
$buffer_increase = false; //Increase buffer size on buffer underrun?
$random_url = false; //Select random URLs from each page? (this may bypass the URL filter)
$history_max = 1000; //How many recent URLs to keep in history (historied URLs will not be crawled again)
//Misc.
ini_set('default_socket_timeout', 1); //How long to wait for a webserver (seconds)
set_time_limit(0); //How long to run (seconds; 0 == infinite)
$debug = false; //Use debugging mode? (print errors and statistics)

//Filter
$eregi_url_blacklist = array(
    '(W3\.org|W3C)', //Hell knows...
    '(shop|xxx|porn|lesbian|hot)', //Commercial sites
    '(google|209.85.135.104|amazon.com|youtube.com)', //Big sites
    '(seznam.cz|centrum.cz|atlas.cz|zoznam.sk|quick.cz)', //Big local sites
    '.\.(css|ico|gif|jpg|png|bmp|cgi|js|vbs)', //Misc. webpage content
    '.\.(avi|mpg|mpeg|mov|wmv|wm|mp3|ogg|wma)', //Multimedia files
    '.\.(pdf|swf|flv|cfm)', //Documents and Flash
    '.\.(exe|zip|rar|gz|bz|bz2|tar)' //Big and binary files
);
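//Note: eregi() matches case-insensitively anywhere in the URL, so e.g.
//'http://example.com/logo.GIF' is caught by the webpage-content rule above.
//eregi() is deprecated since PHP 5.3 and removed in PHP 7; an equivalent
//there would be preg_match('~'.$black_url.'~i', $url).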

function check_crawl_url($url) { //Determine if a URL is wanted (1) or should be skipped (0)
    foreach($GLOBALS['eregi_url_blacklist'] as $black_url) {
        if(eregi($black_url, $url)) return(0); //Blacklisted
    }
    if(in_array($url, $GLOBALS['urls'])) return(0); //Already queued
    $file_url = explode('?', $url, 2); $file_url = $file_url[0]; //Strip the query string
    if(in_array($file_url, $GLOBALS['history'])) return(0); //Crawled recently
    return(1);
}
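//Illustrative examples (with the default blacklist, empty queue and history):
// check_crawl_url('http://example.com/style.css') == 0 //blacklisted extension
// check_crawl_url('http://example.com/index.html') == 1 //wanted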

///CODE
if($delete_url_db) @unlink($file);
if($debug) { $u = 0; $c = 0; $t = time(); }
$history = array(); //History of recently crawled URLs
$urls = array(); //Queue (buffer) of URLs waiting to be crawled
$fp = fopen($file, 'a+');
while(true) {
    if(sizeof($urls) <= 0) { //Buffer underrun - reseed the queue
        $urls = array($seed);
        if($buffer_increase) $max_urls++; //Increase size of buffer by one
        if($debug) echo("!Buffer underrun! !Buffer size is: $max_urls!\n");
    }

    $url = array_shift($urls);
    if($debug) echo("-Parsing: $url\n");
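    //Download up to $max_size bytes and extract absolute http:// links.
    //Note: the pattern only accepts hosts with 2-4 letter TLDs and lowercase
    //path/query characters; https:// links are never followed.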
    preg_match_all('(http:\/\/[_a-zA-Z0-9\.\-]+\.[a-zA-Z]{2,4}\/{1}[-_~&=\ ?\.a-z0-9\/]*)',
        htmlspecialchars_decode(@file_get_contents($url, false, null, 0, $max_size)), $new_urls);
    $new_urls = $new_urls[0];

    foreach($new_urls as $new_url) {
        if($debug) $c++; ///Debug
        if(check_crawl_url($new_url)) {
            echo($new_url."\n");
            fwrite($fp, $new_url."\n");
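            //Remember this URL without its query string; $history is a FIFO
            //capped at $history_max entries, so old URLs eventually expire.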
            $file_url = explode('?', $new_url, 2); $file_url = $file_url[0];
            array_push($history, $file_url);
            while(sizeof($history) > $history_max) @array_shift($history);
            if($random_url) { if(sizeof($urls) < $max_urls) array_push($urls, $new_urls[rand(0, sizeof($new_urls)-1)]); }
            else { if(sizeof($urls) < $max_urls) array_push($urls, $new_url); }
            if($debug) { ///Debug Block
                $u++;
                $uspeed = round($u/(time()-$t+1), 2);
                $cspeed = round($c/(time()-$t+1), 2);
                $time = round((time()-$t)/60, 1);
                $buffered = sizeof($urls);
                $historied = sizeof($history);
                echo("+$u URLs; $c Downloaded; UpTime: $time mins; Buffered: $buffered URLs; History: $historied URLs; Speed: $uspeed URLs/s, $cspeed Downloads/s\n");
            }
        }
    }
    unset($new_urls); //Memory cleanup
}

##EOF