php/skripty/wmvbot.php
<?php
//Jyxo seek
//<-Harvie (2oo7)
//Scrapes a page for http:// links and harvests file URLs with a given extension.

// Fetch $url and return every absolute http:// link to a .com host found in it.
function url_bot($url) {
    $html = file_get_contents($url);
    if($html === false) { return array(); } // page could not be fetched
    // Split the page on every "http://" so each chunk starts with a URL candidate.
    $chunks = preg_split('~http://~i', $html);
    //print_r($chunks);
    $urls = array();
    foreach($chunks as $chunk) {
        $chunk = preg_split('/[> ]/', $chunk); // the URL ends at the first '>' or space
        //print_r($chunk);
        $chunk = $chunk[0];
        if(stripos($chunk, '.com') !== false) { // keep .com links only
            $urls[] = 'http://'.$chunk;
        }
    }
    //print_r($urls);
    return $urls;
}

// Visit every link returned by url_bot() and collect the URLs on those pages
// that contain $ext (e.g. ".wmv"). Results accumulate across all visited pages.
function wmv_bot($url, $ext) {
    $urlz = array();
    foreach(url_bot($url) as $page) {
        $html = @file_get_contents($page);
        if($html === false) { continue; } // skip pages that fail to load
        $chunks = preg_split('~http://~i', $html);
        //print_r($chunks);
        foreach($chunks as $chunk) {
            $chunk = explode('>', $chunk); // the URL ends at the first '>'
            $chunk = $chunk[0];
            // Accept only clean .com links (no spaces or quotes) with the wanted extension.
            if(stripos($chunk, '.com') !== false && stripos($chunk, $ext) !== false && !preg_match('/[ "]/', $chunk)) {
                $urlz[] = 'http://'.$chunk;
            }
        }
        //print_r($urlz); // debug: dump what has been collected so far
    }
    return $urlz;
}

// Download every URL in $results into $download_dir; existing files are not overwritten.
function download_url_array($results, $download_dir) {
    $fails = 0; $done = 0;
    echo("\nDownloading to $download_dir ...\n");
    if(!is_dir($download_dir)) {
        echo("Creating directory: $download_dir\n\n");
        mkdir($download_dir);
    } else { echo("\n"); }
    foreach($results as $id => $url) {
        $file = url2filename($url);
        echo("Downloading $file (#$id)... ");
        $file = $download_dir.DIRECTORY_SEPARATOR.$file;
        if(!is_file($file) && @copy($url, $file)) {
            echo("Done.\n");
            $done++;
        } else {
            if(is_file($file)) { echo("File already exists "); }
            echo("Failed!\n");
            $fails++;
        }
    }
    $totaldls = $fails + $done;
    echo("\n$done/$totaldls files successfully downloaded to $download_dir ($fails failed)\n");
}

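// Note: url2filename() is called above but not defined anywhere in this file.
// A minimal sketch follows, assuming the intended behaviour is to take the
// last path component of the URL as the local file name.
function url2filename($url) {
    // e.g. "http://example.com/videos/clip.wmv" -> "clip.wmv"
    return basename((string)parse_url($url, PHP_URL_PATH));
}
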
///CODE//////////////////////////////////////////////////////////////////////////////////////
$results = wmv_bot("http://www.loliti.com/cz/m/bonde19e_s_vodou_0.php", ".wmv");
print_r($results);
//download_url_array($results, "X:\\wmvBot"); //Automatic Download

?>