author | Christian Urban <christian dot urban at kcl dot ac dot uk> |
Tue, 24 Sep 2013 01:12:36 +0100 | |
changeset 95 | dbe49327b6c5 |
parent 93 | 4794759139ea |
child 96 | 9fcd3de53c06 |
permissions | -rw-r--r-- |
7 | 1 |
import io.Source |
2 |
import scala.util.matching.Regex |
|
3 |
||
4 |
// Fetches a web page and returns (at most) its first ~10K characters.
// On any recoverable failure (bad URL, unreachable host, read error) it
// prints a warning and returns the empty string, so the crawler can
// simply skip the page and carry on.
def get_page(url: String) : String = {
  try {
    val src = Source.fromURL(url)
    // close the underlying stream even when take/mkString throws mid-read
    try src.take(10000).mkString
    finally src.close()
  }
  catch {
    // NonFatal (not Throwable): let OutOfMemoryError, InterruptedException,
    // etc. propagate instead of being silently swallowed
    case NonFatal(_) => {
      println(s" Problem with: $url")
      ""
    }
  }
}
|
16 |
||
17 |
// starting URL for the crawler
val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc/"""

// regexes used by the crawler:
//   http_pattern  - a double-quoted http(s) URL, quotes included in the match
//   my_urls       - pages "belonging" to the author (used by the commented-out
//                   filter in crawl)
//   email_pattern - a plausible email address; taken from
//                   http://net.tutsplus.com/tutorials/other/8-regular-expressions-you-should-know/
val http_pattern = """\"https?://[^\"]*\"""".r
val my_urls = """urbanc""".r
val email_pattern = """([a-z0-9_\.-]+)@([\da-z\.-]+)\.([a-z\.]{2,6})""".r
|
26 |
||
27 |
// strips the first and last character of a string — here, the surrounding
// double quotes of a URL matched by http_pattern (safe on strings of
// length <= 2: yields "")
def unquote(s: String) = s.slice(1, s.length - 1)
|
28 |
||
29 |
// collects every quoted http(s) URL occurring in a page, with the
// surrounding quotes stripped; a Set, so duplicates are collapsed
def get_all_URLs(page: String) : Set[String] =
  http_pattern.findAllIn(page).map(unquote).toSet
|
32 |
||
33 |
// naive version - searches until a given depth
// visits pages potentially more than once
//
// Fetches url, prints any email addresses found on it, and recurses into
// every URL on the page with the depth decremented.
def crawl(url: String, n: Int) : Unit = {
  // n <= 0 (not n == 0): a negative depth would otherwise never reach the
  // base case and recurse without bound
  if (n <= 0) ()
  //else if (my_urls.findFirstIn(url) == None) ()
  else {
    println(s"Visiting: $n $url")
    val page = get_page(url)
    println(email_pattern.findAllIn(page).mkString("\n"))
    for (u <- get_all_URLs(page)) crawl(u, n - 1)
  }
}
|
45 |
||
46 |
// entry point: kick off the crawl from startURL, descending three levels
// (run this script from the command line)
crawl(startURL, 3)
|
49 |