// imports this excerpt relies on (they would normally sit at the
// top of the file, before this section)
import io.Source
import scala.util._

// fetches (at most) the first 10000 characters of a web-page,
// falling back to "" if the page cannot be accessed
// (the header is reconstructed from the call get_page(url) below)
def get_page(url: String) : String = {
  Try(Source.fromURL(url).take(10000).mkString).getOrElse {
    println(s" Problem with: $url"); ""
  }
}

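// on an unreachable address (hypothetical example), get_page prints
// the warning and evaluates to "" instead of throwing, e.g.
//   get_page("http://no.such.host.invalid/")   // => ""
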
// regex for URLs
val http_pattern = """"https?://[^"]*"""".r

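// quick check (hypothetical HTML fragment, not from the original
// script): the pattern matches the URL together with its quotes
assert(http_pattern.findFirstIn("""<a href="https://example.com">""")
         == Some(""""https://example.com""""))
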
// drops the first and last character from a string
def unquote(s: String) = s.drop(1).dropRight(1)

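// quick check: unquote strips the quotes the regex leaves in place
assert(unquote(""""https://example.com"""") == "https://example.com")
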
def get_all_URLs(page: String) : Set[String] = {
  http_pattern.findAllIn(page).map(unquote).toSet
}

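// quick check (hypothetical page fragment): all quoted links are
// collected into a set, with their quotes removed
assert(get_all_URLs("""<a href="http://a.org">x</a> <a href="http://b.org">""")
         == Set("http://a.org", "http://b.org"))
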
// naive version of crawl - searches until a given depth,
// visits pages potentially more than once
def crawl(url: String, n: Int) : Unit = {
  if (n == 0) ()
  else {
    println(s"Visiting: $n $url")
    for (u <- get_all_URLs(get_page(url))) crawl(u, n - 1)
  }
}

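// a possible refinement (a sketch, not part of the original script):
// thread a set of already-visited URLs through the recursion so that
// no page is fetched twice; the name crawl_once is made up here
def crawl_once(url: String, n: Int, seen: Set[String] = Set()) : Set[String] = {
  if (n == 0 || seen.contains(url)) seen
  else {
    println(s"Visiting: $n $url")
    // fold the growing visited set through the recursive calls
    get_all_URLs(get_page(url)).foldLeft(seen + url) {
      (s, u) => crawl_once(u, n - 1, s)
    }
  }
}
// usage: crawl_once(startURL, 2) returns the set of visited URLs
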
// some starting URLs for the crawler
val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc"""
//val startURL = """http://www.inf.kcl.ac.uk/staff/mcburney"""

crawl(startURL, 2)