import io.Source
import scala.util.matching.Regex

// gets the first ~10K of a page
def get_page(url: String) : String = {
  try {
    Source.fromURL(url).take(10000).mkString
  }
  catch {
    // an untyped `case e =>` is deprecated and would also catch fatal
    // JVM errors; restrict the handler explicitly
    case _ : Throwable => {
      println("  Problem with: " + url)
      ""
    }
  }
}
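
// behaviour sketch (the host name below is made up, not part of the
// original script): a failing fetch prints the warning and evaluates to ""
//
//   get_page("http://no-such-host.invalid/")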

// starting URL for the crawler
val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc/"""

// regex patterns for URLs and email addresses
val http_pattern = """\"https?://[^\"]*\"""".r
val my_urls = """urbanc""".r
val email_pattern = """([a-z0-9_\.-]+)@([\da-z\.-]+)\.([a-z\.]{2,6})""".r
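
// quick demonstration (the address is made up; this check is an addition,
// not part of the original script) -- the pattern also accepts multi-part
// domains such as .ac.uk
println(email_pattern.findFirstIn("jane.doe@example.ac.uk"))
// prints: Some(jane.doe@example.ac.uk)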

// http://net.tutsplus.com/tutorials/other/8-regular-expressions-you-should-know/

// strips the enclosing double quotes from a matched URL
def unquote(s: String) = s.drop(1).dropRight(1)

def get_all_URLs(page: String) : Set[String] = {
  (http_pattern.findAllIn(page)).map { unquote(_) }.toSet
}
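
// sanity check (the HTML snippet is made up, not from the crawled site):
// http_pattern finds the quoted link and unquote removes the quotes
println(get_all_URLs("""<a href="http://example.com/">link</a>"""))
// prints: Set(http://example.com/)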

// naive version - searches until a given depth;
// visits pages potentially more than once
def crawl(url: String, n: Int) : Unit = {
  if (n == 0) ()
  // uncomment the next line to restrict the crawl to URLs matching my_urls
  //else if (my_urls.findFirstIn(url) == None) ()
  else {
    println("Visiting: " + n + " " + url)
    val page = get_page(url)
    println(email_pattern.findAllIn(page).mkString("\n"))
    for (u <- get_all_URLs(page)) crawl(u, n - 1)
  }
}
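
// a sketch of a variant that avoids revisits (an addition, not part of the
// original script; the name crawl_once is made up): the set of already-seen
// URLs is threaded through the recursion and returned
def crawl_once(url: String, n: Int, seen: Set[String] = Set()) : Set[String] = {
  if (n == 0 || seen.contains(url)) seen
  else {
    println("Visiting: " + n + " " + url)
    val page = get_page(url)
    get_all_URLs(page).foldLeft(seen + url) { (s, u) => crawl_once(u, n - 1, s) }
  }
}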

// can now deal with depth 3
// start on command line
crawl(startURL, 3)