// This version of the crawler also
// "harvests" email addresses from webpages

import io.Source
import scala.util.matching.Regex
import scala.util._

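// Note (assumption about the Scala version in use): on Scala 2.13 or
// later, the .par call in crawl below needs the separate
// scala-parallel-collections library together with
//   import scala.collection.parallel.CollectionConverters._
// On Scala 2.12 and earlier, .par works out of the box.
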
// fetches the contents of a URL (at most 10000 characters);
// returns the empty string if anything goes wrong
def get_page(url: String) : String = {
  Try(Source.fromURL(url)("ISO-8859-1").take(10000).mkString).
    getOrElse { println(s"  Problem with: $url"); ""}
}

// regexes for URLs and for email addresses
val http_pattern = """"https?://[^"]*"""".r
val email_pattern = """([a-z0-9_\.-]+)@([\da-z\.-]+)\.([a-z\.]{2,6})""".r /*@\label{emailline}@*/

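// a quick sanity check of the email regex (hypothetical address):
//   email_pattern.findAllIn("mail jane.doe@example.org").toList
// returns List("jane.doe@example.org")
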
// strips the double quotes around a matched URL
def unquote(s: String) = s.drop(1).dropRight(1)

// extracts all URLs from a page (as a set, to avoid duplicates)
def get_all_URLs(page: String) : Set[String] =
  http_pattern.findAllIn(page).map(unquote).toSet

// prints a string, except when it is empty
def print_str(s: String) =
  if (s == "") () else println(s)

// the main crawler function: prints the email addresses found on a
// page, then recursively follows all URLs on that page, up to depth n
def crawl(url: String, n: Int) : Unit = {
  if (n == 0) ()
  else {
    println(s"  Visiting: $n $url")
    val page = get_page(url)
    print_str(email_pattern.findAllIn(page).mkString("\n")) /*@\label{mainline}@*/
    for (u <- get_all_URLs(page).par) crawl(u, n - 1)
  }
}

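// Design note: .par turns the set of URLs into a parallel collection,
// so the recursive calls run concurrently; the order of the printed
// output therefore differs from run to run.
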
// starting URL for the crawler
val startURL = """https://nms.kcl.ac.uk/christian.urban/"""

crawl(startURL, 3)
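
// Assuming this file is saved as crawler.scala, one way to run it is
// as a Scala 2 script:
//
//   scala crawler.scala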