// This version of the crawler only
// checks links in the "domain" urbanc

import io.Source
import scala.util.matching.Regex
import scala.util._

// gets the first 10K of a web-page
def get_page(url: String) : String = {
  Try(Source.fromURL(url)("ISO-8859-1").take(10000).mkString).
    getOrElse { println(s" Problem with: $url"); ""}
}
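
// A minimal sketch of the Try(...).getOrElse(...) pattern used above, on a
// computation that needs no network access (the string and the default -1
// are made up for illustration): the failing conversion is caught by Try
// and replaced by the default instead of throwing an exception.
println(Try("no number".toInt).getOrElse(-1))   // prints -1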

// regexes for URLs and "my" domain
val http_pattern = """"https?://[^"]*"""".r
val my_urls = """urbanc""".r     /*@\label{myurlline}@*/

// removes the enclosing double-quotes from a matched URL
def unquote(s: String) = s.drop(1).dropRight(1)

// collects all (unquoted) URLs of a page into a set
def get_all_URLs(page: String) : Set[String] =
  http_pattern.findAllIn(page).map(unquote).toSet
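
// A quick sanity check of http_pattern, unquote and get_all_URLs on a made-up
// HTML snippet (no real page is fetched): the pattern matches the URL together
// with its double-quotes, which unquote then strips off.
println(get_all_URLs("""<a href="http://www.inf.kcl.ac.uk/staff/urbanc">me</a>"""))
// prints Set(http://www.inf.kcl.ac.uk/staff/urbanc)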

// naive crawler: visits pages up to depth n; pages outside the
// "domain" (i.e. not matching my_urls) are fetched, but their
// links are not followed any further
def crawl(url: String, n: Int) : Unit = {
  if (n == 0) ()                               /*@\label{changestartline}@*/
  else if (my_urls.findFirstIn(url) == None) {
    println(s"Visiting: $n $url")
    get_page(url); ()
  }                                            /*@\label{changeendline}@*/
  else {
    println(s"Visiting: $n $url")
    for (u <- get_all_URLs(get_page(url))) crawl(u, n - 1)
  }
}
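
// A small sketch of the "domain" test used in the second branch above (both
// URLs are just examples): only the first one contains "urbanc" and would
// therefore have its links followed.
println(my_urls.findFirstIn("http://www.inf.kcl.ac.uk/staff/urbanc") != None)  // prints true
println(my_urls.findFirstIn("http://www.scala-lang.org") != None)              // prints false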

// starting URL for the crawler
//val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc"""   // alternative start page
val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc/bsc-projects-16.html"""

// can now deal with depth 3 and beyond
crawl(startURL, 2)
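
// Thanks to the domain restriction, larger depths are also feasible; a
// commented-out example (left inactive so the script keeps crawling only
// to depth 2 by default):
//
//   crawl(startURL, 3)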