author       Christian Urban <christian dot urban at kcl dot ac dot uk>
date         Tue, 24 Sep 2013 01:56:53 +0100
changeset    96:9fcd3de53c06
parent       95:dbe49327b6c5
child        100:cbc2270c2938
permissions  -rw-r--r--

import io.Source
import scala.util.matching.Regex
import scala.util._

// gets the first ~10K of a page
def get_page(url: String) : String =
  Try(Source.fromURL(url).take(10000).mkString) getOrElse
    { println(s" Problem with: $url"); "" }
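
// Try wraps the network access: if Source.fromURL throws (dead
// link, timeout, malformed URL), getOrElse falls back to the
// block that prints a warning and returns the empty string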

// starting URL for the crawler
val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc/"""

// regex for URLs
val http_pattern = """\"https?://[^\"]*\"""".r
val my_urls = """urbanc""".r
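// my_urls restricts the crawl: only URLs containing "urbanc"
// are followed (see crawl below)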

def unquote(s: String) = s.drop(1).dropRight(1)
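// matches of http_pattern include the surrounding double quotes,
// so unquote strips the first and last character to recover the
// bare URL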

def get_all_URLs(page: String) : Set[String] = {
  http_pattern.findAllIn(page).map(unquote).toSet
}
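// for example, get_all_URLs("""<a href="http://foo.com/">x</a>""")
// yields Set("http://foo.com/"): the pattern matches the quoted
// URL and unquote removes the quotes (URL is hypothetical, for
// illustration only)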

// naive version - searches until a given depth
// visits pages potentially more than once
def crawl(url: String, n: Int) : Unit = {
  if (n == 0) ()
  else if (my_urls.findFirstIn(url) == None) ()
  else {
    println(s"Visiting: $n $url")
    for (u <- get_all_URLs(get_page(url))) crawl(u, n - 1)
  }
}
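
// A minimal sketch (not part of the original script) of a variant
// that remembers visited pages, so each page is fetched at most
// once; it returns the set of URLs visited so far
def crawl_once(url: String, n: Int, seen: Set[String] = Set()) : Set[String] = {
  if (n == 0 || seen.contains(url) || my_urls.findFirstIn(url) == None) seen
  else {
    println(s"Visiting: $n $url")
    get_all_URLs(get_page(url)).foldLeft(seen + url)((s, u) => crawl_once(u, n - 1, s))
  }
}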

// can now deal with depth 3
// start on command line
crawl(startURL, 4)
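// starts the crawl at startURL; n = 4 permits following links up
// to four levels deep (the counter counts down to 0)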