Changes between the two versions of the crawler script, shown as a unified diff: lines marked - appear only in the old version, lines marked + only in the new one, and unmarked lines are common to both.
@@ -1,18 +1,22 @@
+// A crawler which checks whether there
+// are problems with links in web-pages
+
 import io.Source
 import scala.util.matching.Regex
 import scala.util._
 
-// gets the first ~10K of a page
+// gets the first ~10K of a web-page
 def get_page(url: String) : String = {
   Try(Source.fromURL(url).take(10000).mkString) getOrElse
     { println(s" Problem with: $url"); ""}
 }
 
 // regex for URLs
 val http_pattern = """\"https?://[^\"]*\"""".r
 
+// drops the first and last character from a string
 def unquote(s: String) = s.drop(1).dropRight(1)
 
 def get_all_URLs(page: String) : Set[String] = {
   (http_pattern.findAllIn(page)).map { unquote(_) }.toSet
 }
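Both files elide their middle part at this point (old lines 19-29, new lines 23-33); that part holds the crawl function used by the calls at the bottom of the script, and its body is not shown in this diff. Purely as an assumption, and not the omitted code, a bounded-depth crawler built from get_page and get_all_URLs could look roughly like this:

// sketch only (assumed, not the elided original): fetch a page,
// extract its links and recurse until the depth counter n reaches 0
def crawl(url: String, n: Int) : Unit = {
  if (n == 0) ()
  else {
    println(s"Visiting: $n $url")
    for (u <- get_all_URLs(get_page(url))) crawl(u, n - 1)
  }
}

The depth bound keeps the recursion finite, and get_page already turns unreachable URLs into the empty string, so broken links would only surface through the printed "Problem with:" messages. The second hunk, below, only touches the calls at the bottom of the script.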
@@ -30,9 +34,7 @@
 // starting URL for the crawler
 val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc/"""
 //val startURL = """http://www.inf.kcl.ac.uk/staff/mml/"""
 
 
-// call on the command line
 crawl(startURL, 2)
 
-crawl("""http://www.inf.kcl.ac.uk/staff/urbanc/bsc-projects-13.html""", 2)
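As a quick check of the extraction helpers, the fragment below is made up for illustration and assumes the definitions above are in scope (for instance in the Scala REPL):

// illustration only: a made-up HTML fragment with two quoted links
val sample = """<a href="http://www.inf.kcl.ac.uk/">home</a>
                <a href="https://example.org/x">x</a>"""

// http_pattern matches the quoted URLs including their quotes and
// unquote strips them, so this prints (up to element order)
//   Set(http://www.inf.kcl.ac.uk/, https://example.org/x)
println(get_all_URLs(sample))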