  { println(s" Problem with: $url"); ""}
}

// regexes for URLs and "my" domain
val http_pattern = """"https?://[^"]*"""".r
val my_urls = """urbanc""".r (*@\label{myurlline}@*)

def unquote(s: String) = s.drop(1).dropRight(1)

def get_all_URLs(page: String) : Set[String] =
  http_pattern.findAllIn(page).map(unquote).toSet

def crawl(url: String, n: Int) : Unit = {
  if (n == 0) () (*@\label{changestartline}@*)
  else if (my_urls.findFirstIn(url) == None) {
    println(s"Visiting: $n $url")
    get_page(url); ()
  } (*@\label{changeendline}@*)
  else {
    println(s"Visiting: $n $url")
    for (u <- get_all_URLs(get_page(url))) crawl(u, n - 1)
  }
}
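
// A possible way to start the crawler, assuming a startURL value
// (for instance a page inside the "my" domain matched by my_urls)
// is defined elsewhere in the file; the second argument bounds the
// recursion depth:
//
//   crawl(startURL, 2)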