# HG changeset patch
# User Christian Urban
# Date 1474370221 -3600
# Node ID 7a04f2c532c1c70d490aa43e5b767ab520f2303c
# Parent  25bc57b32efa0eaf077a472e497b82636d91647c
updated

diff -r 25bc57b32efa -r 7a04f2c532c1 progs/crawler1.scala
--- a/progs/crawler1.scala	Tue Sep 20 12:13:11 2016 +0100
+++ b/progs/crawler1.scala	Tue Sep 20 12:17:01 2016 +0100
@@ -32,8 +32,8 @@
 }
 
 // some starting URLs for the crawler
-//val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc"""
-val startURL = """http://www.inf.kcl.ac.uk/staff/mcburney"""
+val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc"""
+//val startURL = """http://www.inf.kcl.ac.uk/staff/mcburney"""
 
 crawl(startURL, 2)
 
diff -r 25bc57b32efa -r 7a04f2c532c1 progs/crawler2.scala
--- a/progs/crawler2.scala	Tue Sep 20 12:13:11 2016 +0100
+++ b/progs/crawler2.scala	Tue Sep 20 12:17:01 2016 +0100
@@ -13,7 +13,7 @@
 
 // regexes for URLs and "my" domain
 val http_pattern = """"https?://[^"]*"""".r
-val my_urls = """urbanc""".r       (*@\label{myurlline}@*)
+val my_urls = """urbanc""".r       (*@\label{myurlline}@*)
 
 def unquote(s: String) = s.drop(1).dropRight(1)
 
@@ -21,11 +21,11 @@
   http_pattern.findAllIn(page).map(unquote).toSet
 
 def crawl(url: String, n: Int) : Unit = {
-  if (n == 0) ()    (*@\label{changestartline}@*)
+  if (n == 0) ()    (*@\label{changestartline}@*)
   else if (my_urls.findFirstIn(url) == None) {
     println(s"Visiting: $n $url")
     get_page(url); ()
-  }    (*@\label{changeendline}@*)
+  }    (*@\label{changeendline}@*)
   else {
     println(s"Visiting: $n $url")
    for (u <- get_all_URLs(get_page(url))) crawl(u, n - 1)