updated
author: Christian Urban <christian dot urban at kcl dot ac dot uk>
Tue, 20 Sep 2016 12:17:01 +0100
changeset 421 7a04f2c532c1
parent 420 25bc57b32efa
child 423 e3acf2bf3895
updated
progs/crawler1.scala
progs/crawler2.scala
--- a/progs/crawler1.scala	Tue Sep 20 12:13:11 2016 +0100
+++ b/progs/crawler1.scala	Tue Sep 20 12:17:01 2016 +0100
@@ -32,8 +32,8 @@
 }
 
 // some starting URLs for the crawler
-//val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc"""
-val startURL = """http://www.inf.kcl.ac.uk/staff/mcburney"""
+val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc"""
+//val startURL = """http://www.inf.kcl.ac.uk/staff/mcburney"""
 
 crawl(startURL, 2)
 
--- a/progs/crawler2.scala	Tue Sep 20 12:13:11 2016 +0100
+++ b/progs/crawler2.scala	Tue Sep 20 12:17:01 2016 +0100
@@ -13,7 +13,7 @@
 
 // regexes for URLs and "my" domain
 val http_pattern = """"https?://[^"]*"""".r
-val my_urls = """urbanc""".r (*@\label{myurlline}@*) 
+val my_urls = """urbanc""".r       (*@\label{myurlline}@*) 
 
 def unquote(s: String) = s.drop(1).dropRight(1)
 
@@ -21,11 +21,11 @@
   http_pattern.findAllIn(page).map(unquote).toSet
 
 def crawl(url: String, n: Int) : Unit = {
-  if (n == 0) () (*@\label{changestartline}@*) 
+  if (n == 0) ()                   (*@\label{changestartline}@*) 
   else if (my_urls.findFirstIn(url) == None) { 
     println(s"Visiting: $n $url")
     get_page(url); () 
-  } (*@\label{changeendline}@*) 
+  }                                (*@\label{changeendline}@*) 
   else {
     println(s"Visiting: $n $url")
     for (u <- get_all_URLs(get_page(url))) crawl(u, n - 1)