progs/crawler1.scala
changeset 242 35104ee14f87
parent 112 95ee5cc5c05d
child 254 dcd4688690ce

// A crawler which checks whether there are
// dead links in web-pages

import io.Source
import scala.util.matching.Regex
import scala.util._

// gets the first 10K of a web-page
def get_page(url: String) : String = {
  Try(Source.fromURL(url).take(10000).mkString) getOrElse
    { println(s"  Problem with: $url"); ""}
}
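
// Usage sketch (added, not in the original file; the URL is just the
// startURL from further down): on any fetch failure, the Try above
// falls back to the empty string instead of throwing
//
//   val page = get_page("""http://www.inf.kcl.ac.uk/staff/urbanc""")
//   if (page == "") println("  could not fetch the page")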

// regex for URLs
val http_pattern = """"https?://[^"]*"""".r

// drops the first and last character from a string
def unquote(s: String) = s.drop(1).dropRight(1)
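
// Example sketch (the input strings are made up for illustration):
// the regex matches URLs still wrapped in their double quotes, which
// unquote then strips off
//
//   http_pattern.findFirstIn("""<a href="http://example.com/x">""")
//     // => Some("http://example.com/x")   (quotes still included)
//   unquote(""""http://example.com/x"""")
//     // => http://example.com/x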

def get_all_URLs(page: String) : Set[String] = {
  http_pattern.findAllIn(page).map(unquote).toSet
}
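
// Example sketch (hypothetical HTML snippet): repeated links collapse
// because the matches are collected into a Set
//
//   get_all_URLs("""<a href="http://a.org/1"><a href="http://a.org/1">""")
//     // => Set(http://a.org/1)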

// naive version of crawl - searches until a given depth,
// visits pages potentially more than once
def crawl(url: String, n: Int) : Unit = {
  if (n == 0) ()
  else {
    println(s"Visiting: $n $url")
    for (u <- get_all_URLs(get_page(url))) crawl(u, n - 1)
  }
}
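
// One possible refinement, sketched here as an assumption (crawl2 and
// its seen-parameter are not part of this file): thread the set of
// already visited URLs through the recursion, so no page is fetched twice
//
// def crawl2(url: String, n: Int, seen: Set[String] = Set()) : Set[String] = {
//   if (n == 0 || seen.contains(url)) seen
//   else {
//     println(s"Visiting: $n $url")
//     get_all_URLs(get_page(url)).foldLeft(seen + url)(
//       (s, u) => crawl2(u, n - 1, s))
//   }
// }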

// some starting URLs for the crawler
val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc"""
//val startURL = """http://www.inf.kcl.ac.uk/staff/mcburney"""

crawl(startURL, 2)