progs/crawler2.scala
changeset 112 95ee5cc5c05d
parent 101 4758a6155878
child 116 010ae7288327

import io.Source
import scala.util.matching.Regex
import scala.util._

// gets the first 10K of a web-page
def get_page(url: String) : String = {
  Try(Source.fromURL(url).take(10000).mkString) getOrElse
    { println(s"  Problem with: $url"); ""}
}
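
// illustrative calls of get_page (examples only, not executed here; both need
// network access and the second URL is made up):
//   get_page("http://www.inf.kcl.ac.uk/staff/urbanc/")   returns the first 10K characters
//   get_page("http://no-such-host.invalid/")             prints "  Problem with: ..." and returns ""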

// regexes for URLs and "my" domain
val http_pattern = """\"https?://[^\"]*\"""".r
val my_urls = """urbanc""".r

def unquote(s: String) = s.drop(1).dropRight(1)

def get_all_URLs(page: String) : Set[String] = {
  http_pattern.findAllIn(page).map(unquote).toSet
}
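
// a small illustration of how http_pattern, unquote and get_all_URLs fit
// together (the HTML snippet below is made up):
//   get_all_URLs("""<a href="http://www.inf.kcl.ac.uk/">KCL</a>""")
// matches the quoted URL, strips the quotes and returns the set
// containing http://www.inf.kcl.ac.uk/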

// naive version - searches until a given depth and
// visits pages potentially more than once
def crawl(url: String, n: Int) : Unit = {
  if (n == 0) ()
  else if (my_urls.findFirstIn(url) == None) ()
  else {
    println(s"Visiting: $n $url")
    for (u <- get_all_URLs(get_page(url))) crawl(u, n - 1)
  }
}
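
// a possible extension (only a sketch, not part of this program): thread a set
// of already-visited URLs through the recursion so that no page is fetched
// twice; the name crawl_once and its default argument are made up
def crawl_once(url: String, n: Int, seen: Set[String] = Set()) : Set[String] = {
  if (n == 0 || seen.contains(url) || my_urls.findFirstIn(url) == None) seen
  else {
    println(s"Visiting: $n $url")
    // record this page, then fold the recursive calls over all URLs found on it
    get_all_URLs(get_page(url)).foldLeft(seen + url)((s, u) => crawl_once(u, n - 1, s))
  }
}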

// starting URL for the crawler
val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc/"""

// can now deal with depth 3 and beyond
crawl(startURL, 4)

crawl("""http://www.inf.kcl.ac.uk/staff/urbanc/bsc-projects-13.html""", 2)