progs/crawler2.scala
changeset 242 35104ee14f87
parent 116 010ae7288327
child 254 dcd4688690ce
equal deleted inserted replaced
241:10f02605a46a 242:35104ee14f87
    10   Try(Source.fromURL(url).take(10000).mkString) getOrElse 
    11     { println(s"  Problem with: $url"); ""}
    12 }
    13 
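The Try(...) getOrElse pattern above swallows any exception raised by Source.fromURL and falls back to the empty string, printing a warning instead of crashing the crawl. A minimal, self-contained sketch of the same idea (the wrapper name fetch and the URL are made up for illustration; the file's own imports sit in the elided lines at the top):

  import scala.io.Source
  import scala.util.Try

  // fetch at most 10000 characters of a page, or return "" if the URL cannot be read
  def fetch(url: String): String =
    Try(Source.fromURL(url).take(10000).mkString) getOrElse
      { println(s"  Problem with: $url"); "" }

  fetch("http://does-not-exist.example/")   // prints the warning, evaluates to ""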
    14 // regexes for URLs and "my" domain
    15 val http_pattern = """\"https?://[^\"]*\"""".r
    15 val http_pattern = """"https?://[^"]*"""".r
    16 val my_urls = """urbanc""".r
    17 
    18 def unquote(s: String) = s.drop(1).dropRight(1)
    19 
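To see what the tidied http_pattern matches, here is a small sketch that runs the regex and unquote over a made-up fragment of HTML (the sample string is an assumption, not taken from any real page):

  // hypothetical page fragment with two quoted links
  val sample = """<a href="http://www.inf.kcl.ac.uk/staff/urbanc/">home</a>
                  <a href="https://example.org/other">other</a>"""

  // findAllIn yields the quoted matches; unquote strips the surrounding quotes
  http_pattern.findAllIn(sample).map(unquote).toList
  // => List(http://www.inf.kcl.ac.uk/staff/urbanc/, https://example.org/other)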
    20 def get_all_URLs(page: String) : Set[String] = {
    31     println(s"Visiting: $n $url")
    32     for (u <- get_all_URLs(get_page(url))) crawl(u, n - 1)
    33   }
    34 }
    35 
    36 // staring URL for the crawler
    36 // starting URL for the crawler
    37 val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc/"""
    37 val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc"""
    38 
    39 // can now deal with depth 3 and beyond
    40 crawl(startURL, 3)
    41 
    42 
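The hunks above skip the unchanged region between lines 20 and 31, so the body of get_all_URLs and the head of crawl are not shown. One plausible way such a function could combine http_pattern and unquote, given only the definitions visible here (a sketch under that assumption, not necessarily the file's actual code):

  def get_all_URLs(page: String): Set[String] =
    http_pattern.findAllIn(page).map(unquote).toSet

Read that way, crawl(startURL, 3) fetches the start page, collects every quoted http(s) link on it, and recursively visits each one, decrementing the depth n on every step.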