progs/crawler2.scala
changeset 550:71fc4a7a7039
parent    432:55be90b2a642
--- a/progs/crawler2.scala	549:352d15782d35
+++ b/progs/crawler2.scala	550:71fc4a7a7039
@@ -11,11 +11,12 @@
     getOrElse { println(s"  Problem with: $url"); ""}
 }
 
 // regexes for URLs and "my" domain
 val http_pattern = """"https?://[^"]*"""".r
-val my_urls = """urbanc""".r       /*@\label{myurlline}@*/
+val my_urls = """urban""".r       /*@\label{myurlline}@*/
+//val my_urls = """kcl.ac.uk""".r 
 
 def unquote(s: String) = s.drop(1).dropRight(1)
 
 def get_all_URLs(page: String) : Set[String] = 
   http_pattern.findAllIn(page).map(unquote).toSet
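
The regex in http_pattern is worth a second look: it is a triple-quoted
string with an extra literal " at each end, so it matches an http(s) URL
only together with the double quotes around it, i.e. as it appears inside
href="..." attributes; unquote then strips those quotes and toSet collapses
duplicate links. The changed my_urls regex is what keeps the crawler on
"my" domain. A small sanity check of this pipeline, with a made-up page
snippet (snippet and results are illustrative, not from the file):

  // definitions as in the file above
  val http_pattern = """"https?://[^"]*"""".r
  val my_urls = """urban""".r
  def unquote(s: String) = s.drop(1).dropRight(1)
  def get_all_URLs(page: String) : Set[String] =
    http_pattern.findAllIn(page).map(unquote).toSet

  // a made-up page snippet
  val page = """<a href="https://nms.kcl.ac.uk/christian.urban/">home</a>
                <a href="http://example.com/x">x</a>
                <a href="https://nms.kcl.ac.uk/christian.urban/">again</a>"""

  get_all_URLs(page)
  // two URLs: the duplicate link is collapsed by toSet

  // the domain test: only URLs matching my_urls are recursed into
  my_urls.findFirstIn("https://nms.kcl.ac.uk/christian.urban/")  // Some(urban)
  my_urls.findFirstIn("http://example.com/x")                    // None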
@@ -26,8 +27,8 @@
     println(s"Visiting: $n $url")
     get_page(url); () 
   }                                /*@\label{changeendline}@*/
   else {
     println(s"Visiting: $n $url")
-    for (u <- get_all_URLs(get_page(url))) crawl(u, n - 1)
+    for (u <- get_all_URLs(get_page(url)).par) crawl(u, n - 1)
   }
 }
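
The only change in this hunk is the added .par: the set of harvested URLs
is turned into a parallel collection, so the recursive crawl calls in the
for-loop run on a thread pool instead of one after the other. Nothing in
the visible code deduplicates URLs across branches, so the parallelism
speeds the traversal up but does not change it. One caveat: .par is part
of the standard library only up to Scala 2.12; from 2.13 on it lives in
the separate scala-parallel-collections module. A sketch of the setup that
would be needed there (the module version is illustrative):

  // Scala 2.13+ only, e.g. in build.sbt:
  //   libraryDependencies +=
  //     "org.scala-lang.modules" %% "scala-parallel-collections" % "1.0.4"
  import scala.collection.parallel.CollectionConverters._

  // .par converts Set[String] to ParSet[String]; its foreach (and hence a
  // for-loop over it) runs on a fork-join pool, in nondeterministic order
  for (u <- Set("a", "b", "c").par) println(u)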
@@ -34,9 +35,10 @@
 
 // starting URL for the crawler
-val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc"""
-val startURL = """http://www.inf.kcl.ac.uk/staff/urbanc/bsc-projects-16.html"""
+val startURL = """https://nms.kcl.ac.uk/christian.urban/"""
+//val startURL = """https://nms.kcl.ac.uk/christian.urban/bsc-projects-17.html"""
+
 
 // can now deal with depth 3 and beyond
-crawl(startURL, 2)
+crawl(startURL, 3)
 
 
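
For trying out the new revision as a stand-alone script, the sketch below
stitches the visible pieces together. The imports, the body of get_page,
the if (n == 0) base case and the domain-test guard of crawl fall outside
this diff's context; those parts are reconstructions inferred from the
getOrElse fallback and the branches shown above, a best-effort sketch
rather than the file's verbatim content.

  import io.Source
  import scala.util.Try
  // import scala.collection.parallel.CollectionConverters._  // Scala 2.13+,
  // see the note after the previous hunk

  // fetch a page as a string; the getOrElse fallback is the one visible in
  // the diff, the Try/Source.fromURL body is reconstructed
  def get_page(url: String) : String =
    Try(Source.fromURL(url)("ISO-8859-1").mkString).
      getOrElse { println(s"  Problem with: $url"); ""}

  val http_pattern = """"https?://[^"]*"""".r
  val my_urls = """urban""".r

  def unquote(s: String) = s.drop(1).dropRight(1)

  def get_all_URLs(page: String) : Set[String] =
    http_pattern.findAllIn(page).map(unquote).toSet

  def crawl(url: String, n: Int) : Unit = {
    if (n == 0) ()                                // reconstructed base case
    else if (my_urls.findFirstIn(url) == None) {  // reconstructed guard:
      println(s"Visiting: $n $url")               // a foreign URL is fetched
      get_page(url); ()                           // once but not recursed into
    }
    else {
      println(s"Visiting: $n $url")
      for (u <- get_all_URLs(get_page(url)).par) crawl(u, n - 1)
    }
  }

  crawl("""https://nms.kcl.ac.uk/christian.urban/""", 3)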