// A crawler which checks whether there are
// dead links in web-pages

import io.Source
import scala.util.matching.Regex
import scala.util._

// gets the first 10K of a web-page
def get_page(url: String) : String = {
  Try(Source.fromURL(url)("ISO-8859-1").take(10000).mkString).
    getOrElse { println(s" Problem with: $url"); "" }
}

// e.g. get_page("https://nms.kcl.ac.uk/christiana.urban/")

// regex for URLs
val http_pattern = """"https?://[^"]*"""".r       /*@\label{httpline}@*/

// drops the first and last characters from a string
def unquote(s: String) = s.drop(1).dropRight(1)

def get_all_URLs(page: String) : Set[String] =
  http_pattern.findAllIn(page).map(unquote).toSet /*@\label{findallline}@*/

// a very naive version of crawl - searches until a given
// depth, visits pages potentially more than once
def crawl(url: String, n: Int) : Unit = {
  if (n == 0) ()
  else {
    println(s"Visiting: $n $url")
    for (u <- get_all_URLs(get_page(url))) crawl(u, n - 1)
  }
}

// some starting URLs for the crawler
val startURL = """https://nms.kcl.ac.uk/christian.urban/"""
//val startURL = """https://nms.kcl.ac.uk/luc.moreau/"""

crawl(startURL, 2)
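
// a minimal sketch of what get_all_URLs extracts: http_pattern only
// matches URLs that appear inside double quotes (e.g. href attributes),
// and unquote then strips the surrounding quotes; the HTML snippet below
// is a hypothetical example, not taken from the pages above
//
//   val sample = """<a href="https://example.com/index.html">link</a>"""
//   get_all_URLs(sample)   // Set("https://example.com/index.html")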