wsheets/wsh02.scala
changeset 447 f51e593903ac
parent 444 7a0735db4788
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/wsheets/wsh02.scala	Mon Nov 21 15:57:45 2022 +0000
@@ -0,0 +1,77 @@
+
+// Task 1 (find / min / minOption)
+
+List(7,2,3,4,5,6).find(_ < 4)  // => Some(2), the first element smaller than 4
+List(5,6,7,8,9).find(_ < 4)    // => None, no element is smaller than 4
+List(5,6,7,8,9).min            // => 5
+List(5,6,7,8,9).minOption      // => Some(5)
+List[Int]().minOption          // => None (plain .min would throw on an empty list)
+
+
+// Task 2: wrap the partial .min in Try (NOTE(review): Try needs `import scala.util._`, which only appears below in Task 3 — move it up to run the sheet top-to-bottom)
+
+Try(Some(List(5,6,7,8,9).min)).getOrElse(None)  // => Some(5)
+Try(Some(List[Int]().min)).getOrElse(None)      // => None, .min throws on the empty list
+
+// Task 3: reading files safely
+import scala.util._
+import io.Source
+
+val my_url = "https://nms.kcl.ac.uk/christian.urban/"
+
+// fails (throws an exception) if there is no file with that name
+Source.fromFile("test.txt")("ISO-8859-1").mkString
+Source.fromFile("test.txt")("ISO-8859-1").getLines().toList
+
+// encapsulates the failure case as None
+Try(Some(Source.fromFile("test.txt")("ISO-8859-1").mkString)).getOrElse(None)
+Try(Source.fromFile("test.txt")("ISO-8859-1").mkString).toOption // same but shorter
+
+// for files, with proper closing of the file after reading
+Using(Source.fromFile("test.txt")("ISO-8859-1"))(_.mkString).toOption
+
+// Task 4 (Higher-Order Functions)
+
+List(7,2,3,4,5,6).find(_ < 4)        // => Some(2), first element below 4
+List(7,2,3,4,5,6).count(_ % 2 == 0)  // => 3, the even elements 2, 4, 6
+List(7,2,3,4,5,6).sortWith(_ > _)    // => List(7, 6, 5, 4, 3, 2), descending
+List(7,2,3,4,5,6).filter(_ > 4)      // => List(7, 5, 6)
+
+// Task 5 (Maps)
+
+List(7,2,3,4,5,6).map(n => n * n)         // => List(49, 4, 9, 16, 25, 36)
+
+for (n <- List(7,2,3,4,5,6)) yield n * n  // same result as a for-comprehension
+
+// The advantage of for-comprehensions is that they
+// can be nested and can also contain guards. In such
+// cases the translation to maps and filters is a bit
+// involved. 
+
+// Task 6 (Pattern-Matching)
+
+def my_map(lst: List[Int], f: Int => Int) : List[Int] = {  // hand-rolled map (not tail-recursive)
+ if (lst == Nil) Nil                      // empty list maps to the empty list
+ else f(lst.head) :: my_map(lst.tail, f)  // apply f to the head, recurse on the tail
+}
+
+def my_map(lst: List[Int], f: Int => Int) : List[Int] = lst match { // fixed typo: was "macth"
+ case Nil => Nil                      // empty list maps to the empty list
+ case x::xs => f(x) :: my_map(xs, f)  // apply f to the head, recurse on the tail
+}
+
+// Task 7 (Web-Crawler, hard)
+
+// see lecture2.scala
+
+// requires an accumulator that records all pages that have 
+// already been visited, for example
+
+def crawl(url: String, n: Int, acc : Set[String] = Set()) : Unit = {
+  if (n == 0) ()  // depth bound reached
+  else {
+    println(s"  Visiting: $n $url")
+    val urls = get_all_URLs(get_page(url))
+    // fix: original never consulted acc, so visited pages were re-crawled; skip known URLs and pass this page plus its links down as visited
+    for (u <- urls if !acc.contains(u)) crawl(u, n - 1, acc | urls + url)
+  }
+}