// Task 1

List(7,2,3,4,5,6).find(_ < 4) // => Some(3)
List(5,6,7,8,9).find(_ < 4)   // => None
List(5,6,7,8,9).min           // => 5
List(5,6,7,8,9).minOption     // => Some(5)
List[Int]().minOption         // => None
|
// Task 2
import scala.util.Try   // Try lives in scala.util

Try(Some(List(5,6,7,8,9).min)).getOrElse(None) // => Some(5)
Try(Some(List[Int]().min)).getOrElse(None)     // => None
|
// Task 3
import scala.util._
import io.Source

val my_url = "https://nms.kcl.ac.uk/christian.urban/"

// fails if there is no file with that name
Source.fromFile("test.txt")("ISO-8859-1").mkString
Source.fromFile("test.txt")("ISO-8859-1").getLines().toList

// encapsulates the failure case as None
Try(Some(Source.fromFile("test.txt")("ISO-8859-1").mkString)).getOrElse(None)
Try(Source.fromFile("test.txt")("ISO-8859-1").mkString).toOption // same but shorter

// for files with proper closing of the file after reading
Using(Source.fromFile("test.txt")("ISO-8859-1"))(_.mkString).toOption
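
// a possible variant: the same pattern also works for web pages,
// for example the URL bound to my_url above
Try(Source.fromURL(my_url)("ISO-8859-1").mkString).toOption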
|
// Task 4 (Higher-Order Functions)

List(7,2,3,4,5,6).find(_ < 4)       // => Some(2)
List(7,2,3,4,5,6).count(_ % 2 == 0) // => 3
List(7,2,3,4,5,6).sortWith(_ > _)   // => List(7, 6, 5, 4, 3, 2)
List(7,2,3,4,5,6).filter(_ > 4)     // => List(7, 5, 6)
|
// Task 5 (Maps)

List(7,2,3,4,5,6).map(n => n * n)

for (n <- List(7,2,3,4,5,6)) yield n * n

// The advantage of for-comprehensions is that they can be
// nested and can also contain guards. In such cases the
// translation into maps and filters is a bit more involved.
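
// For illustration, a nested for-comprehension with a guard and the
// flatMap/withFilter/map combination it (roughly) translates to:
for (n <- List(1,2,3); m <- List(4,5); if n + m > 5) yield n * m
// => List(5, 8, 10, 12, 15)

List(1,2,3).flatMap(n => List(4,5).withFilter(m => n + m > 5).map(m => n * m))
// => List(5, 8, 10, 12, 15)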
|
// Task 6 (Pattern-Matching)

// version using an if-expression with head and tail
def my_map(lst: List[Int], f: Int => Int) : List[Int] = {
  if (lst == Nil) Nil
  else f(lst.head) :: my_map(lst.tail, f)
}

// the same function written with pattern matching
def my_map(lst: List[Int], f: Int => Int) : List[Int] = lst match {
  case Nil => Nil
  case x::xs => f(x) :: my_map(xs, f)
}
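
// for example, squaring every element
my_map(List(7,2,3,4,5,6), n => n * n) // => List(49, 4, 9, 16, 25, 36)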
|
// Task 7 (Web-Crawler, hard)

// see lecture2.scala
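
// lecture2.scala provides get_page and get_all_URLs; a minimal sketch of
// what they could look like (the regex and the error handling here are
// assumptions, not the lecture code):

// fetch the HTML of a page, or "" if the URL cannot be read
def get_page(url: String) : String =
  Try(Source.fromURL(url)("ISO-8859-1").mkString).getOrElse("")

// regex matching quoted http(s)-links, e.g. "https://..."
val http_pattern = """"https?://[^"]*"""".r

// extract all links on a page, stripping the surrounding quotes
def get_all_URLs(page: String) : Set[String] =
  http_pattern.findAllIn(page).map(_.drop(1).dropRight(1)).toSet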
|
// requires an accumulator that records all pages that have
// already been visited, for example

def crawl(url: String, n: Int, acc: Set[String] = Set()) : Unit = {
  if (n == 0) ()
  else {
    println(s" Visiting: $n $url")
    val urls = get_all_URLs(get_page(url))
    // only follow links that have not been visited already
    for (u <- urls if !acc.contains(u)) crawl(u, n - 1, acc | urls)
  }
}
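
// a possible call, for example starting from my_url (commented out,
// since it accesses the network)
// crawl(my_url, 2)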