slides/slides10.tex
changeset 154 525c512629c7
parent 153 af8fff37dd1c
child 339 0e78c809b17f
--- a/slides/slides10.tex	Tue Dec 10 07:49:48 2013 +0000
+++ b/slides/slides10.tex	Tue Dec 10 16:56:59 2013 +0000
@@ -453,10 +453,28 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 \mode<presentation>{
 \begin{frame}[c]
-\frametitle{\begin{tabular}{c}8th Lecture:\\ Privacy\end{tabular}}
+\frametitle{8th Lecture: Privacy}
 
 \begin{itemize}
 \item differential privacy for annonymizing research data
+
+\begin{center}
+User\;\;\;\;    
+\begin{tabular}{c}
+tell me \bl{$f(x)$} $\Rightarrow$\\
+$\Leftarrow$ \bl{$f(x) + \text{noise}$}
+\end{tabular}
+\;\;\;\;\begin{tabular}{@{}c}
+Database\\
+\bl{$x_1, \ldots, x_n$}
+\end{tabular}
+\end{center}
+
+\item \bl{$f(x)$} can be released if \bl{$f$} is insensitive to
+individual entries \bl{$x_1, \ldots, x_n$}
+\item The intuition: whatever is learned from the dataset would be learned regardless of whether
+\bl{$x_i$} participates\bigskip\pause
+
 \item Tor webservice
 \end{itemize}
 
@@ -471,7 +489,8 @@
 
 \begin{itemize}
 \item zero-knowledge proofs
-\item requires NP problems, for example graph isomorphisms
+\item requires NP problems, for example graph isomorphisms\bigskip\pause
+\item random number generators
 \end{itemize}