% Copyright (C) 2017 Koz Ross <koz.ross@retro-freedom.nz>
% Copyright (C) 2006 BenduKiwi
% This work is licensed under the Creative Commons Attribution-ShareAlike 4.0
% International License. To view a copy of this license, visit
% http://creativecommons.org/licenses/by-sa/4.0/ or send a letter to Creative
% Commons, PO Box 1866, Mountain View, CA 94042, USA.
% BenduKiwi's attribution applies to the image 'cthulhu.jpg', per
% the requirements of the CC-BY-SA license.
\documentclass[style=fyma]{powerdot}
\usepackage{enumitem}
\usepackage{graphicx}
\usepackage{amsmath}
\usepackage{amsfonts}
\usepackage{amsthm}
\usepackage[font={itshape},begintext={``},endtext={''}]{quoting}
\usepackage{algpseudocode}
\usepackage[normalem]{ulem}
\usepackage{xcolor}
\usepackage{booktabs}
\usepackage{bm}
\newcommand\redsout{\bgroup\markoverwith{\textcolor{red}{\rule[0.5ex]{2pt}{0.4pt}}}\ULon}
\newtheorem*{definition}{Definition}
\newtheorem*{lemma}{Lemma}
\newtheorem*{theorem}{Theorem}
\newtheorem*{observation}{Observation}
\pdsetup{theslide=,
palette=gray}
\begin{document}
\begin{slide}[toc=]{}
\begin{center}
{\LARGE \textbf{Uses and limitations of asymptotic analysis} \par}
{\Large Or: How to assume like a computer scientist \par}
\vspace{1cm}
\includegraphics[scale=0.75]{logo.eps}
\vspace{1cm}
\linebreak
Koz Ross
\medskip
April 13, 2017
\end{center}
\end{slide}
\begin{slide}[toc=]{Overview}
\tableofcontents[content=sections]
\end{slide}
\section{Recap and formalisms}
\begin{slide}{Where we last left our intrepid heroes\ldots}
\vspace{1cm}
\begin{itemize}
\item Algorithms are recipes for a computer\pause{}
\item We want them to be efficient (time and space)\pause{}
\item Must have a way to compare algorithms, and explain their
performance\pause{}
\item Implement-and-measure is {\em not\/} a good approach\pause{}
\item A model of a computer (RAM)\pause{}
\item Worst-case, asymptotic analysis
\end{itemize}
\end{slide}
\begin{slide}{Example from last week}
\begin{algorithmic}
\Function{$\mathsf{Max}$}{$\mathsf{arr}$}
\State{}$\mathsf{x} \gets \mathsf{arr}[1]$
\For{$\mathsf{i} \in 2, 3, \ldots,$
\Call{$\mathsf{length}$}{$\mathsf{arr}$}}
\If{$\mathsf{x} < \mathsf{arr}[\mathsf{i}]$}
\State{}$\mathsf{x} \gets \mathsf{arr}[\mathsf{i}]$
\EndIf{}
\EndFor{}
\State{}\Return{}$\mathsf{x}$
\EndFunction{}
\end{algorithmic}\pause{}

\medskip
$T_{\mathsf{Max}}(n) \text{ is } O(n)$\pause{}

\medskip
$S_{\mathsf{Max}}(n) \text{ is } O(1)$
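
\medskip
(A rough accounting, assuming each pseudocode line costs a constant under
RAM: the loop body runs at most $n - 1$ times, so
$T_{\mathsf{Max}}(n) \leq c_1 + c_2 \cdot (n - 1)$ for some constants
$c_1, c_2$, which is $O(n)$.)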
\end{slide}
\begin{slide}[toc=]{What people see when they look at a formal definition of $O$}
\begin{center}
\includegraphics[scale=0.4]{cthulhu.eps}\pause{}

\medskip
Don't worry --- we'll go {\bf slow}.
\end{center}
\end{slide}
\begin{slide}{Definition of $O$ (at last!)}
\vspace{1cm}
\begin{definition}
Let $f, g$ be functions from $\mathbb{N}$ to the non-negative
reals.\pause{} If there exist $n_0, c > 0$\pause{} such
that for all $n > n_0$, we have\pause{}
$$f(n) \leq c \cdot g(n)$$\pause{}
then we say that {\em $f$ is $O(g)$}.
\end{definition}\pause{}
\medskip
We write $O(n)$ as a kind of shorthand --- otherwise, we would have to write
$O(f \text{ such that } f(n) = n)$, which is far too long.
\end{slide}
\begin{slide}{Examples of use}
\vspace{1cm}
\begin{lemma}
$6n-1$ is $O(n)$.
\end{lemma}\pause{}
\begin{proof}
Let $n_0 = 1, c = 7$. \pause{}We observe that, for all $n > 1$, we have
$$6n - 1 \leq 7n$$\pause{}
Thus, by definition, $6n-1$ is $O(n)$.
\end{proof}
\end{slide}
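\begin{slide}[toc=]{Examples of use}
\vspace{1cm}
Another quick example in the same spirit:
\begin{lemma}
$3n^2 + 10n$ is $O(n^2)$.
\end{lemma}\pause{}
\begin{proof}
Let $n_0 = 1, c = 13$. \pause{}We observe that, for all $n > 1$, we have
$$3n^2 + 10n \leq 3n^2 + 10n^2 = 13n^2$$\pause{}
Thus, by definition, $3n^2 + 10n$ is $O(n^2)$.
\end{proof}
\end{slide}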
\begin{slide}[toc=]{Examples of use}
\begin{lemma}
$n^2$ is {\em not\/} $O(n)$.
\end{lemma}\pause{}
\begin{proof}
Suppose, for the sake of a contradiction, that $n^2$ is $O(n)$.\pause{} By
definition, there exist some $n_0, c > 0$ such that for all
$n > n_0$, $n^2 \leq c \cdot n$.\pause{} Consider $n =
\max\{n_0 + 1, c + 1\}$.\pause{} There are two cases:\pause{}

\medskip
{\bf Case 1:} $n = n_0 + 1$\pause{}

By substitution, we have ${(n_0 + 1)}^2 \leq c \cdot (n_0 + 1)$,
which simplifies to $n_0 + 1 \leq c$.\pause{} However, $n = n_0 + 1$
means that $n_0 + 1 \geq c + 1$, and thus $c \leq n_0 < n_0 + 1$ --- a
contradiction.\pause{}

\medskip
{\bf Case 2:} $n = c + 1$\pause{}

By substitution, we have ${(c + 1)}^2 \leq c \cdot (c + 1)$, which
simplifies to $c + 1 \leq c$, which is a contradiction.\pause{}

\medskip
As both cases lead to contradictions, no such $n_0, c$ can exist. Thus,
$n^2$ is not $O(n)$.
\end{proof}
\end{slide}
\section{The usual suspects}
\begin{slide}{The `good' ones}
\vspace{1cm}
\pause{}
$\bm{O(1)}$

{\em Constant\/}: input size does not affect complexity.\pause{}
\bigskip
$\bm{O(\log(n))}$

{\em Logarithmic\/}: when input size doubles, complexity goes up by a
fixed amount.\pause{} We don't care about the base of the logarithm here,
because we can change to any base using a multiplication by a
constant.\pause{}
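
\medskip
As a quick check of that claim: for any bases $a, b > 1$,
$$\log_a(n) = \frac{\log_b(n)}{\log_b(a)}$$
so switching bases only multiplies by the constant $1 / \log_b(a)$.\pause{}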
\bigskip
$\bm{O(n)}$

{\em Linear\/}: when input size doubles, complexity also doubles.
\end{slide}
\begin{slide}{The `acceptable' ones}
\vspace{1cm}
\pause{}
$\bm{O(n \log(n))}$

{\em Linearithmic\/}: when input size doubles, complexity slightly more
than doubles --- it doubles, then grows by an extra amount proportional
to the input size.\pause{} Its `wordy' name is rarely used
outside of a textbook.\pause{}
\bigskip
$\bm{O(n^2)}$

{\em Quadratic\/}: when input size doubles, complexity {\em
quadruples}.\pause{}
\bigskip
$\bm{O(n^3)}$

{\em Cubic\/}: when input size doubles, complexity increases by a factor of
eight.
\end{slide}
\begin{slide}{The `{\em un\/}acceptable' ones}
\vspace{1cm}
\pause{}
$\bm{O(2^n)}$

{\em Exponential\/}: when input size goes up by 1, complexity doubles.\pause{}
If your algorithm has exponential time or space complexity, it may as well not
exist.\pause{}
\bigskip
Only gets {\em worse\/} from here --- but we usually don't bother at this
point.
\end{slide}
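\begin{slide}[toc=]{The usual suspects, side by side}
\vspace{1cm}
To make these growth rates concrete, here are (approximate, illustrative)
step counts for each class at a few input sizes:\pause{}

\begin{center}
\begin{tabular}{rrrrr}
\toprule
$n$ & $\log_2(n)$ & $n\log_2(n)$ & $n^2$ & $2^n$ \\
\midrule
$8$ & $3$ & $24$ & $64$ & $256$ \\
$16$ & $4$ & $64$ & $256$ & $65{,}536$ \\
$32$ & $5$ & $160$ & $1024$ & $\approx 4.3 \times 10^9$ \\
\bottomrule
\end{tabular}
\end{center}\pause{}

\medskip
Even at $n = 32$, an exponential algorithm is already hopeless.
\end{slide}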
\section{Limitations}
\begin{slide}{Limitations of worst-case analysis}
\vspace{1cm}
Worst-case analysis is {\em pessimistic\/} --- this can be a good thing,
because it stops us from having unrealistic expectations in the face of
unknown inputs.\pause{} However, it is possible for it to be {\em too\/}
pessimistic:\pause{}
\begin{itemize}
\item If worst cases are rare or improbable, an algorithm might look much
worse than it actually is\pause{}
\item It may focus our attention in the wrong places (exactly what we
sought to {\em avoid\/})\pause{}
\item Its predictions won't match reality (and we won't know why)\pause{}
\end{itemize}
Is it really sensible to {\em always\/} be {\em uncompromisingly\/} pessimistic?
\end{slide}
\begin{slide}{Limitations of RAM}
\vspace{1cm}
\begin{quoting}
RAM describes computers, and if we design algorithms around it, their actual
performance should reflect what RAM says it should be.
\end{quoting}\pause{}
\medskip
That statement was certainly true in the 1950s, but things have changed {\em
considerably\/} since then:\pause{}

\begin{itemize}
\item Computers no longer have a single processing unit\pause{}
\item Memory is no longer uniform, nor uniformly addressable\pause{}
\item Processing units haven't executed instructions strictly sequentially
since the mid-70s\pause{}
\end{itemize}
Is it really sensible to analyze (and {\em design\/}) algorithms based on
such an old view of our machines?
\end{slide}
\begin{slide}{Limitations of asymptotic analysis}
The assumptions of asymptotic analysis are self-consistent, but are they
consistent with {\em reality\/}?\pause{}
\begin{itemize}
\item There {\em is\/} a limit on how big inputs can get\pause{}
\item $2n$ and $2^{10}n$ are both $O(n)$, but practically, are {\em
very\/} different, especially for space (see the arithmetic after this
list)\pause{}
\item Constants can, and {\em do}, dominate algorithm performance\pause{}
\end{itemize}
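(A quick arithmetic check, assuming 8-byte machine words: at $n = 10^6$,
$2n$ words is about $16$\,MB of space, while $2^{10}n$ words is about
$8$\,GB.)\pause{}

\medskip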
By clever `asymptotic abuse', we can easily come up with algorithms that
`look good', but are worthless in practice:

\begin{quoting}
Our algorithms have theoretical interest only; the constant factors involved
in the execution times preclude practicality.
\end{quoting}\pause{}
This seems to fly in the face of why we're doing this in the first place. But
where to draw the line?
\end{slide}
\begin{slide}{Should we bother with it, then?}
\vspace{1cm}
\begin{itemize}
\item Asymptotic worst-case analysis based on RAM underpins many results,
and would be worth knowing just for that reason alone\pause{}
\item Alternatives to it exist --- but to understand them, you need a
foundation (guess what {\em that\/} is?)\pause{}
\item In many cases ({\em taking assumptions into account\/}), it still
works very well (much like classical mechanics)\pause{}
\item `The beginning, but not the end'\pause{}
\item ``If you don't have an implementation, you don't have an
algorithm''\pause{}
\end{itemize}
\begin{quoting}
Trust, but verify.
\end{quoting}
\end{slide}
\section{Questions}
\end{document}