diff --git a/lectures/2020-06-03.tex b/lectures/2020-06-03.tex
index 63a0b6e..2fa546d 100644
--- a/lectures/2020-06-03.tex
+++ b/lectures/2020-06-03.tex
@@ -33,7 +33,7 @@ \chapter{Recursive Identification}
 \section{Least square}
 
 \[
-    \hat{\theta_N} = \argmin_\theta \left\{ J_N(\theta) = \frac{1}{N} \sum_{t=1}^N \left( y(t) - \hat{y}(t|t-1, \theta) \right)^2 \right\}
+    \hat{\theta}_N = \argmin_\theta \left\{ J_N(\theta) = \frac{1}{N} \sum_{t=1}^N \left( y(t) - \hat{y}(t|t-1, \theta) \right)^2 \right\}
 \]
 
 We need to find the model predictor $\hat{y}(t|t-1, \theta)$.
@@ -100,7 +100,7 @@ \subsection{First form}
     \hat{\theta}_{N-1} &= S(N-1)^{-1} \sum_{t=1}^{N-1} \phi(t)y(t) \\
     \sum_{t=1}^{N-1} \phi(t)y(t) &= S(N-1)\hat{\theta}_{N-1} \\
     \sum_{t=1}^{N} \phi(t)y(t) &= \sum_{t=1}^{N-1} \phi(t)y(t) + \phi(N)y(N) \\
-    \sum_{t=1}^{N} \phi(t)y(t) &= S(N-1)\hat{\theta}_{N-1} + \phi(N)y(N) = \text{ equation } (1.2) \\
+    \sum_{t=1}^{N} \phi(t)y(t) &= S(N-1)\hat{\theta}_{N-1} + \phi(N)y(N) = \text{ equation } (7.2) \\
     S(N) \hat{\theta}_N &= S(N-1) \hat{\theta}_{N-1} + \phi(N)y(N)
 \end{align}
 
@@ -229,7 +229,7 @@ \section{Recursive Least Square with Forgetting Factor}
 
 $\hat{\alpha}_0$ is the correct estimation at time $N$, but it does not minimizes the objective function $J_N(\alpha)= \frac{1}{N} \sum_{t=1}^N \left( y(t) - \hat{y}(t|t-1, \alpha) \right)^2$ because it considers the entire time history of the system.
 
-In order to identify a time varying parameter the RLS must be force to forget old data.
+In order to identify a time varying parameter the RLS must be forced to forget old data.
 The solution is provided by the minimization of $J_N$:
 \[
     J_N(\theta) = \frac{1}{N} \sum_{t=1}^N \rho^{N-t}\left( y(t) - \hat{y}(t|t-1,\theta) \right)^2
diff --git a/lectures/2020-06-04.tex b/lectures/2020-06-04.tex
index 0e179d4..29fc720 100644
--- a/lectures/2020-06-04.tex
+++ b/lectures/2020-06-04.tex
@@ -203,8 +203,8 @@ \subsection*{D to A converter}
 Another simple discretization technique frequently used is the discretization of time-derivative $\dot{x}$.
 
 \begin{align*}
-    \text{\textbf{eulero backward}} &\qquad \dot{x} \approx \frac{x(t)-x(t-1)}{\Delta T} = \frac{x(t)-z^{-1}x(t)}{\Delta T} = \frac{z-1}{z\Delta T} x(t) \\
-    \text{\textbf{eulero forward}} &\qquad \dot{x} \approx \frac{x(t+1)-x(t)}{\Delta T} = \frac{zx(t)-x(t)}{\Delta T} = \frac{z-1}{\Delta T} x(t)
+    \text{\textbf{Eulero backward}} &\qquad \dot{x} \approx \frac{x(t)-x(t-1)}{\Delta T} = \frac{x(t)-z^{-1}x(t)}{\Delta T} = \frac{z-1}{z\Delta T} x(t) \\
+    \text{\textbf{Eulero forward}} &\qquad \dot{x} \approx \frac{x(t+1)-x(t)}{\Delta T} = \frac{zx(t)-x(t)}{\Delta T} = \frac{z-1}{\Delta T} x(t)
 \end{align*}
 
 General formula
@@ -344,9 +344,9 @@ \subsection*{D to A converter}
         \draw[dotted] (1.5,1.875) -- (1.5,0) node[below] {};
         \node[below] at (0.75,0) {\footnotesize B.W. of interest};
 
-        \draw (4,0.1) -- (4,0) node[below] {$\scriptstyle f_S$};
+        \draw (4,0.1) -- (4,0) node[below] {$\scriptstyle \omega_S$};
         \draw (1.5,0) edge[bend left=20,->] (4,0);
-        \node at (2.75,0.45) {\footnotesize x10};
+        \node at (2.75,0.45) {$\scriptstyle \times 10$};
     \end{tikzpicture}
 \end{figure}
 
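
For reference, minimizing the forgetting-factor cost that appears in lectures/2020-06-03.tex, $J_N(\theta) = \frac{1}{N} \sum_{t=1}^N \rho^{N-t}\left( y(t) - \hat{y}(t|t-1,\theta) \right)^2$, yields the standard recursive update with forgetting factor $\rho$. The sketch below uses the common gain/covariance notation ($K$, $P$), which may not match the symbols used elsewhere in the notes:

\begin{align*}
    K(N) &= \frac{P(N-1)\phi(N)}{\rho + \phi(N)^\top P(N-1)\phi(N)} \\
    \hat{\theta}_N &= \hat{\theta}_{N-1} + K(N)\left( y(N) - \phi(N)^\top \hat{\theta}_{N-1} \right) \\
    P(N) &= \frac{1}{\rho}\left( P(N-1) - K(N)\phi(N)^\top P(N-1) \right)
\end{align*}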