本稿は、「 カーネル関数の深層探訪:現代技術とその応用 」を参照しています。
$\Large \displaystyle C= \begin{pmatrix} b_0 & b_1 & \cdots & b_{N-2} & b_{N-1}\\ b_{N-1} & b_0 & \cdots & b_{N-3} & b_{N-2}\\ \vdots & \vdots & \ddots & \vdots & \vdots\\ b_2 & b_3 & \cdots & b_0 & b_1\\ b_1 & b_2 & \cdots & b_{N-1} & b_0 \end{pmatrix} $
$\Large \displaystyle C= \begin{pmatrix} b_0 & b_1 & b_2\\ b_2 & b_0 & b_1\\ b_1 & b_2 & b_0 \end{pmatrix} $
$\Large \displaystyle b_0^3 + b_1^3 + b_2^3 - 3b_0 b_1 b_2$
$\Large \displaystyle =(b_0 + b_1 + b_2)(b_0 + b_1\omega + b_2\omega^2)(b_0 + b_1\omega^2 + b_2\omega)$
$\Large \displaystyle P= \begin{pmatrix} 0 & 1 & 0 & 0\\ 1 & 0 & 0 & 0\\ 0 & 0 & 0 & 1\\ 0 & 0 & 1 & 0 \end{pmatrix} $
$\Large \displaystyle P\vec{x}= \begin{pmatrix} 0 & 1 & 0 & 0\\ 1 & 0 & 0 & 0\\ 0 & 0 & 0 & 1\\ 0 & 0 & 1 & 0 \end{pmatrix} \begin{pmatrix} x_0\\ x_1\\ x_2\\ x_3 \end{pmatrix} = \begin{pmatrix} x_1\\ x_0\\ x_3\\ x_2 \end{pmatrix} $
$\Large \displaystyle P= \begin{pmatrix} 0 & 1 & 0 & \cdots & 0 & 0\\ 0 & 0 & 1 & \cdots & 0 & 0\\ \vdots & \vdots & \vdots & \ddots & \vdots & \vdots\\ 0 & 0 & 0 & \cdots & 1 & 0\\ 0 & 0 & 0 & \cdots & 0 & 1\\ 1 & 0 & 0 & \cdots & 0 & 0 \end{pmatrix} $
$\Large \displaystyle \vec{x_k}=(1,\zeta_k,\zeta_k^2,\cdots,\zeta_k^{n-1})^T$
$\Large \displaystyle \lambda_k=\sum_{l=0}^{n-1}\zeta_{k}^{l}c_{l}\ \ \ (定理2)$
$\Large \displaystyle C \vec{x}_k=\lambda_k \vec{x}_k$
$\Large \displaystyle \begin{pmatrix} c_0 & c_1 & c_2\\ c_2 & c_0 & c_1\\ c_1 & c_2 & c_0 \end{pmatrix} \begin{pmatrix} 1\\ 1\\ 1 \end{pmatrix} = (c_0+c_1+c_2) \begin{pmatrix} 1\\ 1\\ 1 \end{pmatrix} $
$\Large \displaystyle \lambda_0=c_0+c_1+c_2$
$\Large \displaystyle \begin{pmatrix} c_0 & c_1 & c_2\\ c_2 & c_0 & c_1\\ c_1 & c_2 & c_0 \end{pmatrix} \begin{pmatrix} 1\\ \omega\\ \omega^2 \end{pmatrix} = (c_0+\omega c_1+\omega^2 c_2) \begin{pmatrix} 1\\ \omega\\ \omega^2 \end{pmatrix} $
$\Large \displaystyle \lambda_1=c_0+\omega c_1+\omega^2 c_2$
$\Large \displaystyle \begin{pmatrix} c_0 & c_1 & c_2\\ c_2 & c_0 & c_1\\ c_1 & c_2 & c_0 \end{pmatrix} \begin{pmatrix} 1\\ \omega^2\\ \omega \end{pmatrix} = (c_0+\omega^2 c_1+\omega c_2) \begin{pmatrix} 1\\ \omega^2\\ \omega \end{pmatrix} $
$\Large \displaystyle \lambda_2=c_0+\omega^2 c_1+\omega c_2$
$\Large \displaystyle \det C=\prod_{k=0}^{n-1}\lambda_k$
$\Large \displaystyle =\prod_{k=0}^{n-1}(\sum_{l=0}^{n-1}\zeta_{k}^{l}c_{l})$
$\Large \displaystyle \zeta_{k}=exp(\frac{2\pi kj}{n})$
$\Large \displaystyle \zeta_{k}^n=exp(\frac{2\pi kj+2\pi kj+,\cdots,+2\pi kj}{n})$
$\Large \displaystyle =exp(2\pi kj)=1$
$\Large \displaystyle h(x)=\int_{-\infty}^{\infty}f(t)g(x-t)dt$
$\Large \displaystyle h(x)=f(x)*g(x),\ \ h=f*g$
$\Large \displaystyle c_n=\sum_{t=0}^{n}a_t b_{n-t}$
$\Large \displaystyle c_n=a_n*b_n,\ \ c=a*b$
$\Large \displaystyle c_6=a_0 b_6 + a_1 b_5 + a_2 b_4 + a_3 b_3 + a_4 b_2 + a_5 b_1 + a_6 b_0$
$\Large \displaystyle w=e^{-\frac{2\pi}{N}i}$
$\Large \displaystyle F(k)=\sum_{n=0}^{N-1}f(n)e^{-i\frac{2\pi k}{N}n}$
$\Large \displaystyle =\sum_{n=0}^{N-1}f(n)e^{(-\frac{2\pi}{N}i)kn}$
$\Large \displaystyle =\sum_{n=0}^{N-1}f(n)w^{kn}$
$\Large \displaystyle F(0)=w^{0\cdot 0}f(0)+w^{0\cdot 1}f(1)+\cdots+w^{0\cdot (N-1)}f(N-1)$
$\Large \displaystyle F(1)=w^{1\cdot 0}f(0)+w^{1\cdot 1}f(1)+\cdots+w^{1\cdot (N-1)}f(N-1)$
$\Large \displaystyle F(2)=w^{2\cdot 0}f(0)+w^{2\cdot 1}f(1)+\cdots+w^{2\cdot (N-1)}f(N-1)$
$\Large \displaystyle F(N-1)=w^{(N-1)\cdot 0}f(0)+w^{(N-1)\cdot 1}f(1)+\cdots+w^{(N-1)\cdot (N-1)}f(N-1)$
$\Large \displaystyle \begin{pmatrix} F(0) \\ F(1) \\ F(2) \\ \vdots \\ F(N-1) \end{pmatrix} = \begin{pmatrix} w^{0\cdot 0} & w^{0\cdot 1} & w^{0\cdot 2} & \cdots & w^{0\cdot (N-1)} \\ w^{1\cdot 0} & w^{1\cdot 1} & w^{1\cdot 2} & \cdots & w^{1\cdot (N-1)} \\ w^{2\cdot 0} & w^{2\cdot 1} & w^{2\cdot 2} & \cdots & w^{2\cdot (N-1)} \\ \vdots & \vdots & \vdots & \ddots & \vdots \\ w^{(N-1)\cdot 0} & w^{(N-1)\cdot 1} & w^{(N-1)\cdot 2} & \cdots & w^{(N-1)\cdot (N-1)} \end{pmatrix} \begin{pmatrix} f(0) \\ f(1) \\ f(2) \\ \vdots \\ f(N-1) \end{pmatrix} $
$\Large \displaystyle \begin{pmatrix} F(0) \\ F(1) \\ F(2) \\ \vdots \\ F(N-1) \end{pmatrix} = W_N \begin{pmatrix} f(0) \\ f(1) \\ f(2) \\ \vdots \\ f(N-1) \end{pmatrix} $
$\Large \displaystyle \vec{y}=W_N \vec{x}$
$\Large \displaystyle W_N= \begin{pmatrix} 1 & 1 & 1 \\ 1 & w & w^2 \\ 1 & w^2 & w^4 \end{pmatrix} = \begin{pmatrix} 1 & 1 & 1 \\ 1 & w & w^2 \\ 1 & w^2 & w \end{pmatrix} $
$\Large \displaystyle W_N\vec{x}= \begin{pmatrix} 1 & 1 & 1\\ 1 & \omega & \omega^2\\ 1 & \omega^2 & \omega^4 \end{pmatrix} \vec{x} = \begin{pmatrix} 1 & 1 & 1\\ 1 & \omega & \omega^2\\ 1 & \omega^2 & \omega \end{pmatrix} \vec{x} $
$\Large \displaystyle C \vec{x}_k=\lambda_k \vec{x}_k$
$\Large \displaystyle \vec{x}_k=(1,\zeta_k,\zeta_k^2,\cdots,\zeta_k^{N-1})^T$
$\Large \displaystyle C \vec{x}_0=\lambda_0 \vec{x}_0$
$\Large \displaystyle C \vec{x}_1=\lambda_1 \vec{x}_1$
$\Large \displaystyle C \vec{x}_2=\lambda_2 \vec{x}_2$
以上
$\Large \displaystyle \hat{y}=a+bx\ \ (a,b\in R)$
$\Large \displaystyle y_n - \hat{y}_n=y_n - (a+bx_n)$
$\Large \displaystyle E=\sum_{n=1}^{N}(y_n - \hat{y}_n)^2=\sum_{n=1}^{N}(y_n - (a+b x_n))^2$
$\Large \displaystyle E=\sum_{n=1}^{N}(y_n - a - b x_n)^2$
$\Large \displaystyle =\sum_{n=1}^{N}(y_n^2 + a^2 + b^2 x_n^2 - 2a y_n +2a b x_n - 2b x_n y_n)$
$\Large \displaystyle \frac{\partial E}{\partial a}=\sum_{n=1}^{N}(2a - 2y_n + 2b x_n)=0$
$\Large \displaystyle \frac{\partial E}{\partial b}=\sum_{n=1}^{N}(2b x_n^2 + 2a x_n - 2x_n y_n)=0$
$\Large \displaystyle a N + b \sum_{n=1}^{N} x_n=\sum_{n=1}^{N} y_n\ \ (1)$
$\Large \displaystyle a \sum_{n=1}^{N}x_n + b \sum_{n=1}^{N} x_n^2=\sum_{n=1}^{N} x_n y_n\ \ (2)$
$\Large \displaystyle a= \frac{1}{N}(\sum_{n=1}^{N} y_n - b \sum_{n=1}^{N} x_n)=\bar{y}-b\bar{x}$
$\Large \displaystyle \sum_{n=1}^{N}x_n(\sum_{n=1}^{N} y_n - b \sum_{n=1}^{N} x_n) + N b \sum_{n=1}^{N} x_n^2=N\sum_{n=1}^{N} x_n y_n$
$\Large \displaystyle b (-(\sum_{n=1}^{N}x_n)^2 + N \sum_{n=1}^{N} x_n^2)=N\sum_{n=1}^{N} x_n y_n - \sum_{n=1}^{N}x_n\sum_{n=1}^{N} y_n$
$\Large \displaystyle b=\frac{N\sum_{n=1}^{N} x_n y_n - \sum_{n=1}^{N}x_n\sum_{n=1}^{N} y_n}{N \sum_{n=1}^{N} x_n^2 - (\sum_{n=1}^{N}x_n)^2}$
$\Large \displaystyle a=\frac{\sum_{n=1}^{N} y_n}{N} - \frac{\sum_{n=1}^{N} x_n}{N} \frac{N\sum_{n=1}^{N} x_n y_n - \sum_{n=1}^{N}x_n\sum_{n=1}^{N} y_n}{N \sum_{n=1}^{N} x_n^2 - (\sum_{n=1}^{N}x_n)^2}$
$\Large \displaystyle =\frac{\sum_{n=1}^{N} y_n}{N}\frac{N \sum_{n=1}^{N} x_n^2 - (\sum_{n=1}^{N}x_n)^2}{N \sum_{n=1}^{N} x_n^2 - (\sum_{n=1}^{N}x_n)^2} - \frac{\sum_{n=1}^{N} x_n}{N} \frac{N\sum_{n=1}^{N} x_n y_n - \sum_{n=1}^{N}x_n\sum_{n=1}^{N} y_n}{N \sum_{n=1}^{N} x_n^2 - (\sum_{n=1}^{N}x_n)^2}$
$\Large \displaystyle = \frac{\sum_{n=1}^{N} x_n^2\sum_{n=1}^{N} y_n - \sum_{n=1}^{N}x_n\sum_{n=1}^{N} x_n y_n}{N \sum_{n=1}^{N} x_n^2 - (\sum_{n=1}^{N}x_n)^2}$
$\Large \displaystyle \sum_{n=1}^{N} x_n=3+2-1=4$
$\Large \displaystyle \sum_{n=1}^{N} y_n=2+4+1=7$
$\Large \displaystyle \sum_{n=1}^{N} x_n^2=9+4+1=14$
$\Large \displaystyle \sum_{n=1}^{N} x_n y_n=6+8-1=13$
$\Large \displaystyle a=\frac{14*7-4*13}{3*14-4*4}=\frac{98-52}{42-16}=\frac{46}{26}=1.77$
$\Large \displaystyle b=\frac{3*13-4*7}{3*14-4*4}=\frac{39-28}{42-16}=\frac{11}{26}=0.42$
$\Large \displaystyle A= \begin{pmatrix} a & b \\ c & d \end{pmatrix} $
$\Large \displaystyle B= \begin{pmatrix} e & f \\ g & h \end{pmatrix} $
$\Large \displaystyle \begin{pmatrix} a & b \\ c & d \end{pmatrix} \begin{pmatrix} e & f \\ g & h \end{pmatrix} = \begin{pmatrix} ae+bg & af+bh \\ ce+dg & cf+dh \end{pmatrix} = \begin{pmatrix} 1 & 0 \\ 0 & 1 \end{pmatrix} $
$\Large \displaystyle ae+bg=1\ \ ①$
$\Large \displaystyle af+bh=0\ \ ②$
$\Large \displaystyle ce+dg=0\ \ ③$
$\Large \displaystyle cf+dh=1\ \ ④$
$\Large \displaystyle (ae+bg)d-(ce+dg)b=ade-bce=e(ad-bc)=d$
$\Large \displaystyle e=\frac{d}{ad-bc}$
$\Large \displaystyle (af+bh)d-(cf+dh)b=adf-bcf=f(ad-bc)=-b$
$\Large \displaystyle f=\frac{-b}{ad-bc}$
$\Large \displaystyle (ae+bg)c-(ce+dg)a=bcg-adg=-g(ad-bc)=c$
$\Large \displaystyle g=\frac{-c}{ad-bc}$
$\Large \displaystyle (af+bh)c-(cf+dh)a=bch-adh=-h(ad-bc)=-a$
$\Large \displaystyle h=\frac{a}{ad-bc}$
$\Large \displaystyle A^{-1}=B=\frac{1}{ad-bc} \begin{pmatrix} d & -b \\ -c & a \end{pmatrix} = \frac{1}{|A|} \begin{pmatrix} d & -b \\ -c & a \end{pmatrix} $
$\Large \displaystyle \begin{pmatrix} N & \sum_{n=1}^{N} x_n \\ \sum_{n=1}^{N} x_n & \sum_{n=1}^{N} x_n^2 \end{pmatrix} \begin{pmatrix} a \\ b \end{pmatrix} = \begin{pmatrix} \sum_{n=1}^{N} y_n \\ \sum_{n=1}^{N} x_n y_n \end{pmatrix} $
$\Large \displaystyle \begin{pmatrix} A & B \\ C & D \end{pmatrix} ^{-1}= \frac{1}{AD-BC} \begin{pmatrix} D & -B \\ -C & A \end{pmatrix} $
$\Large \displaystyle \begin{pmatrix} a \\ b \end{pmatrix} = \frac{1}{N\sum_{n=1}^{N} x_n^2-(\sum_{n=1}^{N} x_n)^2} \begin{pmatrix} \sum_{n=1}^{N}x_n^2 & -\sum_{n=1}^{N} x_n\\ -\sum_{n=1}^{N}x_n & N \end{pmatrix} \begin{pmatrix} \sum_{n=1}^{N}y_n \\ \sum_{n=1}^{N}x_n y_n \end{pmatrix} $
$\Large \displaystyle = \frac{1}{N\sum_{n=1}^{N} x_n^2-(\sum_{n=1}^{N} x_n)^2} \begin{pmatrix} \sum_{n=1}^{N}x_n^2 \sum_{n=1}^{N}y_n - \sum_{n=1}^{N}x_n\sum_{n=1}^{N}x_n y_n\\ N\sum_{n=1}^{N}x_n y_n - \sum_{n=1}^{N}x_n \sum_{n=1}^{N}y_n \end{pmatrix} $
$\Large \displaystyle X^T=(x_1,x_2,\cdots,x_D)$
$\Large \displaystyle \hat{y}=w_0+w_1 x_1+w_2 x_2+\cdots+w_D x_D$
$\Large \displaystyle (y_n-\hat{y}_n)^2=(y_n - (w_0+w_1 x_1+w_2 x_2+\cdots+w_D x_D))^2$
$\Large \displaystyle w^T=(w_0,w_1,w_2,\cdots,w_D)$
$\Large \displaystyle x^T=(1,x_1,x_2,\cdots,x_D)$
$\Large \displaystyle \hat{y}=w_0+w_1 x_1+w_2 x_2+\cdots+w_D x_D$
$\Large \displaystyle =(w_0,w_1,w_2,\cdots,w_D) \begin{pmatrix} 1 \\ x_1 \\ x_2 \\ \vdots \\ x_D \end{pmatrix} =w^T x $
$\Large \displaystyle \begin{pmatrix} \hat{y}_1 \\ \hat{y}_2 \\ \vdots \\ \hat{y}_N \end{pmatrix} = \begin{pmatrix} w^T x_1 \\ w^T x_2 \\ \vdots \\ w^T x_N \end{pmatrix} = \begin{pmatrix} x_1^T \\ x_2^T \\ \vdots \\ x_N^T \end{pmatrix} w $
$\Large \displaystyle \begin{pmatrix} \hat{y}_1 \\ \hat{y}_2 \\ \vdots \\ \hat{y}_N \end{pmatrix} = \begin{pmatrix} 1 & x_{11} & x_{12} & \cdots & x_{1D} \\ 1 & x_{21} & x_{22} & \cdots & x_{2D} \\ \vdots & & & & \vdots \\ 1 & x_{N1} & x_{N2} & \cdots & x_{ND} \\ \end{pmatrix} \begin{pmatrix} w_0 \\ w_1 \\ w_2 \\ \vdots \\ w_D \end{pmatrix} $
$\Large \displaystyle X= \begin{pmatrix} 1 & x_{11} & x_{12} & \cdots & x_{1D} \\ 1 & x_{21} & x_{22} & \cdots & x_{2D} \\ \vdots & & & & \vdots \\ 1 & x_{N1} & x_{N2} & \cdots & x_{ND} \\ \end{pmatrix} $
$\Large \displaystyle \hat{y}= \begin{pmatrix} \hat{y}_1 \\ \hat{y}_2 \\ \vdots \\ \hat{y}_N \end{pmatrix} $
$\Large \displaystyle \hat{y}=Xw $
$\Large \displaystyle \bar{x}=\frac{1}{N}\sum_{n=1}^{N}x_n$
$\Large \displaystyle \bar{y}=\frac{1}{N}\sum_{n=1}^{N}y_n$
$\Large \displaystyle b=\frac{N\sum_{n=1}^{N} x_n y_n - \sum_{n=1}^{N}x_n\sum_{n=1}^{N} y_n}{N \sum_{n=1}^{N} x_n^2 - (\sum_{n=1}^{N}x_n)^2}$
$\Large \displaystyle =\frac {\sum_{n=1}^{N} x_n y_n - N\bar{x}\bar{y}} {\sum_{n=1}^{N} x_n^2 - N\bar{x}^2}$
$\Large \displaystyle \sum_{n=1}^{N}(x_n-\bar{x})(y_n-\bar{y}) = \sum_{n=1}^{N}(x_n y_n - x_n \bar{y}- \bar{x}y_n + \bar{x}\bar{y}) = \sum_{n=1}^{N}(x_n y_n) - N\bar{x}\bar{y} $
$\Large \displaystyle \sum_{n=1}^{N}(x_n-\bar{x})^2 = \sum_{n=1}^{N}(x_n^2 - 2\bar{x}x_n + \bar{x}^2) = \sum_{n=1}^{N}(x_n^2) - N\bar{x}^2 $
$\Large \displaystyle b= \frac{\sum_{n=1}^{N}(x_n-\bar{x})(y_n-\bar{y})} {\sum_{n=1}^{N}(x_n-\bar{x})^2} $
$\Large \displaystyle = \frac{\sum_{n=1}^{N}(x_n-\bar{x})(y_n-\bar{y})} {\sqrt{\sum_{n=1}^{N}(x_n-\bar{x})^2}\sqrt{\sum_{n=1}^{N}(y_n-\bar{y})^2}} \cdot \frac{\sqrt{\sum_{n=1}^{N}(y_n-\bar{y})^2}} {\sqrt{\sum_{n=1}^{N}(x_n-\bar{x})^2}} $
$\Large \displaystyle =\gamma_{xy}\cdot \frac{\sigma_y}{\sigma_x} $
以上
$\Large \displaystyle p(x)=\frac{1}{\sqrt{2\pi\sigma^2}}exp\{-\frac{1}{2}\frac{(x-\mu)^2}{\sigma^2}\}$
$\Large \displaystyle \mu=E[X]$
$\Large \displaystyle V[X]=E[(X-\mu)^2]$
$\Large \displaystyle \sigma=\sqrt{V[X]}$
$\Large \displaystyle Cov[X,Y]=E[(X-\mu)(Y-\nu)]$
$\Large \displaystyle Cov[X,Y]=Cov[Y,X]$
$\Large \displaystyle Cov[X,X]=V[X]$
$\Large \displaystyle Cov[X+a,Y+b]=Cov[X,Y]$
$\Large \displaystyle Cov[aX,bY]=abCov[X,Y]$
$\Large \displaystyle \rho_{XY}=\frac{Cov[X,Y]}{\sqrt{\mathstrut V[X]}\sqrt{\mathstrut V[Y]}}$
$\Large \displaystyle \hat{X}=\frac{X}{\sqrt{V[X]}}$
$\Large \displaystyle \hat{Y}=\frac{Y}{\sqrt{V[Y]}}$
$\Large \displaystyle Cov[\hat{X},\hat{Y}]=Cov[\frac{X}{\sqrt{V[X]}},\frac{Y}{\sqrt{V[Y]}}]=\frac{Cov[X,Y]}{\sqrt{\mathstrut V[X]}\sqrt{\mathstrut V[Y]}}=\rho_{XY}$
$X_1$ | $X_2$ | $X_3$ | |
---|---|---|---|
$X_1$ | $Cov(X_1,X_1)$ | $Cov(X_1,X_2)$ | $Cov(X_1,X_3)$ |
$X_2$ | $Cov(X_2,X_1)$ | $Cov(X_2,X_2)$ | $Cov(X_2,X_3)$ |
$X_3$ | $Cov(X_3,X_1)$ | $Cov(X_3,X_2)$ | $Cov(X_3,X_3)$ |
$\Large \displaystyle V[X]= \begin{pmatrix} V[X_1] & Cov[X_1,X_2] & Cov[X_1,X_3] \\ Cov[X_2,X_1] & V[X_2] & Cov[X_2,X_3] \\ Cov[X_3,X_1] & Cov[X_3,X_2] & V[X_3] \\ \end{pmatrix} $
$\Large \displaystyle X= \begin{pmatrix} X_1 \\ X_2 \\ \vdots \\ X_n \end{pmatrix} $
$\Large \displaystyle V[X]=E[(X-\mu)(X-\mu)^T]$
$\Large \displaystyle \mu=E[X]$
$\Large \displaystyle V[X]= \begin{pmatrix} V[X_1] & Cov[X_1,X_2] & Cov[X_1,X_3] \\ Cov[X_2,X_1] & V[X_2] & Cov[X_2,X_3] \\ Cov[X_3,X_1] & Cov[X_3,X_2] & V[X_3] \\ \end{pmatrix} $
$\Large \displaystyle \gamma_k =\frac{1}{T}\sum_{t=k+1}^{T}(y_t-\bar{y})(y_{t-k}-\bar{y})$
$\Large \displaystyle \phi(x)=(1,x,x^2,x^3)^T$
$\Large \displaystyle w=(w_0,w_1,w_2,w_3)^T$
$\Large \displaystyle y=w_0+w_1 x + w_2 x^2 + w_3 x^3$
$\Large \displaystyle =w^T \phi(x)$
$\Large \displaystyle \phi_h(x)=exp(-\frac{(x-\mu_h)^2}{\sigma^2})$
$\Large \displaystyle y=w^T \phi(x)=\sum_{h=-H}^{H}w_h exp(-\frac{(x-\mu_h)^2}{\sigma^2})$
$\Large \displaystyle \begin{pmatrix} \hat{y_1} \\ \hat{y_2} \\ \vdots \\ \hat{y_N} \end{pmatrix} = \begin{pmatrix} \phi_0(x_1) & \phi_1(x_1) & \cdots & \phi_H(x_1) \\ \phi_0(x_2) & \phi_1(x_2) & \cdots & \phi_H(x_2) \\ \vdots & \vdots & \ddots & \vdots \\ \phi_0(x_N) & \phi_1(x_N) & \cdots & \phi_H(x_N) \\ \end{pmatrix} \begin{pmatrix} w_0 \\ w_1 \\ \vdots \\ \vdots \\ w_H \end{pmatrix} $
$\Large \displaystyle \hat{y}=\Phi w$
$\Large \displaystyle w \sim \mathscr{N}(0,\lambda^2 I)$
$\Large \displaystyle K(x,x')=exp(-\frac{|x-x'|^2}{2\sigma^2})$
$\Large \displaystyle \rho_f(t) =\int_{-\infty}^{\infty}f(t+\tau)f(\tau)d\tau$
$\Large \displaystyle f(x)=\int_{-\infty}^{\infty}f_1(y)f_2(x-y)dy=f_1(x)*f_2(x)$
$\Large \displaystyle \int_{-\infty}^{\infty}f_1(\tau)f_2(t-\tau)d\tau \leftrightarrow F_1(\omega)F_2(\omega) $
$\Large \displaystyle f_1(t)f_2(t) \leftrightarrow \frac{1}{2\pi}\int_{-\infty}^{\infty}F_1(y)F_2(\omega-y)dy$
$\Large \displaystyle F(\omega) =A(\omega)e^{j\phi(\omega)}$
$\Large \displaystyle E(\omega) =A^2(\omega)$
$\Large \displaystyle E(\omega) =A^2(\omega)$
$\Large \displaystyle g(t) =\int_{-\infty}^{\infty}f(\tau)h(t-\tau)d\tau=f(t)*h(t)$
$\Large \displaystyle \rho_g(t) =\rho_f(t)*h(t)*h(-t)$
$\Large \displaystyle g(t)\leftrightarrow F(\omega)H(\omega)$
以下の記事は、 「 自己相関関数と周期性解析」を引用しています。
$\Large \displaystyle x(k+1)=Ax(k)+bv(k)$
$\Large \displaystyle y(k)=c^Tx(k)+w(k)$
$\Large \displaystyle \hat{x}(k)=G(k)\hat{x}^{-}(k)+g(k)y(k)$
事後推定値 = G(k)・事前推定値 + g(k)・観測値
$\Large \displaystyle \tilde{x}(k)=x(k)-\hat{x}(k)$
$\Large \displaystyle E[\tilde{x}(k)y(i)]=0,\ \ i=1,2,\cdots,k-1$
以上
$\Large \displaystyle y(k)=\sum_{l=0}^{\infty}g(l)u(k-l)$
$\Large \displaystyle y_t=\int_0^t g(\tau)u(t-\tau)d\tau=g(t)*u(t)$
$\Large \displaystyle y(z)=G(z)u(z)$
$\Large \displaystyle u(z)=\sum_{k=0}^{\infty}u(k)z^{-k}$
$\Large \displaystyle y(z)=\sum_{k=0}^{\infty}y(k)z^{-k}$
$\Large \displaystyle G(z)=\sum_{k=0}^{\infty}g(k)z^{-k}$
$\Large \displaystyle y(k)+a_1 y(k-1)+\cdots+a_n y(k-n)=b_1 u(k-1)+b_2 u(k-2)+\cdots+b_mu(k-m)$
$\Large \displaystyle (1+a_1 z^{-1}+\cdots+a_n z^{-n})y(z)=(b_1 z^{-1}+b_2 z^{-2}+\cdots+b_m z^{-m})u(z)$
$\Large \displaystyle G(z)=\frac{y(z)}{u(z)}=\frac{b_1 z^{-1}+b_2 z^{-2}+\cdots+b_m z^{-m}}{1+a_1 z^{-1}+\cdots+a_n z^{-n}}$
$\Large \displaystyle qy(k)=y(k+1)$
$\Large \displaystyle q^{-1}y(k)=y(k-1)$
$\Large \displaystyle A(q)y(k)=B(q)u(k)+w(k)$
以上
$\Large \displaystyle x(k+1)=x(k)+v(k)$
$\Large \displaystyle y(k)=x(k)+w(k)$
$\Large \displaystyle x(0)\sim N(0,\sigma_0^2)$
$\Large \displaystyle v(k)\sim N(0,\sigma_v^2)$
$\Large \displaystyle w(k)\sim N(0,\sigma_w^2)$
$\Large \displaystyle y=cx +w$
$\Large \displaystyle \hat{x}=f(y)=\alpha y + \beta$
$\Large \displaystyle e=x-\hat{x}$
$\Large \displaystyle E[e]=E[x-\hat{x}]=E[x-\alpha y - \beta]$
$\Large \displaystyle =E[x-\alpha (cx+w) - \beta]$
$\Large \displaystyle =(1-\alpha c)\bar{x}-\alpha\bar{w}-\beta=0$
$\Large \displaystyle \hat{\beta}=(1-\alpha c)\bar{x}-\alpha\bar{w}=\bar{x}-\alpha(c\bar{x}+\bar{w})$
$\Large \displaystyle E[\{e-E(e)\}^2]=E[[(x-\alpha y-\beta)-\{(1-\alpha c)\bar{x}-\alpha \bar{w}-\beta \}]^2]$
$\Large \displaystyle =E[\{ x-\alpha(cx+w)-(1-\alpha c)\bar{x}+\alpha \bar{w}\}^2]$
$\Large \displaystyle =E[\{ (1-\alpha c)(x-\bar{x})-\alpha(w-\bar{w}) \}^2]$
$\Large \displaystyle =E[(1-\alpha c)^2(x-\bar{x})^2+\alpha^2(w-\bar{w})^2 -2\alpha(1-\alpha c)(x-\bar{x})(w-\bar{w}) ]$
$\Large \displaystyle =(1-\alpha c)^2E[(x-\bar{x})^2]+\alpha^2E[(w-\bar{w})^2] -2\alpha(1-\alpha c)E[(x-\bar{x})(w-\bar{w})] $
$\Large \displaystyle =(1-\alpha c)^2\sigma_x^2+\alpha^2\sigma_w^2$
$\Large \displaystyle E[\{e-E(e)\}^2]=(1-2\alpha c+\alpha^2 c^2)\sigma_x^2+\alpha^2\sigma_w^2$
$\Large \displaystyle =(c^2 \sigma_x^2 +\sigma_w^2)\alpha^2 - 2c\sigma_x^2 \alpha + \sigma_x^2$
$\Large \displaystyle =(c^2 \sigma_x^2 +\sigma_w^2)[\alpha^2 -\frac{2c\sigma_x^2}{c^2 \sigma_x^2 +\sigma_w^2}\alpha+\frac{\sigma_w^2}{c^2 \sigma_x^2 +\sigma_w^2} ]$
$\Large \displaystyle =(c^2 \sigma_x^2 +\sigma_w^2)(\alpha -\frac{c\sigma_x^2}{c^2 \sigma_x^2 + \sigma_w^2})^2 - \frac{c^2 \sigma_x^4}{c^2 \sigma_x^2 + \sigma_w^2} + \sigma_x^2$
$\Large \displaystyle =(c^2 \sigma_x^2 +\sigma_w^2)(\alpha -\frac{c\sigma_x^2}{c^2 \sigma_x^2 + \sigma_w^2})^2 + \frac{\sigma_x^2 \sigma_w^2}{c^2 \sigma_x^2 + \sigma_w^2}$
$\Large \displaystyle =(c^2 \sigma_x^2 +\sigma_w^2)(\alpha -\frac{c\sigma_w^{-2}}{c^2 \sigma_w^{-2} + \sigma_x^{-2}})^2 + \frac{1}{c^2 \sigma_w^{-2} + \sigma_x^{-2}}$
$\Large \displaystyle \sigma^2=\frac{1}{c^2 \sigma_w^{-2} + \sigma_x^{-2}}$
$\Large \displaystyle E[\{e-E(e)\}^2]=(c^2 \sigma_x^2+\sigma_w^2)(\alpha - c\sigma_w^{-2}\sigma^2)^2+\sigma^2$
$\Large \displaystyle \alpha=c\frac{\sigma^2}{\sigma_w^2}$
$\Large \displaystyle E[\{e-E(e)\}^2]=\sigma^2$
$\Large \displaystyle x(k+1)=Ax(k)+bv(k)$
$\Large \displaystyle y(k)=c^Tx(k)+w(k)$
$\Large \displaystyle x(1)=x(0)+v(0)$
$\Large \displaystyle y(1)=x(1)+w(1)=x(0)+v(0)+w(1)$
$\Large \displaystyle \begin{pmatrix} y(1) \\ x(1) \end{pmatrix} = \begin{pmatrix} 1 & 1 & 1 \\ 0 & 1 & 1 \end{pmatrix} \begin{pmatrix} w(1) \\ v(0) \\ x(0) \end{pmatrix} $
$\Large \displaystyle x(k+1)=Ax(k)+bv(k)$
$\Large \displaystyle y(k)=c^Tx(k)+w(k)$