@misc{Vardanyan_Edgar_A._Analyzing,
  author        = {Vardanyan, Edgar A.},
  title         = {Analyzing Steady State Variance in {Hebbian} Learning: A Moment Closure Approach},
  howpublished  = {online},
  publisher     = {Изд-во НАН РА},
  type          = {Հոդված},
  keywords      = {Mathematical cybernetics, Computer science},
  abstract      = {Hebbian learning, an important concept in neural networks, is the basis for various learning algorithms that model the adaptation of neural connections, also known as synapses. Among these models, Oja’s rule stands out as an important example, giving valuable insights into the dynamics of unsupervised learning algorithms. The fact that the final steady-state solution of a single-layer network that learns using Oja’s rule equals the solution of Principal component analysis is well known. However, the way in which the learning rate can affect the variance of the final parameters is less explored. In this paper, we investigate how different learning rates can influence the variance of parameters in Oja’s rule, utilizing the moment closure approximation. By focusing on the variance, we offer new perspectives on the behavior of Oja’s rule under varying conditions. We derive a closed-form equation that connects the parameter variance with the learning rate and shows that the relationship between these is linear. This gives valuable insights that may help to optimize the learning process of Hebbian models.},
  internal-note = {NOTE(review): entry has no year field and no url/doi; add them when known — many styles warn on a missing year for citations.},
}