@comment{Key 48835 kept from the original export so existing \cite{48835} references still resolve.}
@inproceedings{48835,
  title     = {Learning Visual Motion in Recurrent Neural Networks},
  booktitle = {Neural Information Processing Systems ({NIPS} 2012)},
  year      = {2012},
  month     = dec,
  address   = {Lake Tahoe},
  abstract  = {We present a dynamic nonlinear generative model for visual motion based on a latent representation of binary-gated Gaussian variables. Trained on sequences of images, the model learns to represent different movement directions in different variables. We use an online approximate-inference scheme that can be mapped to the dynamics of networks of neurons. Probed with drifting grating stimuli and moving bars of light, neurons in the model show patterns of responses analogous to those of direction-selective simple cells in primary visual cortex. Most model neurons also show speed tuning and respond equally well to a range of motion directions and speeds aligned to the constraint line of their respective preferred speed. We show how these computations are enabled by a specific pattern of recurrent connections learned by the model.},
  url       = {http://papers.nips.cc/paper/4814-learning-visual-motion-in-recurrent-neural-networks},
  author    = {Pachitariu, Marius and Sahani, Maneesh},
}