# random inputs with normalized Hebbian rule 
# divide by the weight length
# 
#  integrate and see the weights become anticorrelated
#
#  change the input mean to 2 and watch the weights become 
#  positive and similar since the principal component is now
#  (1,1)
#
#  set the parameter cv=1 -- this turns on covariance learning
#  and subtracts away the mean of the activity  
# the weights return to (-,+)
#


# two independent Gaussian samples drawn each step (sd sig1, sig2)
n1=normal(0,sig1)
n2=normal(0,sig2)
# correlated input pair: shared component n1, anti-shared component n2,
# both offset by the common mean
x1=n1+n2+mean
x2=n1-n2+mean
# postsynaptic output: linear weighted sum of the two inputs
v=w1*x1+w2*x2
# expected output <v> = mean*(w1+w2); subtracted only when cv=1,
# which converts the plain Hebbian rule into a covariance rule
vbar=cv*mean*(w1+w2)
# unnormalized Hebbian update with learning rate eps
w1p=w1+eps*(v-vbar)*x1
w2p=w2+eps*(v-vbar)*x2
# Euclidean length of the updated weight vector
r=sqrt(w1p^2+w2p^2)
# normalize: divide by the weight length so |w| stays 1
# (with meth=discrete, w' is the next-iterate map value, not a derivative)
w1'=w1p/r
w2'=w2p/r
# input noise amplitudes and learning rate
par sig1=.06,sig2=.25
par eps=.02
# mean: common input offset; cv: 0 = plain Hebbian, 1 = covariance learning
par mean=0,cv=0
init w1=.1,w2=.2
# expose the raw inputs for plotting
aux in1=x1
aux in2=x2
# iterate as a discrete map rather than integrating an ODE
@ meth=discrete
@ total=4000
# plot both weights on a fixed axis range
@ nplot=2,yp2=w2,ylo=-1.5,yhi=1.5,xhi=4000
done