Regression
Generate Data
> npts := 50:                                            # number of training points
> with(stats);
> noise := map(x->.1*x,[stats[random,normald](npts)]):   # Gaussian noise scaled to s.d. 0.1
> inputs := [stats[random,uniform](npts)]:               # x values from the uniform generator (default range 0..1 — confirm)
> targs := map(x->2*x + .5,inputs) + noise:              # targets: true line y = 2x + 0.5, plus noise
> with(plots):
> with(linalg):
Warning, new definition for norm
Warning, new definition for trace
> datapts := transpose([inputs,targs]):                  # pair inputs with targets as (x, y) points
> dataplot := pointplot(datapts, color = red):           # scatter plot of the noisy data
> display(dataplot);
Network Function
> w := [stats[random,normald](2)] ;  # random initial weights: w[1] = bias, w[2] = slope
> nout := proc(w,pt)
>   # Network output for a single input pt: an affine map with
>   # intercept w[1] and slope w[2].
>   w[1] + w[2]*pt;
> end;
> plotNet := proc(w,range,col)
>   # Draw the network's line over the interval [range[1], range[2]]
>   # in the given colour; returns the PLOT structure.
>   local lo, hi;
>   lo := range[1];
>   hi := range[2];
>   plot(nout(w, x), x = lo .. hi, color = col);
> end;
> display(dataplot,plotNet(w,[min(op(inputs)),max(op(inputs))],green));  # data (red) with the untrained fit (green)
> calcSqErr := proc(inputs,targs,w) local totalerrSq, i;
>   # Total squared error (with the conventional 1/2 factor) of the
>   # network with weights w over the whole data set.
>   totalerrSq := 0;
>   for i from 1 to nops(inputs) do
>     totalerrSq := totalerrSq + .5*(targs[i] - nout(w,inputs[i]))^2;
>   od;
>   # Return the accumulated error explicitly instead of relying on the
>   # loop statement's last value being the procedure result.
>   totalerrSq;
> end:
> sqErrs := [calcSqErr(inputs,targs,w)];  # start the error history with the untrained error
>
train := proc(inputs,targs,mu)
  local pick, i, index, err, wold; global w;
  # One pass of stochastic gradient descent (delta rule) on the linear
  # model: 100 randomly chosen examples, learning rate mu.
  # Side effect: updates the global weight vector w.
  # Returns a plot of the data with the old fit (blue) and new fit (green).
>   wold := w;
>   pick := rand(1..npts);               # NOTE(review): relies on global npts
>   for i from 1 to 100 do
>     index := pick();                   # choose a random training example
>     err := targs[index] - nout(w,inputs[index]);
>     w[1] := w[1] + mu*err;             # bias term
>     w[2] := w[2] + mu*err*inputs[index];   # slope term
>   od;
>
  display(dataplot,
          plotNet(w,[min(op(inputs)),max(op(inputs))],green),
          plotNet(wold,[min(op(inputs)),max(op(inputs))],blue));
> end:
> wold := w:                       # remember the weights before training
> train(inputs,targs,.02);         # one training pass, learning rate 0.02
> sqErrs := [op(sqErrs),calcSqErr(inputs,targs,w)];   # append the post-training error
> print("The old weight:",wold); print(" The new weight:",w);
Plot the error as a function of time.
>
s1 := [seq(i,i=1..nops(sqErrs))];                 # x-axis: index of each recorded error
pointplot(transpose([s1,sqErrs ] ),color=red);    # raw squared error vs. training pass
> sqLogErrs :=evalf(map(x->log10(x),sqErrs));     # log10 of the errors for a log-scale view
> pointplot(transpose([s1,sqLogErrs ] ),color=red);
>