Very simple neural network
Based on a series of blog posts (links omitted in this copy):
Here is a very simple version for the Prime. Programming the solution in CAS Python syntax turned out to be more awkward than HP-PPL, so the latter is used.
The Train() and Think() functions are exposed for reuse, but you just need to run the NeuralNetwork function to perform training and test all possible outputs of the sample network.
It is surprisingly slow (about 20s), even on a G2 Prime, so some optimisation in the training phase would be beneficial to allow for the increased number of iterations in the more complex models in the original article.
Code:
EXPORT NeuralNetwork()
// Trains the sample network on the four training examples, then
// evaluates and prints every possible three-bit input.
// Returns the trained weight column vector.
BEGIN
LOCAL w,a,b,c;
w:=Train([[0,0,1],[1,1,1],[1,0,1],[0,1,1]],[[0,1,1,0]],10000);
PRINT();
PRINT("Weights = "+w);
// Walk all eight input combinations (c varies fastest,
// matching the order 000,001,...,111).
FOR a FROM 0 TO 1 DO
FOR b FROM 0 TO 1 DO
FOR c FROM 0 TO 1 DO
PRINT("Think["+a+","+b+","+c+"] = "+Think([[a,b,c]],w));
END;
END;
END;
RETURN(w);
END;
EXPORT Train(trngSetIns,ttrngSetOuts,iterations)
// Trains a single-layer perceptron with a sigmoid activation.
// trngSetIns:   training inputs, one example per row.
// ttrngSetOuts: expected outputs as a single row vector
//               (transposed internally to a column).
// iterations:   number of full-batch gradient passes.
// Returns the trained weight column vector.
BEGIN
LOCAL trngSetOuts=TRN(ttrngSetOuts);
LOCAL list,output,n,t,u,v;
// Size the weight vector from the input width instead of the
// original hard-coded 3, so wider training sets work unchanged.
LOCAL wghts=RANDMAT(colDim(trngSetIns),1,−1,1);
FOR n FROM 1 TO iterations DO
output:=trngSetIns*wghts;
list:=mat2list(output);
list:=1/(1+e^(−list)); // element-wise sigmoid of the raw outputs
output:=list2mat(list,1);
t:=trngSetOuts-output; // error = expected − actual
// list still holds the sigmoid outputs; s*(1−s) is its derivative
list:=list*(1−list);
u:=list2mat(list,1);
v:=t.*u; // error scaled by the sigmoid gradient
wghts:=wghts+TRN(trngSetIns)*v; // gradient-descent weight update
END;
RETURN wghts;
END;
EXPORT Think(m,wghts)
// Feeds the input row(s) m through the network defined by the
// weight column vector wghts; returns the sigmoid-activated
// output as a column matrix.
BEGIN
LOCAL sig;
sig:=1/(1+e^(−mat2list(m*wghts)));
RETURN(list2mat(sig,1));
END;
Output:
Code:
Weights = [[9.6728],[−0.2082],[−4.6293]]
Think[0,0,0] = [[0.5000]]
Think[0,0,1] = [[0.0097]]
Think[0,1,0] = [[0.4481]]
Think[0,1,1] = [[0.0079]]
Think[1,0,0] = [[0.9999]]
Think[1,0,1] = [[0.9936]]
Think[1,1,0] = [[0.9999]]
Think[1,1,1] = [[0.9921]]
|