changeset 3684:db12bdeaafd4 octave-forge

Modified Files:
	doc/latex/common/bibliography.tex
	doc/latex/developers/codingGuideline/codingGuideline.tex
	doc/pdf/neuralNetworkToolboxForOctaveUsersGuide.pdf
Added Files:
	doc/latex/asymptote/transferFunctions/logsig.asy
	doc/latex/asymptote/transferFunctions/logsiglogo.asy
	doc/latex/asymptote/transferFunctions/purelin.asy
	doc/latex/asymptote/transferFunctions/purelinlogo.asy
	doc/latex/asymptote/transferFunctions/tansig.asy
	doc/latex/asymptote/transferFunctions/tansiglogo.asy
	doc/latex/common/version.tex
author michaelschmid
date Tue, 24 Jul 2007 12:19:48 +0000
parents f21855d2fa4c
children af142a29105b
files main/nnet/doc/latex/asymptote/transferFunctions/logsig.asy main/nnet/doc/latex/asymptote/transferFunctions/logsiglogo.asy main/nnet/doc/latex/asymptote/transferFunctions/purelin.asy main/nnet/doc/latex/asymptote/transferFunctions/purelinlogo.asy main/nnet/doc/latex/asymptote/transferFunctions/tansig.asy main/nnet/doc/latex/asymptote/transferFunctions/tansiglogo.asy main/nnet/doc/latex/common/bibliography.tex main/nnet/doc/latex/common/version.tex main/nnet/doc/latex/developers/codingGuideline/codingGuideline.tex main/nnet/doc/pdf/neuralNetworkToolboxForOctaveUsersGuide.pdf
diffstat 10 files changed, 119 insertions(+), 12 deletions(-)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/main/nnet/doc/latex/asymptote/transferFunctions/logsig.asy	Tue Jul 24 12:19:48 2007 +0000
@@ -0,0 +1,18 @@
+import graph;
+size(100,0);
+
+real f(real x) {return 1/(1+exp(-x));}
+pair F(real x) {return (x,f(x));}
+
+
+xaxis("$n$",EndArrow);
+yaxis("$a$",-1.75,1.75,EndArrow);
+
+draw(graph(f,-2.5,2.5,operator ..));
+draw((-2.5,-1)--(2.5,-1),currentpen+dashed);
+draw((-2.5,1)--(2.5,1),currentpen+dashed);
+
+label("$a = logsig(n) $",(0,-2.00));
+label("$0$",(0.2,-0.3));
+label("$-1$",(0.6,-1.35));
+label("$+1$",(0.75,1.35));
\ No newline at end of file
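
For reference, the curve plotted in logsig.asy above is the log-sigmoid transfer function computed by f(x) in the script; in the notation of the figure label,

\[ a = logsig(n) = \frac{1}{1 + e^{-n}}, \]

which approaches $0$ for large negative $n$ and $+1$ for large positive $n$.
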
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/main/nnet/doc/latex/asymptote/transferFunctions/logsiglogo.asy	Tue Jul 24 12:19:48 2007 +0000
@@ -0,0 +1,17 @@
+// logsig symbol for nnet
+
+// define size of outer square = 1cm
+unitsize(1cm);
+draw(unitsquare);
+
+// draw a short horizontal reference line from left to right
+draw((0.1,0.3)--(0.9,0.3));
+
+// now draw logsig
+import graph;
+
+real f(real x) {return tanh(x);}
+draw(shift(0.5,0.5)*((scale(0.2)*graph(f,-2.0,2.0,operator ..))));
+//shift(2,1);
+
+//scale(real 0.5);
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/main/nnet/doc/latex/asymptote/transferFunctions/purelin.asy	Tue Jul 24 12:19:48 2007 +0000
@@ -0,0 +1,19 @@
+// purelin
+import graph;
+size(100,0);
+
+real f(real x) {return 1*x;}
+pair F(real x) {return (x,f(x));}
+
+
+xaxis("$n$",EndArrow);
+yaxis("$a$",-1.75,1.75,EndArrow);
+
+draw(graph(f,-2.5,2.5,operator ..));
+draw((-2.5,-1)--(2.5,-1),currentpen+dashed);
+draw((-2.5,1)--(2.5,1),currentpen+dashed);
+
+label("$a = purelin(n) $",(0,-2.00));
+label("$0$",(0.2,-0.3));
+label("$-1$",(0.6,-1.35));
+label("$+1$",(0.75,1.35));
\ No newline at end of file
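
Similarly, purelin.asy above plots the pure linear transfer function, which passes its net input through unchanged:

\[ a = purelin(n) = n. \]

Unlike the sigmoid functions it does not saturate; the dashed lines at $-1$ and $+1$ appear to be kept only for visual consistency with the other transfer-function plots.
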
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/main/nnet/doc/latex/asymptote/transferFunctions/purelinlogo.asy	Tue Jul 24 12:19:48 2007 +0000
@@ -0,0 +1,17 @@
+// purelin symbol for nnet
+
+// define size of outer square = 1cm
+unitsize(1cm);
+draw(unitsquare);
+
+// draw a short horizontal reference line from left to right
+draw((0.1,0.5)--(0.9,0.5));
+
+// now draw purelin
+import graph;
+
+real f(real x) {return 1*x;}
+draw(shift(0.5,0.5)*((scale(0.2)*graph(f,-2.0,2.0,operator ..))));
+//shift(2,1);
+
+//scale(real 0.5);
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/main/nnet/doc/latex/asymptote/transferFunctions/tansig.asy	Tue Jul 24 12:19:48 2007 +0000
@@ -0,0 +1,18 @@
+import graph;
+size(100,0);
+
+real f(real x) {return tanh(x);}
+pair F(real x) {return (x,f(x));}
+
+
+xaxis("$n$",EndArrow);
+yaxis("$a$",-1.75,1.75,EndArrow);
+
+draw(graph(f,-2.5,2.5,operator ..));
+draw((-2.5,-1)--(2.5,-1),currentpen+dashed);
+draw((-2.5,1)--(2.5,1),currentpen+dashed);
+
+label("$a = tansig(n) $",(0,-2.00));
+label("$0$",(0.2,-0.3));
+label("$-1$",(0.6,-1.35));
+label("$+1$",(0.75,1.35));
\ No newline at end of file
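
For reference, tansig.asy above plots the hyperbolic tangent sigmoid; in the notation of the figure label,

\[ a = tansig(n) = \tanh(n) = \frac{e^{n} - e^{-n}}{e^{n} + e^{-n}}, \]

which saturates at the dashed asymptotes $-1$ and $+1$ shown in the plot.
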
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/main/nnet/doc/latex/asymptote/transferFunctions/tansiglogo.asy	Tue Jul 24 12:19:48 2007 +0000
@@ -0,0 +1,17 @@
+// tansig symbol for nnet
+
+// define size of outer square = 1cm
+unitsize(1cm);
+draw(unitsquare);
+
+// draw a short horizontal reference line from left to right
+draw((0.1,0.5)--(0.9,0.5));
+
+// now draw tansig
+import graph;
+
+real f(real x) {return tanh(x);}
+draw(shift(0.5,0.5)*((scale(0.2)*graph(f,-2.0,2.0,operator ..))));
+//shift(2,1);
+
+//scale(real 0.5);
\ No newline at end of file
--- a/main/nnet/doc/latex/common/bibliography.tex	Mon Jul 23 23:41:31 2007 +0000
+++ b/main/nnet/doc/latex/common/bibliography.tex	Tue Jul 24 12:19:48 2007 +0000
@@ -20,7 +20,7 @@
 
 \bibitem [2]{2} The MathWorks, Inc.
 
-MATLAB Help, MATLAB Version 7.1 (R14SP3), Neural Network Toolbox Version 4.0.6 (R14SP3) 
+MATLAB Online Help
 
 \bibitem [3]{3} Steven W. Smith
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/main/nnet/doc/latex/common/version.tex	Tue Jul 24 12:19:48 2007 +0000
@@ -0,0 +1,1 @@
+Version: 0.1.3
\ No newline at end of file
--- a/main/nnet/doc/latex/developers/codingGuideline/codingGuideline.tex	Mon Jul 23 23:41:31 2007 +0000
+++ b/main/nnet/doc/latex/developers/codingGuideline/codingGuideline.tex	Tue Jul 24 12:19:48 2007 +0000
@@ -28,24 +28,24 @@
 \end{tabbing}
 
 \subsection{Nn}
-\textbf{Nn} is a cell array and has one entry for each layer. This means in the actual allowed network
-structure, 2 entries.\\
-In \textbf{Nn\{1,1\}} are the values for the first (and only) hidden layer. The size of this matrix depends
+\textbf{Nn} is a cell array with one entry for each layer; in practice this means two or three entries.\\
+In \textbf{Nn\{1,1\}} are the values for the first hidden layer. The size of this matrix depends
 on the number of neurons used for this layer.\\
-In \textbf{Nn\{2,1\}} are the values for the output layer. The size of this matrix depends
-on the number of neurons used for this layer.\\
+In \textbf{Nn\{2,1\}} are the values for the second hidden layer or the output layer. The size of this matrix depends
+on the number of neurons used for this layer, and so on.\\
+In general, \textbf{Nn\{x,1\}} holds the values for layer \textbf{x}; the number of layers is not limited.\\
 
 \subsection{Aa}
-\textbf{Aa} is a cell array and has one entry for each layer. This means in the actual allowed network
-structure, 2 entries.\\
-In \textbf{Aa\{1,1\}} are the values for the first (and only) hidden layer. The size of this matrix depends
+\textbf{Aa} is a cell array and has one entry for each layer.\\
+In \textbf{Aa\{1,1\}} are the values for the first hidden layer. The size of this matrix depends
 on the number of neurons used for this layer.\\
-In \textbf{Aa\{2,1\}} are the values for the output layer. The size of this matrix depends
+In \textbf{Aa\{2,1\}} are the values for the second hidden layer or the output layer. The size of this matrix depends
 on the number of neurons used for this layer.\\
+See \textbf{Nn} for a more detailed description.\\
 
 \subsection{vE}
-\textbf{vE} is also a cell array which holds (till now) in the second element the error vector. It's not completly clear, why in the second element.\\
-The number of rows depends on the number of output neurons. For one output neuron, \textbf{vE} holds only one row, for 2 output neurons, this holds of course 2 rows. 
+\textbf{vE} is also a cell array; it holds the error vector in its last (second) element. It is not completely clear why it is stored in the second element.\\
+The number of rows depends on the number of output neurons: for one output neuron, \textbf{vE} holds a single row; for two output neurons, it holds two rows, and so on.
 
 \subsection{Jj}
 This is the short term for the Jacobi matrix.
\ No newline at end of file
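
To make the cell-array layout described in the codingGuideline.tex hunk above more concrete, here is a minimal Octave sketch of how Nn, Aa and vE could look for a small network with one hidden layer of 3 neurons, 2 output neurons and a batch of 5 training vectors. The sizes and the names nHidden, nOut and nVec are illustrative assumptions, not taken from the package sources.

% Illustrative layout only: sizes and names below are assumptions.
nHidden = 3;                        % neurons in the first (hidden) layer
nOut    = 2;                        % neurons in the output layer
nVec    = 5;                        % training vectors in the batch

Nn = cell(2,1);                     % one entry per layer
Nn{1,1} = randn(nHidden, nVec);     % net inputs of the hidden layer
Nn{2,1} = randn(nOut, nVec);        % net inputs of the output layer

Aa = cell(2,1);                     % same structure as Nn
Aa{1,1} = 1 ./ (1 + exp(-Nn{1,1})); % logsig activations of the hidden layer
Aa{2,1} = Nn{2,1};                  % purelin (identity) activations of the output layer

vE = cell(2,1);                     % error vector lives in the last (second) element
vE{2,1} = zeros(nOut, nVec);        % one row per output neuron

With this layout, Nn{x,1} and Aa{x,1} always refer to the same layer x, and the number of rows of vE{2,1} grows with the number of output neurons, as described above.
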
Binary file main/nnet/doc/pdf/neuralNetworkToolboxForOctaveUsersGuide.pdf has changed