Posts

Showing posts from May, 2017

Chapter 13 listing

LAMSTAR CODE (MATLAB)
Main.m

clear all
close all
X = train_pattern;
%pause(1)
%close all
n = 12 % Number of subwords
flag = zeros(1,n);

% To make 12 subwords from 1 input
for i = 1:min(size(X)),
    X_r{i} = reshape(X(:,i),6,6);
    for j = 1:n,
        if (j<=6),
            X_in{i}(j,:) = X_r{i}(:,j)';
        else
            X_in{i}(j,:) = X_r{i}(j-6,:);
        end
    end
    % To check if a subword is all '0's and make its normalized value equal to zero,
    % and to normalize all other input subwords
    p(1,:) = zeros(1,6);
    for k = 1:n,
        for t = 1:6,
            if (X_in{i}(k,t) ~= p(1,t)),
                X_norm{i}(k,:) = X_in{i}(k,:)/sqrt(sum(X_in{i}(k,:).^2));
            else
                X_norm{i}(k,:) = zeros(1,6);
            end
        end
    end
end %%% End of for

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Dynamic building of neurons
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Building of the first neuron is done as a Kohonen-layer neuron
% (this is for all the subwords in the first input pattern for all SOM modules)
i = 1;
ct = 1;
while (i<=n),
    i
    cl = 0;
    for t = 1:6,
        if (X_norm{ct}(i,t)==...
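The listing is cut off above. As a quick orientation, the sketch below (my own MATLAB, not the book's code, with illustrative variable names) shows the core of the preprocessing that Main.m performs: each 6x6 input pattern is split into 12 subwords (6 columns and 6 rows), and every non-zero subword is normalized to unit length while all-zero subwords are left at zero.

% Minimal sketch, assuming a single 36x1 binary pattern x (illustrative, not train_pattern)
x  = double(rand(36,1) > 0.5);     % hypothetical 6x6 pattern, column-stacked
Xr = reshape(x, 6, 6);
n  = 12;                           % 6 column subwords + 6 row subwords
X_in = zeros(n, 6);
for j = 1:n
    if j <= 6
        X_in(j,:) = Xr(:,j)';      % column subword
    else
        X_in(j,:) = Xr(j-6,:);     % row subword
    end
end
X_norm = zeros(n, 6);
for k = 1:n
    s = sqrt(sum(X_in(k,:).^2));   % subword length
    if s > 0
        X_norm(k,:) = X_in(k,:) / s;   % normalize non-zero subwords to unit length
    end                                % all-zero subwords stay zero
end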

Chapter 6 listing

/* */
#include <math.h>
#include <iostream>
#include <fstream>
using namespace std;

#define N_DATASETS 9
#define N_INPUTS   36
#define N_OUTPUTS  2
#define N_LAYERS   3

// {# inputs, # of neurons in L1, # of neurons in L2, # of neurons in L3}
short conf[4] = {N_INPUTS, 2, 2, N_OUTPUTS};
float **w[3], *z[3], *y[3], *Fi[3], eta;   // According to the number of layers
ofstream ErrorFile("error.txt", ios::out);

// 3 training sets
bool dataset[N_DATASETS][N_INPUTS] = {
    { 0, 0, 1, 1, 0, 0,   // 'A'
      0, 1, 0, 0, 1, 0,
      1, 0, 0, 0, 0, 1,
      1, 1, 1, 1, 1, 1,
      1, 0, 0, 0, 0, 1,
      1, 0, 0, 0, 0, 1},
    { 1, 1, 1, 1, 1, 0,   // 'B'
      1, 0, 0, 0, 0, 1,
      1, 1, 1, 1, 1, 0,
      1, 0, 0, 0, 0, 1,
      1, 0, 0, 0, 0, 1,
      1, 1, 1, 1, 1, 0},
    { 0, 1, 1, 1, 1, 1,   // 'C'
      1, 0, 0, 0, 0, 0,
      1, 0, 0, 0, 0, 0,
      1, 0, 0, 0, 0, 0,
      1, 0, 0, 0, 0, 0,
      0, 1, 1, 1, 1, 1},
    { 1, 1, 1, 1, 1, 0,   // 'D'
      1, 0, 0, 0, 0, 1,
      1, 0, 0, 0, 0, 1,
      1, 0, 0, 0, 0, 1,
      1, 0, 0, 0, 0, 1,
      1, 1, 1, 1, 1, 0},
    {...
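The preview ends inside the dataset definition. To make the layer setup concrete, here is a small forward-pass sketch written in MATLAB to match the other listings on this page (it is not the chapter's C++ code). It assumes the same conf layout of {36, 2, 2, 2} and a logistic sigmoid activation, which is my assumption rather than something shown in the excerpt.

% Forward-pass sketch (MATLAB), assuming conf = {36, 2, 2, 2} and sigmoid activations
conf = [36 2 2 2];                 % inputs, neurons in L1, L2, L3 (outputs)
W = cell(1,3);
for L = 1:3
    W{L} = (rand(conf(L+1), conf(L)) - 0.5) * 2;   % random weights in [-1, 1]
end
x = double(rand(36,1) > 0.5);      % one 6x6 bitmap, column-stacked (illustrative)
y = x;
for L = 1:3
    z = W{L} * y;                  % weighted sums at layer L
    y = 1 ./ (1 + exp(-z));        % assumed logistic sigmoid activation
end
y                                  % activations of the 2 output neurons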

Chapter 5 listing

% Training Patterns
X = train_pattern;
nu = 0.04;

% Displaying the 15 training patterns
figure(1)
for i = 1:15,
    subplot(5,3,i)
    display_image(X(:,i),6,6,1);
end

% Testing Patterns
Y = test_pattern;
nu = 0.04;

% Displaying the 15 testing patterns
figure(2)
for i = 1:15,
    subplot(5,3,i)
    display_image(Y(:,i),6,6,1);
end

% Initializations
index = zeros(2,6);
counter1 = 0;
counter2 = 0;

% Assign random weights initially at the start of training
w_hidden = (rand(6,36)-0.5)*2
w_output = (rand(2,6)-0.5)*2
%load w_hidden.mat
%load w_output.mat

% Function to calculate the parameters (z, y at the hidden and output layers,
% given the weights at the two layers)
[z_hidden, w_hidden, y_hidden, z_output, w_output, y_output, counter] = calculation(w_hidden, w_output, X);

disp('Before Any Changes')
w_output
z_output
y_output
save z_output z_output;
save z_hidden z_hidden;
save y_hidden y_hidden;
save y_output ...
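The call to calculation() is shown, but the function itself is not included in this preview. The sketch below is a guess at its forward-pass core for the 36-6-2 network above, assuming a logistic sigmoid activation; the shapes follow the weight matrices w_hidden (6x36) and w_output (2x6) defined in the listing, and the random X is only an illustrative stand-in for train_pattern.

% Guess at the forward-pass core of calculation() (sigmoid is an assumption)
X = double(rand(36,15) > 0.5);          % illustrative stand-in for train_pattern
w_hidden = (rand(6,36) - 0.5) * 2;
w_output = (rand(2,6) - 0.5) * 2;
z_hidden = w_hidden * X;                % 6 x 15 weighted sums at the hidden layer
y_hidden = 1 ./ (1 + exp(-z_hidden));   % hidden-layer outputs
z_output = w_output * y_hidden;         % 2 x 15 weighted sums at the output layer
y_output = 1 ./ (1 + exp(-z_output));   % output-layer outputs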