/**
 * A minimal fully-connected feedforward neural network with a single hidden
 * layer, sigmoid activations, and online (per-sample) gradient-descent
 * training via backpropagation.
 */
class NeuralNetwork {
  /**
   * @param {number} inputNodes  - number of input features
   * @param {number} hiddenNodes - number of hidden-layer neurons
   * @param {number} outputNodes - number of output neurons
   */
  constructor(inputNodes, hiddenNodes, outputNodes) {
    this.inputNodes = inputNodes;
    this.hiddenNodes = hiddenNodes;
    this.outputNodes = outputNodes;

    // Weights initialized uniformly at random in [-1, 1).
    // weights_ih: hiddenNodes x inputNodes, weights_ho: outputNodes x hiddenNodes.
    this.weights_ih = this.initializeWeights(this.hiddenNodes, this.inputNodes);
    this.weights_ho = this.initializeWeights(this.outputNodes, this.hiddenNodes);

    // Biases, one per neuron in each layer.
    this.bias_h = this.initializeBiases(this.hiddenNodes);
    this.bias_o = this.initializeBiases(this.outputNodes);

    this.learningRate = 0.1;
  }

  /**
   * Build a rows x cols matrix of random values in [-1, 1).
   * @param {number} rows
   * @param {number} cols
   * @returns {number[][]}
   */
  initializeWeights(rows, cols) {
    return Array.from({ length: rows }, () =>
      Array.from({ length: cols }, () => Math.random() * 2 - 1)
    );
  }

  /**
   * Build a vector of `size` random values in [-1, 1).
   * @param {number} size
   * @returns {number[]}
   */
  initializeBiases(size) {
    return Array.from({ length: size }, () => Math.random() * 2 - 1);
  }

  /** Logistic sigmoid activation: 1 / (1 + e^(-x)). */
  sigmoid(x) {
    return 1 / (1 + Math.exp(-x));
  }

  /**
   * Derivative of the sigmoid expressed in terms of its OUTPUT:
   * if y = sigmoid(x), then d/dx sigmoid(x) = y * (1 - y).
   * @param {number} y - an activation value already passed through sigmoid
   */
  sigmoidDerivative(y) {
    return y * (1 - y);
  }

  /**
   * One dense layer: sigmoid(weights * inputs + biases).
   * Shared by feedforward() and train() so the two forward passes cannot
   * drift out of sync.
   * @param {number[]}   inputs
   * @param {number[][]} weights - one row per neuron in this layer
   * @param {number[]}   biases  - one entry per neuron in this layer
   * @returns {number[]} layer activations
   */
  computeLayer(inputs, weights, biases) {
    return weights.map((row, i) =>
      this.sigmoid(
        row.reduce((sum, weight, j) => sum + weight * inputs[j], 0) + biases[i]
      )
    );
  }

  /**
   * Run the network on one sample.
   * @param {number[]} inputArray - length must equal inputNodes
   * @returns {number[]} output activations (length outputNodes, each in (0, 1))
   */
  feedforward(inputArray) {
    const hidden = this.computeLayer(inputArray, this.weights_ih, this.bias_h);
    return this.computeLayer(hidden, this.weights_ho, this.bias_o);
  }

  /**
   * One step of online backpropagation on a single (input, target) pair.
   * @param {number[]} inputArray  - length inputNodes
   * @param {number[]} targetArray - length outputNodes
   */
  train(inputArray, targetArray) {
    // Forward pass; the intermediate activations are needed for gradients.
    const hidden = this.computeLayer(inputArray, this.weights_ih, this.bias_h);
    const outputs = this.computeLayer(hidden, this.weights_ho, this.bias_o);

    // Output-layer error and gradient (learning rate folded into gradient).
    const outputErrors = targetArray.map((target, i) => target - outputs[i]);
    const outputGradients = outputs.map(
      (output, i) =>
        this.sigmoidDerivative(output) * outputErrors[i] * this.learningRate
    );

    // Hidden-layer error: transpose(weights_ho) * outputErrors.
    // BUG FIX: the original used outputErrors[0] for EVERY output row,
    // which is only correct when outputNodes === 1; each row k must be
    // weighted by its own error outputErrors[k].
    const hiddenErrors = hidden.map((_, i) =>
      this.weights_ho.reduce((sum, row, k) => sum + row[i] * outputErrors[k], 0)
    );
    const hiddenGradients = hidden.map(
      (h, i) => this.sigmoidDerivative(h) * hiddenErrors[i] * this.learningRate
    );

    // Gradient-descent updates (computed from pre-update weights above).
    this.weights_ho = this.weights_ho.map((row, i) =>
      row.map((weight, j) => weight + outputGradients[i] * hidden[j])
    );
    this.weights_ih = this.weights_ih.map((row, i) =>
      row.map((weight, j) => weight + hiddenGradients[i] * inputArray[j])
    );
    this.bias_o = this.bias_o.map((bias, i) => bias + outputGradients[i]);
    this.bias_h = this.bias_h.map((bias, i) => bias + hiddenGradients[i]);
  }
}

// --- Example usage: learn the XOR function ---
const nn = new NeuralNetwork(2, 4, 1); // 4 hidden nodes: XOR is not linearly separable

const inputs = [
  [0, 0],
  [0, 1],
  [1, 0],
  [1, 1],
];
const targets = [[0], [1], [1], [0]];

// Online training: pick one random sample per iteration.
for (let i = 0; i < 50000; i++) {
  const index = Math.floor(Math.random() * 4);
  nn.train(inputs[index], targets[index]);
}

// Testing the neural network: outputs should approach [0], [1], [1], [0].
console.log(nn.feedforward([0, 0]));
console.log(nn.feedforward([0, 1]));
console.log(nn.feedforward([1, 0]));
console.log(nn.feedforward([1, 1]));