Upload files to "/"
This commit is contained in:
parent
4d6df73fac
commit
13e2a4b8ee
2 changed files with 254 additions and 0 deletions
132
sam-neurons-words.js
Normal file
@@ -0,0 +1,132 @@
class NeuralNetwork {
  constructor(inputNodes, hiddenNodes, outputNodes) {
    this.inputNodes = inputNodes;
    this.hiddenNodes = hiddenNodes;
    this.outputNodes = outputNodes;

    // Initialize weights with random values between -1 and 1
    this.weights_ih = this.initializeWeights(this.hiddenNodes, this.inputNodes);
    this.weights_ho = this.initializeWeights(this.outputNodes, this.hiddenNodes);

    // Biases
    this.bias_h = this.initializeBiases(this.hiddenNodes);
    this.bias_o = this.initializeBiases(this.outputNodes);

    // Learning rate
    this.learningRate = 0.1;
  }

  // Helper method to initialize weights
  initializeWeights(rows, cols) {
    return Array(rows).fill().map(() => Array(cols).fill().map(() => Math.random() * 2 - 1));
  }

  // Helper method to initialize biases
  initializeBiases(size) {
    return Array(size).fill().map(() => Math.random() * 2 - 1);
  }

  // Sigmoid activation function
  sigmoid(x) {
    return 1 / (1 + Math.exp(-x));
  }

  // Sigmoid derivative (used for backpropagation); expects the already-activated output
  sigmoidDerivative(x) {
    return x * (1 - x);
  }

  // Feedforward function
  feedforward(inputArray) {
    // Calculate hidden layer outputs
    let hidden = this.weights_ih.map((row, i) =>
      this.sigmoid(row.reduce((sum, weight, j) => sum + weight * inputArray[j], 0) + this.bias_h[i])
    );

    // Calculate output layer outputs
    let output = this.weights_ho.map((row, i) =>
      this.sigmoid(row.reduce((sum, weight, j) => sum + weight * hidden[j], 0) + this.bias_o[i])
    );

    return output;
  }

  // Train the network on one input/target pair
  train(inputArray, targetArray) {
    // Feedforward
    let hidden = this.weights_ih.map((row, i) =>
      this.sigmoid(row.reduce((sum, weight, j) => sum + weight * inputArray[j], 0) + this.bias_h[i])
    );
    let outputs = this.weights_ho.map((row, i) =>
      this.sigmoid(row.reduce((sum, weight, j) => sum + weight * hidden[j], 0) + this.bias_o[i])
    );

    // Calculate the output errors
    let outputErrors = targetArray.map((target, i) => target - outputs[i]);

    // Calculate output gradients
    let outputGradients = outputs.map((output, i) =>
      this.sigmoidDerivative(output) * outputErrors[i] * this.learningRate
    );

    // Calculate hidden errors: each output's error propagated back through weights_ho
    let hiddenErrors = this.weights_ho[0].map((_, i) =>
      this.weights_ho.reduce((sum, row, k) => sum + row[i] * outputErrors[k], 0)
    );

    // Calculate hidden gradients
    let hiddenGradients = hidden.map((h, i) =>
      this.sigmoidDerivative(h) * hiddenErrors[i] * this.learningRate
    );

    // Update weights and biases
    this.weights_ho = this.weights_ho.map((row, i) =>
      row.map((weight, j) => weight + outputGradients[i] * hidden[j])
    );
    this.weights_ih = this.weights_ih.map((row, i) =>
      row.map((weight, j) => weight + hiddenGradients[i] * inputArray[j])
    );
    this.bias_o = this.bias_o.map((bias, i) => bias + outputGradients[i]);
    this.bias_h = this.bias_h.map((bias, i) => bias + hiddenGradients[i]);
  }
}

// Example usage:
let nn = new NeuralNetwork(3, 4, 1); // Increased hidden nodes to 4

// Example dataset: each target is the number of 1s in the input pattern.
// Note: the sigmoid output only produces values in (0, 1), so targets of 2 and 3 cannot be reached exactly.
let strings = ["hello", "neural", "world"];

let inputs = [
  [0, 0, 0],
  [1, 0, 0],
  [1, 0, 1],
  [1, 1, 1]
];
let targets = [
  [0],
  [1],
  [2],
  [3]
];

// Training the neural network
for (let i = 0; i < 50000; i++) { // Increased training iterations
  let index = Math.floor(Math.random() * 4);
  nn.train(inputs[index], targets[index]);
}

// Testing the neural network
let feeds = inputs;

feeds.forEach((item) => {
  let feededVal = Math.round(nn.feedforward(item)[0]); // feedforward returns an array; take the single output
  console.log([feededVal, item]);
  if (feededVal === 1) {
    // Use the number of 1s in the input to pick a word
    let sum = 0;
    item.forEach(num => sum += num);
    try { console.log(strings[sum - 1]); } catch {}
  }
});
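A minimal sketch (not part of the uploaded file) of one way to compare the trained network's predictions against the stored targets, assuming the nn, inputs, and targets defined above:

inputs.forEach((input, i) => {
  // Single output node, so take index 0 of the returned array
  let predicted = nn.feedforward(input)[0];
  console.log(`input=${JSON.stringify(input)} target=${targets[i][0]} predicted=${predicted.toFixed(3)}`);
});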
122
sam-neurons.js
Normal file
@@ -0,0 +1,122 @@
class NeuralNetwork {
  constructor(inputNodes, hiddenNodes, outputNodes) {
    this.inputNodes = inputNodes;
    this.hiddenNodes = hiddenNodes;
    this.outputNodes = outputNodes;

    // Initialize weights with random values between -1 and 1
    this.weights_ih = this.initializeWeights(this.hiddenNodes, this.inputNodes);
    this.weights_ho = this.initializeWeights(this.outputNodes, this.hiddenNodes);

    // Biases
    this.bias_h = this.initializeBiases(this.hiddenNodes);
    this.bias_o = this.initializeBiases(this.outputNodes);

    // Learning rate
    this.learningRate = 0.1;
  }

  // Helper method to initialize weights
  initializeWeights(rows, cols) {
    return Array(rows).fill().map(() => Array(cols).fill().map(() => Math.random() * 2 - 1));
  }

  // Helper method to initialize biases
  initializeBiases(size) {
    return Array(size).fill().map(() => Math.random() * 2 - 1);
  }

  // Sigmoid activation function
  sigmoid(x) {
    return 1 / (1 + Math.exp(-x));
  }

  // Sigmoid derivative (used for backpropagation); expects the already-activated output
  sigmoidDerivative(x) {
    return x * (1 - x);
  }

  // Feedforward function
  feedforward(inputArray) {
    // Calculate hidden layer outputs
    let hidden = this.weights_ih.map((row, i) =>
      this.sigmoid(row.reduce((sum, weight, j) => sum + weight * inputArray[j], 0) + this.bias_h[i])
    );

    // Calculate output layer outputs
    let output = this.weights_ho.map((row, i) =>
      this.sigmoid(row.reduce((sum, weight, j) => sum + weight * hidden[j], 0) + this.bias_o[i])
    );

    return output;
  }

  // Train the network on one input/target pair
  train(inputArray, targetArray) {
    // Feedforward
    let hidden = this.weights_ih.map((row, i) =>
      this.sigmoid(row.reduce((sum, weight, j) => sum + weight * inputArray[j], 0) + this.bias_h[i])
    );
    let outputs = this.weights_ho.map((row, i) =>
      this.sigmoid(row.reduce((sum, weight, j) => sum + weight * hidden[j], 0) + this.bias_o[i])
    );

    // Calculate the output errors
    let outputErrors = targetArray.map((target, i) => target - outputs[i]);

    // Calculate output gradients
    let outputGradients = outputs.map((output, i) =>
      this.sigmoidDerivative(output) * outputErrors[i] * this.learningRate
    );

    // Calculate hidden errors: each output's error propagated back through weights_ho
    let hiddenErrors = this.weights_ho[0].map((_, i) =>
      this.weights_ho.reduce((sum, row, k) => sum + row[i] * outputErrors[k], 0)
    );

    // Calculate hidden gradients
    let hiddenGradients = hidden.map((h, i) =>
      this.sigmoidDerivative(h) * hiddenErrors[i] * this.learningRate
    );

    // Update weights and biases
    this.weights_ho = this.weights_ho.map((row, i) =>
      row.map((weight, j) => weight + outputGradients[i] * hidden[j])
    );
    this.weights_ih = this.weights_ih.map((row, i) =>
      row.map((weight, j) => weight + hiddenGradients[i] * inputArray[j])
    );
    this.bias_o = this.bias_o.map((bias, i) => bias + outputGradients[i]);
    this.bias_h = this.bias_h.map((bias, i) => bias + hiddenGradients[i]);
  }
}

// Example usage:
let nn = new NeuralNetwork(2, 4, 1); // Increased hidden nodes to 4

// Example dataset (XOR problem)
let inputs = [
  [0, 0],
  [0, 1],
  [1, 0],
  [1, 1]
];
let targets = [
  [0],
  [1],
  [1],
  [0]
];

// Training the neural network
for (let i = 0; i < 50000; i++) { // Increased training iterations
  let index = Math.floor(Math.random() * 4);
  nn.train(inputs[index], targets[index]);
}

// Testing the neural network
console.log(nn.feedforward([0, 0]));
console.log(nn.feedforward([0, 1]));
console.log(nn.feedforward([1, 0]));
console.log(nn.feedforward([1, 1]));
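A minimal sketch (not part of the uploaded file) of rounding those raw outputs to check them against the XOR targets, assuming the nn, inputs, and targets defined above:

inputs.forEach((input, i) => {
  // Round the single sigmoid output to 0 or 1 and compare with the expected XOR result
  let rounded = Math.round(nn.feedforward(input)[0]);
  console.log(`[${input.join(', ')}] -> ${rounded} (expected ${targets[i][0]})`);
});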