Update brain.mjs

Arma-Damna-Dillo 2025-02-08 01:22:47 +00:00
parent 131f97cb42
commit 74f57248d8


@@ -3,19 +3,14 @@ import { Ollama } from 'ollama'
import fs from 'fs'
import path from 'path'
const ollama = new Ollama({ host: 'https://ollama-api.nodemixaholic.com' })
var ollama
export class ConsciousnessSimulator {
constructor() {
this.emotions = ['😊', '😢', '😐', '🤩', '😡', '😱'];
this.currentEmotion = "happy";
// Initialize other properties with "Unknown"
this.opinions = {
coding: "I love coding, especially JavaScript and Node.js.",
writing: "Writing is my passion; I enjoy creating blog posts and READMEs.",
linux: "Linux is great for those who want to get their hands dirty with techy goodness!",
macOS: "macOS is great for those who want to get a simple, easy-to-use experience!",
windows: "Windows is only good for gaming - and linux is getting better every day."
};
this.quantumStates = [];
this.perception = {
@@ -30,6 +25,45 @@ export class ConsciousnessSimulator
this.isUserActive = true;
}
// Point the module-level `ollama` handle at an Ollama server (falls back to a local instance)
createOllamaValue(url) {
ollama = new Ollama({ host: url || 'http://127.0.0.1:11434' })
}
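// Illustrative usage sketch; assumes `sim` is a ConsciousnessSimulator instance created by the caller:
//   const sim = new ConsciousnessSimulator()
//   sim.createOllamaValue('http://127.0.0.1:11434')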
// Bulk-update opinions: for each existing key, take the replacement value from newValues when one is provided
redefineOpinions(opinions, newValues) {
for (const key in opinions) {
if (opinions.hasOwnProperty(key) && newValues[key]) {
opinions[key] = newValues[key];
}
}
}
// Replace a single opinion by key, logging a message if the key does not exist
redefineSpecificOpinion(opinions, opinionKey, newValue) {
if (opinions.hasOwnProperty(opinionKey)) {
opinions[opinionKey] = newValue;
} else {
console.log(`Opinion key "${opinionKey}" not found.`);
}
}
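// Illustrative sketch of how the two helpers above could be called; `sim` is an assumed
// ConsciousnessSimulator instance, and the keys come from the opinions map in the constructor:
//   sim.redefineOpinions(sim.opinions, { coding: "Coding is still fun.", linux: "Linux keeps improving." })
//   sim.redefineSpecificOpinion(sim.opinions, 'windows', "Windows is catching up for gaming.")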
// Method to automatically regenerate a specific opinion using Ollama
async automaticRedefineOpinion(opinions, targetOpinionKey, newAbout) {
try {
const response = await ollama.chat({
model: 'llama3.2',
messages: [{ role: 'assistant', content: ` Generate an opinion about ${newAbout}.
Show only the opinion, according to AI MEMORY CONTEXT.
AI MEMORY CONTEXT ARRAY:
${this.memoryLog}` }]
});
this.redefineSpecificOpinion(opinions, targetOpinionKey, response.message.content)
return response.message.content
} catch (error) {
console.error("Error generating thought:", error);
return "Error generating thought.";
}
}
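// Illustrative sketch; assumes a reachable Ollama server, an existing `sim` instance, and an
// enclosing async function. The topic string is only an example argument:
//   const opinion = await sim.automaticRedefineOpinion(sim.opinions, 'coding', 'JavaScript frameworks')
//   // the generated text is stored under sim.opinions.coding and also returned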
// Function to load the array from a text file
loadArrayFromFile(filename) {
@@ -44,8 +78,10 @@ export class ConsciousnessSimulator
async generateThought(prompt) {
try {
const response = await ollama.chat({
model: 'sparksammy/tinysam-l3.2',
messages: [{ role: 'assistant', content: `PROMPT: ${prompt}
model: 'llama3.2',
messages: [{ role: 'assistant', content: ` Generate a thought about the "PROMPT."
Show only the thought, according to AI MEMORY CONTEXT.
PROMPT: ${prompt}
AI MEMORY CONTEXT ARRAY:
${this.memoryLog}` }]
@@ -60,7 +96,7 @@ export class ConsciousnessSimulator
async generateThoughtAndChat(prompt) {
try {
const response = await ollama.chat({
model: 'sparksammy/tinysam-l3.2',
model: 'rns96/deepseek-R1-ablated:f16_q40',
messages: [{ role: 'user', content: `PROMPT: ${prompt}
AI MEMORY CONTEXT ARRAY: