Update index.mjs
This commit is contained in:
parent 6091b5987f
commit 9cbabf89e2
1 changed file with 80 additions and 15 deletions
index.mjs (95 changed lines)
@@ -1,22 +1,41 @@
 import { RecursiveUrlLoader } from "@langchain/community/document_loaders/web/recursive_url";
 import { compile } from "html-to-text";

-import { writeFile } from 'fs/promises';
+import { writeFile, fs } from 'fs/promises';

 import { exec } from 'child_process';
 import { promisify } from 'util';

+import readline from 'node:readline';
+
+import ollama from 'ollama'
+
 // Promisify exec for using async/await
 const execPromise = promisify(exec);

 const modelfilePath = "/Volumes/AGI/agi-mf"

+const systemSavePath = "/Volumes/AGI/agi-save"
+
 const modelID = "sparksammy/agsamantha"

 const maxDepthCount = 11

+function ask(q) {
+  let ans = "";
+  const rl = readline.createInterface({
+    input: process.stdin,
+    output: process.stdout,
+  });
+  rl.question(q, a => {
+    ans = a
+    rl.close();
+  });
+
+}
+
 // Function to run the publishing commands
-async function runPublishCommands() {
+async function runPublishCommands(sys) {
   try {
     // Execute the 'ollama create' command
     const createCommand = `ollama create ${modelID} -f ${modelfilePath}`;
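Review note on this hunk: as far as I can tell, 'fs/promises' has no named export `fs`, so the new `import { writeFile, fs } from 'fs/promises'` leaves nothing usable behind `fs.readFile` later in the file; and `ask()` stores the answer inside the `rl.question` callback but returns nothing, so `await ask(...)` in the new `talkLoop()` resolves to `undefined`. A minimal sketch of how both could be handled, assuming Node 17+ where `node:readline/promises` is available:

```js
// Sketch only: import exactly what the file uses from fs/promises.
import { writeFile, readFile } from 'node:fs/promises';
import { createInterface } from 'node:readline/promises';

// Promise-based ask(): resolves with the user's answer so callers can await it.
async function ask(q) {
  const rl = createInterface({
    input: process.stdin,
    output: process.stdout,
  });
  const ans = await rl.question(q); // resolves when the user presses Enter
  rl.close();
  return ans;
}
```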
@@ -42,13 +61,11 @@ async function runPublishCommands() {
 }


-const ags_template_part1 = `FROM stable-code
-FROM yi-coder:1.5b
-FROM granite-code:8b
+const ags_template_part1 = ` # base systems
+FROM deepseek-coder
 FROM dolphin-mistral
-FROM dolphin-llama3
-FROM dolphin-mixtral
-FROM knoopx/mobile-vlm:3b-fp16
+FROM CognitiveComputations/dolphin-llama3.1
+FROM llava

 # sets the temperature to 1 [higher is more creative, lower is more coherent]
 PARAMETER temperature .75
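Review note: the template still stacks several FROM lines; as far as I know an Ollama Modelfile resolves to a single FROM base per build, so the extra lines are unlikely to be merged. If the goal is just to switch bases easily, a hedged sketch (the `BASE_MODEL` constant is hypothetical, not in this commit):

```js
// Hypothetical: pick one base model per build instead of stacking FROM lines.
// Assumption: any locally pulled Ollama model tag works as the base here.
const BASE_MODEL = "dolphin-mistral";

const ags_template_part1 = `FROM ${BASE_MODEL}

# sets the temperature to 1 [higher is more creative, lower is more coherent]
PARAMETER temperature .75
`;
```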
@@ -117,6 +134,18 @@ ${c}
 `
 }

+function createUserContextPart(c) {
+  return `
+
+***USER CHATS FOR EXTRA KNOWLEDGE BEGIN***
+
+${c}
+
+***USER CHATS FOR EXTRA KNOWLEDGE END***
+
+`
+}
+
 function generateSearchTerm(hostname) {
   //return `https://search.sparksammy.com/search.php?q=site%3A${encodeURIComponent(String(hostname))}&p=0&t=0`
   return `http://${hostname}`
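For reference, `createUserContextPart` just wraps the accumulated chat log in BEGIN/END markers; a quick usage sketch (the transcript below is made up):

```js
// Example only: wrap a fabricated two-line transcript in the knowledge markers.
const sampleChat = "USER: hello\nAI: hi there";
console.log(createUserContextPart(sampleChat));
// Prints the transcript between the BEGIN/END marker lines, padded with blank lines.
```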
@@ -150,22 +179,40 @@ async function contextAdd(hostnames) {
   return contexts;
 }

+let userAGIInput = `
+
+`;
+
 async function generateModelfile(c) {
-  let ags_modelfile = ags_template_part1;
+  let ags_modelfile = ""
+  try {
+    const ags_modelfile = await fs.readFile(`${systemSavePath}`, { encoding: 'utf8' });
+  } catch (err) {
+    ags_modelfile = ags_template_part1;
+  }
+
   for (const item of c) {
     try {
       ags_modelfile += createContextPart(`${JSON.stringify(item)}`);
     } catch {} //very hacky.
   }
+  ags_modelfile += createUserContextPart(userAGIInput)
+  await writeFile(systemSavePath, ags_modelfile)
+    .then(() => {
+      console.log('File written successfully!');
+    })
+    .catch(err => {
+      console.error('Error writing file:', err);
+    });
   ags_modelfile += ags_template_finalpart;
   return ags_modelfile;
 }

-async function main() {
+async function learner() {
   try {
-    await contextAdd(["en.wikipedia.org", "toontownrewritten.wiki", "cnn.com", "rezero.fandom.com", "fategrandorder.fandom.com"]);
-    await contextAdd(["clubpenguin.fandom.com", "foxnews.com", "nytimes.com"])
-    await contextAdd(["stackoverflow.com"]);
+    await contextAdd(["en.wikipedia.org", "toontownrewritten.wiki", "rezero.fandom.com", "fategrandorder.fandom.com"]);
+    await contextAdd(["68k.news"])
+    await contextAdd(["old.reddit.com"]);
     //await contextAdd(["tea.texas.gov/student-assessment/staar/released-test-questions/2024-staar-algebra-i-answer-key.pdf", "tea.texas.gov/student-assessment/staar/released-test-questions/2024-staar-english-ii-answer-key.pdf", "tea.texas.gov/student-assessment/staar/released-test-questions/2024-staar-biology-answer-key.pdf", "tea.texas.gov/student-assessment/staar/released-test-questions/2024-staar-us-history-answer-key.pdf"])
     const modelfile = await generateModelfile(contexts);
     console.log(modelfile);
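Review note on this hunk: the `const ags_modelfile` inside the `try` block shadows the outer `let ags_modelfile`, so even a successfully read save file is discarded and the outer variable stays `""`; the read also depends on the questionable `fs` import flagged above; and `await writeFile(...).then().catch()` mixes the two async styles. A minimal corrected sketch, assuming `readFile` is imported from `'node:fs/promises'` as in the earlier note:

```js
// Sketch only: reuse a previously saved modelfile if present, else start from the template.
async function generateModelfile(c) {
  let ags_modelfile;
  try {
    ags_modelfile = await readFile(systemSavePath, { encoding: 'utf8' }); // no shadowing const
  } catch {
    ags_modelfile = ags_template_part1; // save file missing or unreadable
  }

  for (const item of c) {
    try {
      ags_modelfile += createContextPart(JSON.stringify(item));
    } catch {} // keep the original "very hacky" guard
  }

  ags_modelfile += createUserContextPart(userAGIInput);
  try {
    await writeFile(systemSavePath, ags_modelfile); // plain await instead of await + .then/.catch
    console.log('File written successfully!');
  } catch (err) {
    console.error('Error writing file:', err);
  }

  ags_modelfile += ags_template_finalpart;
  return ags_modelfile;
}
```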
@@ -186,11 +233,29 @@ async function main() {

 const delay = ms => new Promise(res => setTimeout(res, ms));

-async function mainLoop() {
+async function learningLoop() {
+  let i = 0
   while (true) {
     await main()
     await delay(60000*20) //20 minutes in ms.
+    i = i+1;
+    return i;
   }
 }

-mainLoop()
+async function talkLoop() {
+  let q = await ask(`To ${modelID}: `)
+  userAGIInput += `USER: ${q}`;
+  let a = await ollama.chat({
+    model: `${modelID}`,
+    messages: [{ role: 'user', content: `${q}` }],
+  })
+  userAGIInput += `AI: ${a}`;
+  console.log(`${a}`)
+  generateModelfile(contexts)
+  return [q, a]
+}
+
+
+
+let [learningRound, talkLoopResults] = Promise.allSettled([learningLoop(), talkLoop()])
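Review note on the new loops: `learningLoop()` still calls the old `main()` even though it was renamed to `learner()`; `return i` inside `while (true)` exits after a single pass, so the 20-minute cycle never repeats; interpolating `a` directly logs `[object Object]` because `ollama.chat()` resolves to a response object; and `Promise.allSettled(...)` returns a promise, so destructuring it without `await` yields undefined values. A hedged sketch of the apparent intent, assuming the promise-based `ask()` from the earlier note and that the reply text lives at `res.message.content` (the ollama JS client's chat response shape):

```js
// Sketch only: keep learning in the background while chatting in the foreground.
async function learningLoop() {
  let i = 0;
  while (true) {
    await learner();            // the renamed main()
    await delay(60000 * 20);    // 20 minutes in ms
    i = i + 1;                  // count rounds instead of returning out of the loop
  }
}

async function talkLoop() {
  while (true) {
    const q = await ask(`To ${modelID}: `);
    userAGIInput += `USER: ${q}\n`;
    const res = await ollama.chat({
      model: modelID,
      messages: [{ role: 'user', content: q }],
    });
    const reply = res.message.content;  // the reply text, not the whole response object
    userAGIInput += `AI: ${reply}\n`;
    console.log(reply);
    await generateModelfile(contexts);  // persist the updated chat context before the next prompt
  }
}

await Promise.allSettled([learningLoop(), talkLoop()]); // top-level await is valid in an .mjs module
```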