Update index.mjs
parent 6091b5987f
commit 9cbabf89e2
1 changed file with 80 additions and 15 deletions
index.mjs
@@ -1,22 +1,41 @@
 import { RecursiveUrlLoader } from "@langchain/community/document_loaders/web/recursive_url";
 import { compile } from "html-to-text";
 
-import { writeFile } from 'fs/promises';
+import { writeFile, fs } from 'fs/promises';
 
 import { exec } from 'child_process';
 import { promisify } from 'util';
 
+import readline from 'node:readline';
+
 import ollama from 'ollama'
 
 // Promisify exec for using async/await
 const execPromise = promisify(exec);
 
 const modelfilePath = "/Volumes/AGI/agi-mf"
 
+const systemSavePath = "/Volumes/AGI/agi-save"
+
 const modelID = "sparksammy/agsamantha"
 
 const maxDepthCount = 11
 
+function ask(q) {
+  let ans = "";
+  const rl = readline.createInterface({
+    input: process.stdin,
+    output: process.stdout,
+  });
+  rl.question(q, a => {
+    ans = a
+    rl.close();
+  });
+
+}
+
 // Function to run the publishing commands
-async function runPublishCommands() {
+async function runPublishCommands(sys) {
   try {
     // Execute the 'ollama create' command
     const createCommand = `ollama create ${modelID} -f ${modelfilePath}`;
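
Note on the changed import above: 'fs/promises' has no named export called `fs`, so the `fs.readFile(...)` call introduced later in this diff would throw at runtime. A minimal sketch of two ways to pull in the promise-based helpers this file actually uses, assuming only `readFile` and `writeFile` are needed:

```js
// Option A: import the helpers directly.
import { readFile, writeFile } from 'fs/promises';

// Option B: keep the `fs.` call style by importing the module namespace.
import * as fs from 'fs/promises';
// ...after which fs.readFile(path, { encoding: 'utf8' }) resolves to a string.
```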
@@ -42,13 +61,11 @@ async function runPublishCommands() {
 }
 
-const ags_template_part1 = `FROM stable-code
-FROM yi-coder:1.5b
-FROM granite-code:8b
+const ags_template_part1 = ` # base systems
 FROM deepseek-coder
 FROM dolphin-mistral
 FROM dolphin-llama3
 FROM dolphin-mixtral
 FROM knoopx/mobile-vlm:3b-fp16
 FROM CognitiveComputations/dolphin-llama3.1
 FROM llava
 
 # sets the temperature to 1 [higher is more creative, lower is more coherent]
 PARAMETER temperature .75
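
The rest of runPublishCommands() is outside this diff, but given the `execPromise` and `createCommand` shown in the first hunk, the create step is presumably awaited along these lines (sketch only; the actual error handling is not visible here):

```js
// Sketch: run `ollama create` through the promisified exec and surface its output.
const createCommand = `ollama create ${modelID} -f ${modelfilePath}`;
const { stdout, stderr } = await execPromise(createCommand);
if (stderr) console.error(stderr);
console.log(stdout);
```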
@@ -117,6 +134,18 @@ ${c}
 `
 }
 
+function createUserContextPart(c) {
+  return `
+
+***USER CHATS FOR EXTRA KNOWLEDGE BEGIN***
+
+${c}
+
+***USER CHATS FOR EXTRA KNOWLEDGE END***
+
+`
+}
+
 function generateSearchTerm(hostname) {
   //return `https://search.sparksammy.com/search.php?q=site%3A${encodeURIComponent(String(hostname))}&p=0&t=0`
   return `http://${hostname}`
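
contextAdd() itself is not part of this diff, but the RecursiveUrlLoader and html-to-text imports at the top of the file, together with generateSearchTerm() and maxDepthCount, suggest a crawl step roughly like the following (hypothetical sketch, not the author's implementation):

```js
// Hypothetical sketch of a single contextAdd() crawl.
const extractor = compile({ wordwrap: 130 });   // html-to-text converter function
const loader = new RecursiveUrlLoader(generateSearchTerm("en.wikipedia.org"), {
  extractor,                 // reduce each fetched page to plain text
  maxDepth: maxDepthCount,   // follow links up to the configured depth
});
const docs = await loader.load();  // Document objects whose pageContent feeds createContextPart()
```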
@@ -150,22 +179,40 @@ async function contextAdd(hostnames) {
   return contexts;
 }
 
+let userAGIInput = `
+
+`;
 
 async function generateModelfile(c) {
-  let ags_modelfile = ags_template_part1;
+  let ags_modelfile = ""
+  try {
+    const ags_modelfile = await fs.readFile(`${systemSavePath}`, { encoding: 'utf8' });
+  } catch (err) {
+    ags_modelfile = ags_template_part1;
+  }
 
   for (const item of c) {
     try {
       ags_modelfile += createContextPart(`${JSON.stringify(item)}`);
     } catch {} //very hacky.
   }
+  ags_modelfile += createUserContextPart(userAGIInput)
+  await writeFile(systemSavePath, ags_modelfile)
+    .then(() => {
+      console.log('File written successfully!');
+    })
+    .catch(err => {
+      console.error('Error writing file:', err);
+    });
   ags_modelfile += ags_template_finalpart;
   return ags_modelfile;
 }
 
-async function main() {
+async function learner() {
   try {
-    await contextAdd(["en.wikipedia.org", "toontownrewritten.wiki", "cnn.com", "rezero.fandom.com", "fategrandorder.fandom.com"]);
+    await contextAdd(["clubpenguin.fandom.com", "foxnews.com", "nytimes.com"])
+    await contextAdd(["stackoverflow.com"]);
+    await contextAdd(["en.wikipedia.org", "toontownrewritten.wiki", "rezero.fandom.com", "fategrandorder.fandom.com"]);
+    await contextAdd(["68k.news"])
+    await contextAdd(["old.reddit.com"]);
     //await contextAdd(["tea.texas.gov/student-assessment/staar/released-test-questions/2024-staar-algebra-i-answer-key.pdf", "tea.texas.gov/student-assessment/staar/released-test-questions/2024-staar-english-ii-answer-key.pdf", "tea.texas.gov/student-assessment/staar/released-test-questions/2024-staar-biology-answer-key.pdf", "tea.texas.gov/student-assessment/staar/released-test-questions/2024-staar-us-history-answer-key.pdf"])
     const modelfile = await generateModelfile(contexts);
     console.log(modelfile);
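
One detail in generateModelfile(): the `const ags_modelfile` declared inside the try block shadows the outer `let`, so whatever is read from disk is discarded and the template fallback is always used. A small sketch of the presumably intended behaviour, assuming `readFile` is imported from 'fs/promises':

```js
// Sketch: reuse the saved system file when it exists, otherwise fall back to the template.
let ags_modelfile = "";
try {
  ags_modelfile = await readFile(systemSavePath, { encoding: 'utf8' });  // assign the outer binding
} catch {
  ags_modelfile = ags_template_part1;  // first run, or the file is unreadable
}
```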
@@ -186,11 +233,29 @@ async function main() {
 
 const delay = ms => new Promise(res => setTimeout(res, ms));
 
-async function mainLoop() {
+async function learningLoop() {
+  let i = 0
   while (true) {
     await main()
     await delay(60000*20) //20 minutes in ms.
+    i = i+1;
+    return i;
   }
 }
 
-mainLoop()
+async function talkLoop() {
+  let q = await ask(`To ${modelID}: `)
+  userAGIInput += `USER: ${q}`;
+  let a = await ollama.chat({
+    model: `${modelID}`,
+    messages: [{ role: 'user', content: `${q}` }],
+  })
+  userAGIInput += `AI: ${a}`;
+  console.log(`${a}`)
+  generateModelfile(contexts)
+  return [q, a]
+}
+
+let [learningRound, talkLoopResults] = Promise.allSettled([learningLoop(), talkLoop()])
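
talkLoop() awaits ask(), but the ask() added in the first hunk returns undefined immediately, before the rl.question callback runs. A common way to make the prompt awaitable is to wrap it in a Promise (sketch, using the node:readline import already added above):

```js
// Sketch: resolve with the user's answer once readline delivers it.
function ask(q) {
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  });
  return new Promise(resolve => {
    rl.question(q, answer => {
      rl.close();
      resolve(answer);
    });
  });
}
```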
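Two further sketches rather than fixes: ollama.chat() resolves to a response object, so interpolating `a` directly logs "[object Object]" instead of the reply text, and Promise.allSettled() returns a single promise, so destructuring it without await yields undefined. Assuming the ollama JS client's documented response shape and top-level await in this .mjs module:

```js
// Sketch: extract the assistant's text from the chat response...
const res = await ollama.chat({
  model: modelID,
  messages: [{ role: 'user', content: q }],
});
const answer = res.message.content;   // response shape: { message: { role, content }, ... }
userAGIInput += `AI: ${answer}`;
console.log(answer);

// ...and await the settled results of both loops at the top level.
const [learningRound, talkLoopResults] = await Promise.allSettled([learningLoop(), talkLoop()]);
```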