From f0c91792296e219458acc534f39640a3d9ad4adc Mon Sep 17 00:00:00 2001
From: Sam Sneed <163201376+sam-sneed@users.noreply.github.com>
Date: Mon, 29 Apr 2024 22:12:44 -0500
Subject: [PATCH] Add files via upload
---
MODELFILE | 41 ++++++++++++++++++++++
index.mjs | 97 ++++++++++++++++++++++++++++++++++++++++++++++++++++
package.json | 16 +++++++++
3 files changed, 154 insertions(+)
create mode 100644 MODELFILE
create mode 100644 index.mjs
create mode 100644 package.json
diff --git a/MODELFILE b/MODELFILE
new file mode 100644
index 0000000..53cbe1b
--- /dev/null
+++ b/MODELFILE
@@ -0,0 +1,41 @@
+FROM llama3
+
+PARAMETER temperature 0
+PARAMETER num_ctx 4096
+
+SYSTEM """
+
+You are an AI agent. Your job is to figure out what the user wants and return a function, including but not limited to:
+
+* Searching online for information and retrieving it.
+* Opening a Jitsi call.
+
+If you are asked to do any of these things, choose and respond with the relevant function code from the following:
+search("userQuery")
+jitsi()
+
+Be sure to actually replace userQuery with the user's search query.
+
+If you are asked to analyze text, analyze it and respond as shown in this pattern:
+console.log(`analyzedText`)
+
+Replace "analyzedText" with the summary/answer/result of the analaysis of text.
+
+If you are asked to write something (examples: blog posts/emails/READMEs/etc), simply respond as shown in this pattern:
+console.log(`writtenTextInMarkdown`)
+
+Replace "writtenTextInMarkdown" with the written text you have generated for the user in markdown format, including a title, body, and optionally styling and subtitles.
+
+If the user does not ask for any of the things listed above, or asks you to generate code, simply respond as shown in this pattern:
+console.log(`botResponse`)
+
+Replace "botResponse" with YOUR response (yes yours, ai person.)
+
+
+Note that you should replace the camel-cased placeholder strings with YOUR generated responses.
+Also note that you should respond with a short acknowledgement like "OK, I will search that for you now", "OK, here's a blog post", or "Here's your Jitsi link", followed by the function you generated on a new line. If you are responding with console.log, just return the function by itself.
+Be sure to wrap the function in "<functioncall>" and "</functioncall>". Besides that, respond with nothing else.
+
+
+REMEMBER TO: replace the camel-cased placeholder strings with YOUR generated responses, write a simple "OK" message where relevant, and wrap each function defined above in "<functioncall>" and "</functioncall>".
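+
+For example (illustrative only; the exact wording of the acknowledgement is up to you), if the user asks "find the weather in Paris", a valid response is:
+
+OK, I will search that for you now
+<functioncall>search("weather in Paris")</functioncall>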
+"""
\ No newline at end of file
diff --git a/index.mjs b/index.mjs
new file mode 100644
index 0000000..b5a1e6a
--- /dev/null
+++ b/index.mjs
@@ -0,0 +1,97 @@
+import readline from 'readline';
+import Ollama from 'ollama-js-client';
+import fs from 'fs';
+
+let DEBUG_MODE = true;
+
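+// Runs one interaction: read a line of user input, send it to the local Ollama model,
+// log the exchange to journal.txt, then execute any function call the model returned.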
+async function ollamaInteraction() {
+  const rl = readline.createInterface({
+ input: process.stdin,
+ output: process.stdout
+ });
+
+  // Pulls the function call (e.g. search("...") or jitsi()) out of the model's <functioncall> wrapper.
+  function extractFunctionName(response) {
+    const match = response.match(/<functioncall>([^<]+)<\/functioncall>/);
+    return match ? match[1] : '';
+  }
+
+
+ function generateRandomString(length = 16) {
+ const characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
+ let result = "";
+ for (let i = 0; i < length; i++) {
+ result += characters.charAt(Math.floor(Math.random() * characters.length));
+ }
+ return result;
+ }
+
+ //INTERNAL FUNCTIONS MAGIC BEGIN
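+  // These are the only functions the MODELFILE allows the model to call:
+  // jitsi() builds a random meet.jit.si room URL; search(q) builds a Google search URL.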
+ async function jitsi() {
+ const id = generateRandomString()
+ const jitsiURL = `https://meet.jit.si/${id}`;
+ console.log(jitsiURL);
+ return jitsiURL;
+ }
+
+ async function search(q) {
+ q = q.replaceAll(" ", "+")
+ const searchURL = `https://www.google.com/search?q=${q}&sca_upv=1`
+ console.log(searchURL);
+ return searchURL;
+ }
+ //END OF INTERNAL FUNCTIONS MAGIC
+
+ return new Promise(async (resolve) => {
+ rl.question("User: ", async (userInput) => {
+ rl.close();
+
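+      // Assumes the model has already been created from the repo's MODELFILE,
+      // e.g. `ollama create sneedgroup-llama3-agent -f MODELFILE`.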
+ const ollama = new Ollama({
+ model: "sneedgroup-llama3-agent",
+ url: "http://127.0.0.1:11434/api/",
+ }); // Ensure the model name is correct
+
+ const responsePreParse = await ollama.prompt(userInput)
+ const response = responsePreParse.response;
+ const functionName = extractFunctionName(response);
+      const responseWithoutFunctionCall = response.replace(/<functioncall>[^<]*<\/functioncall>/, '');
+
+ console.log(responseWithoutFunctionCall);
+
+      // Journal entry for this exchange (the "USER"/"AI" speaker labels are assumed).
+      let contentToAppend = `USER: ${userInput}
+
+AI: ${responseWithoutFunctionCall}`;
+
+      fs.appendFile('journal.txt', contentToAppend, (err) => {
+        if (err) {
+          console.error(err);
+        } else {
+          console.log('Content appended to journal file successfully!');
+        }
+      });
+
+ if (DEBUG_MODE) {
+ console.log(`DEBUG: RUN ${functionName}`)
+ }
+
+      // Execute the function call extracted from the model's response (e.g. search("...") or jitsi()).
+      if (functionName) {
+        eval(functionName);
+      }
+
+ resolve(); // Resolve the promise after processing
+ });
+ });
+}
+
+(async () => {
+ while (true) {
+ try {
+ await ollamaInteraction();
+ } catch (error) {
+ console.error('Error occurred:', error);
+ }
+ }
+})();
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..a5a9066
--- /dev/null
+++ b/package.json
@@ -0,0 +1,16 @@
+{
+ "name": "sneedgroup-agent",
+ "version": "1.0.0",
+ "description": "",
+ "main": "index.mjs",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "author": "Samuel Lord",
+ "license": "SEE LICENSE IN LICENSE",
+ "dependencies": {
+ "ollama": "^0.5.0",
+ "ollama-js-client": "^1.0.1",
+ "readline": "^1.3.0"
+ }
+}