
The storeResponse function records a user interaction and returns an object containing metadata about it. It selects a model, analyzes the emotions in the content, and, unless the interaction is off-the-record, generates a summary and keywords. The entry is appended to the in-memory conversation data for the current month and session (loading the existing conversation file if one exists) and written back to the file system.
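
A minimal usage sketch (the user, session ID, and prompt below are hypothetical, and the cell is assumed to be loaded through the same importer shown in the run example):

const storeResponse = importer.import('store llm response')

// Hypothetical call: stores the interaction and logs the generated metadata.
storeResponse('alice', 'session-1234', 'How do I parse JSON in Node?', undefined, false)
  .then(entry => console.log(entry.summary, entry.keywords, entry.emotions))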

Run example

npm run import -- "store llm response"

store llm response

const path = require('path')
const fs = require('fs')
const selectModel = importer.import("select llm")
const {askLlamaAboutEmotions} = importer.import("ask llm about emotions")
const {ACTIVE_CONVERSATIONS, PROJECT_PATH, DEFAULT_MODEL} = importer.import("general chit chat")


async function storeResponse(user, session, content, context, otr) {
  let promptModel = await selectModel(DEFAULT_MODEL)

  if(!session) {
    return {
      emotions: await askLlamaAboutEmotions(content)
    }
  }

  let now = new Date()
  let convoFile = path.join(PROJECT_PATH, now.getFullYear() + '-' 
    + String(now.getMonth() + 1).padStart(2, '0') 
    + '-' + DEFAULT_MODEL
    + '-' + session + '.json')
  if(typeof ACTIVE_CONVERSATIONS[convoFile] == 'undefined') {
    if(fs.existsSync(convoFile)) {
      ACTIVE_CONVERSATIONS[convoFile] = JSON.parse(fs.readFileSync(convoFile))
    } else {
      ACTIVE_CONVERSATIONS[convoFile] = {}
    }
  }

  let contextContainsImage = false
  if(context && context.startsWith('data:image/png;base64,')) {
    contextContainsImage = true
  }

  let summary
  if(!otr) {
    summary = await promptModel('Summarize this prompt in one short sentence:\n' 
      + content + '\nOnly respond with the summary, no pleasantries.')
  }
  let keywords = await promptModel('List a few key words that categorize this prompt:\n' 
    + content + '\nOnly respond with a single category, no pleasantries.')
  let emotions = await askLlamaAboutEmotions(content)

  let result = ACTIVE_CONVERSATIONS[convoFile][Date.now()] = {
    user: user,
    content: otr ? void 0 : content,
    context: contextContainsImage ? void 0 : context,
    summary: summary,
    keywords: keywords,
    emotions: emotions,
    otr: otr ? true : false,
  }
  fs.writeFileSync(convoFile, JSON.stringify(ACTIVE_CONVERSATIONS[convoFile], null, 4))

  return result
}


module.exports = storeResponse

What the code could have been:

const path = require('path');
const fs = require('fs');
const selectModel = importer.import('select llm');
const {
  ACTIVE_CONVERSATIONS,
  PROJECT_PATH,
  DEFAULT_MODEL,
} = importer.import('general chit chat');
const { askLlamaAboutEmotions } = importer.import('ask llm about emotions');

/**
 * Stores a response to a user's prompt.
 * @param {string} user - The user who made the request.
 * @param {string} session - The session ID of the user.
 * @param {string} content - The prompt made by the user.
 * @param {string} context - The context of the prompt.
 * @param {boolean} otr - Whether the response is an OTR (Off-The-Record) response.
 * @returns {Promise} The stored response.
 */
async function storeResponse(user, session, content, context, otr) {
  const promptModel = await selectModel(DEFAULT_MODEL);

  if (!session) {
    // Match the original return shape: an object with only the emotion analysis.
    return { emotions: await getEmotionsFromContent(content) };
  }

  const now = new Date();
  const convoFile = getConvoFile(now, session);
  const convoData = await getConvoData(convoFile, ACTIVE_CONVERSATIONS);

  const contextContainsImage = isImageContext(context);
  const summary = await getSummary(content, promptModel, otr);
  const keywords = await getKeywords(content, promptModel);
  const emotions = await getEmotionsFromContent(content);

  const result = {
    user,
    content: otr ? void 0 : content,
    context: contextContainsImage ? void 0 : context,
    summary,
    keywords,
    emotions,
    otr: otr ? true : false,
  };

  // Key the new entry by its timestamp (matching the original on-disk format),
  // cache the conversation data in memory, then persist it.
  convoData[Date.now()] = result;
  setActiveConvoData(convoFile, ACTIVE_CONVERSATIONS, convoData);
  await saveConvoData(convoFile, ACTIVE_CONVERSATIONS);

  return result;
}

/**
 * Gets the emotions from the content.
 * @param {string} content - The content to get emotions from.
 * @returns {Promise} The emotions.
 */
async function getEmotionsFromContent(content) {
  const emotions = await askLlamaAboutEmotions(content);
  return emotions;
}

/**
 * Gets the summary of the content.
 * @param {string} content - The content to get the summary from.
 * @param {object} promptModel - The prompt model to use.
 * @param {boolean} otr - Whether the response is an OTR response.
 * @returns {Promise} The summary.
 */
async function getSummary(content, promptModel, otr) {
  if (!otr) {
    return promptModel('Summarize this prompt in one short sentence:\n' + content + '\nOnly respond with the summary, no pleasantries.');
  }
  return '';
}

/**
 * Gets the keywords from the content.
 * @param {string} content - The content to get the keywords from.
 * @param {object} promptModel - The prompt model to use.
 * @returns {Promise} The keywords.
 */
async function getKeywords(content, promptModel) {
  return promptModel('List a few key words that categorize this prompt:\n' + content + '\nOnly respond with a single category, no pleasantries.');
}

/**
 * Checks if the context is an image context.
 * @param {string} context - The context to check.
 * @returns {boolean} Whether the context is an image context.
 */
function isImageContext(context) {
  return !!context && context.startsWith('data:image/png;base64,');
}

/**
 * Gets the conversation file based on the date and session.
 * @param {Date} date - The date of the conversation.
 * @param {string} session - The session ID of the conversation.
 * @returns {string} The conversation file.
 */
function getConvoFile(date, session) {
  const model = DEFAULT_MODEL;
  return path.join(PROJECT_PATH, `${date.getFullYear()}-${String(date.getMonth() + 1).padStart(2, '0')}-${model}-${session}.json`);
}

/**
 * Gets the conversation data based on the conversation file.
 * @param {string} convoFile - The conversation file.
 * @param {object} convoData - The conversation data.
 * @returns {Promise} The conversation data.
 */
async function getConvoData(convoFile, convoData) {
  if (convoData[convoFile]) {
    return convoData[convoFile];
  }
  if (fs.existsSync(convoFile)) {
    return JSON.parse(fs.readFileSync(convoFile));
  }
  return {};
}

/**
 * Caches the conversation data for a conversation file in memory.
 * @param {string} convoFile - The conversation file.
 * @param {object} activeConversations - The in-memory conversation cache.
 * @param {object} data - The conversation data to cache.
 */
function setActiveConvoData(convoFile, activeConversations, data) {
  activeConversations[convoFile] = data;
}

/**
 * Saves the conversation data to the conversation file.
 * @param {string} convoFile - The conversation file.
 * @param {object} convoData - The in-memory conversation cache.
 */
async function saveConvoData(convoFile, convoData) {
  fs.writeFileSync(convoFile, JSON.stringify(convoData[convoFile], null, 4));
}

module.exports = storeResponse;

Function Breakdown

storeResponse Function

Purpose

Stores responses from user interactions.

Parameters

  • user: The user who interacted with the system.
  • session: The current session ID.
  • content: The content of the user's interaction.
  • context: Additional context about the interaction.
  • otr: A flag indicating whether the interaction was off-the-record (OTR); see the sketch after this list for how session and otr change the behavior.
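
To make the effect of session and otr concrete, here is a sketch (inside an async context; all values are hypothetical):

// No session: nothing is written to disk; only an emotion analysis is returned.
const { emotions } = await storeResponse('alice', null, 'I am so excited about this!')

// otr = true: an entry is stored, but the prompt content and summary are omitted from it.
await storeResponse('alice', 'session-1234', 'a sensitive message', undefined, true)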

Returns

An object containing metadata about the user's interaction (an example entry is shown after this list), including:

  • user: The user who interacted with the system.
  • content: The content of the user's interaction, unless it was an OTR session.
  • context: Additional context about the interaction, unless it contained an image.
  • summary: A brief summary of the user's interaction (omitted for OTR interactions).
  • keywords: A list of keywords categorizing the user's interaction.
  • emotions: An analysis of the emotions expressed in the user's interaction.
  • otr: A flag indicating whether the interaction was in an OTR session.
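
An illustrative stored entry (the values are made up, and the shape of emotions depends entirely on what askLlamaAboutEmotions returns):

const exampleEntry = {
  user: 'alice',
  content: 'How do I parse JSON in Node?',    // omitted (undefined) when otr is true
  context: undefined,                          // omitted when it is a base64 PNG image
  summary: 'Asks how to parse JSON in Node.',  // omitted when otr is true
  keywords: 'programming',
  emotions: undefined,                         // placeholder for askLlamaAboutEmotions(content)
  otr: false
}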

Function Flow

  1. Selects a model to use based on the DEFAULT_MODEL constant.
  2. If no session is provided, returns only an emotion analysis of the content without writing anything to disk.
  3. Builds a conversation file path from the current year, zero-padded month, the model name, and the session ID (see the path sketch after this list).
  4. Loads the existing conversation file into the in-memory cache if it is not already there, otherwise initializes an empty conversation object.
  5. Checks whether the context is a base64-encoded PNG image; if so, the context is omitted from the stored entry.
  6. Prompts the model for a one-sentence summary (skipped for OTR interactions) and for keywords categorizing the content.
  7. Analyzes the emotions expressed in the content.
  8. Adds the new entry, keyed by the current timestamp, to the in-memory conversation data and writes that data to the file system.
  9. Returns the stored entry.
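
As a concrete illustration of steps 3 and 8, assuming placeholder values for PROJECT_PATH, DEFAULT_MODEL, and the session ID:

// Assumed values, not project configuration:
//   PROJECT_PATH  = './conversations'
//   DEFAULT_MODEL = 'llama'
//   session       = 'abc123'
// In July 2024 the entry would be appended to:
//   ./conversations/2024-07-llama-abc123.json
// The file holds a single JSON object keyed by Date.now() timestamps, one entry per stored interaction.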

Imported Modules and Functions

  • importer: An import function.
  • path: A module for working with file paths.
  • fs: A module for interacting with the file system.
  • selectModel: A function for selecting a model.
  • askLlamaAboutEmotions: A function for analyzing emotions in text.
  • ACTIVE_CONVERSATIONS: An object containing conversation data.
  • PROJECT_PATH: A constant representing the project's root directory.
  • DEFAULT_MODEL: A constant representing the default model to use.
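
The shapes of the shared constants are not shown in this cell; the following sketch only records what their usage above implies:

// Inferred from usage, not the actual 'general chit chat' source:
// const ACTIVE_CONVERSATIONS = {}          // in-memory cache keyed by conversation file path
// const PROJECT_PATH = '/path/to/project'  // directory where conversation JSON files are written
// const DEFAULT_MODEL = 'llama'            // model name passed to selectModel and used in file names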