
The code imports helper functions from other modules and defines three asynchronous functions, doEssay, doBusiness, and doStory, that generate an essay, a business plan, or a story from the user's command input. Each function selects a prompt model, triggers a typing indicator, updates the interaction with a progress message, generates the content with the corresponding LLM helper, and posts the result to the channel as an image attachment via postMessageImageAttachment.

Run example

npm run import -- "discord writing llms"

discord writing llms

const {postMessageImageAttachment} = importer.import("create message image attachments")
const {triggerTyping, createMessage, updateInteraction} = importer.import("discord api")
const selectModel = importer.import("select llm")
const askLlamaToWriteStory = importer.import("write creatively llm")
const askLlamaToWriteBusinessPlan = importer.import("business plan llm")
const askLlamaWriteEssay = importer.import("research paper llm")


// Generate a research essay from the first option and post it as an HTML attachment
async function doEssay(interaction) {
  // options[1] optionally selects the LLM; default to 'Meta'
  const promptModel = await selectModel(interaction.data.options[1] ? interaction.data.options[1].value : 'Meta')

  await triggerTyping(interaction.channel_id)
  
  await updateInteraction('This could take a while...', interaction.id, interaction.token)

  let essay = await askLlamaWriteEssay(interaction.data.options[0].value, null, promptModel)

  return await postMessageImageAttachment(interaction.data.options[0].value, Buffer.from(essay), interaction.channel_id, 'text/html')
}

// Generate a business plan from the first option and post it as an HTML attachment
async function doBusiness(interaction) {
  const promptModel = await selectModel(interaction.data.options[1] ? interaction.data.options[1].value : 'Meta')

  await triggerTyping(interaction.channel_id)
  
  await updateInteraction('This could take a while...', interaction.id, interaction.token)

  let plan = await askLlamaToWriteBusinessPlan(interaction.data.options[0].value, null, promptModel)

  return await postMessageImageAttachment(interaction.data.options[0].value, Buffer.from(plan), interaction.channel_id, 'text/html')
}

// Generate a creative story from the first option and post it as an HTML attachment
async function doStory(interaction) {
  const promptModel = await selectModel(interaction.data.options[1] ? interaction.data.options[1].value : 'Meta')

  await triggerTyping(interaction.channel_id)
  
  await updateInteraction('This could take a while...', interaction.id, interaction.token)

  let story = await askLlamaToWriteStory(interaction.data.options[0].value, null, promptModel)

  return await postMessageImageAttachment(interaction.data.options[0].value, Buffer.from(story), interaction.channel_id, 'text/html')
}


module.exports = {
  doEssay,
  doBusiness,
  doStory,
}
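
A minimal sketch of how these exported handlers might be wired to slash commands. The command names and the dispatch function below are assumptions for illustration, not part of the module above; it only assumes the cell can be re-imported by its name.

// Hypothetical dispatcher: command names are illustrative only
const { doEssay, doBusiness, doStory } = importer.import("discord writing llms")

const handlers = {
  essay: doEssay,       // e.g. /essay <topic> [model]
  business: doBusiness, // e.g. /business <idea> [model]
  story: doStory,       // e.g. /story <premise> [model]
}

async function routeWritingCommand(interaction) {
  const handler = handlers[interaction.data.name]
  if (!handler) return
  return await handler(interaction)
}

module.exports = routeWritingCommand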


What the code could have been:

const {
  postMessageImageAttachment,
  triggerTyping,
  createMessage,
  updateInteraction,
} = require('./discord-api');
const { selectModel } = require('./select-llm');
const {
  askLlamaToWriteStory,
  askLlamaToWriteBusinessPlan,
  askLlamaWriteEssay,
} = require('./llm-services');

/**
 * Interface for interaction context
 * @typedef {Object} InteractionContext
 * @property {string} channel_id - Channel ID
 * @property {string} id - Interaction ID
 * @property {string} token - Interaction token
 * @property {Object} data - Interaction data
 * @property {{name: string, value: string}[]} data.options - Interaction options
 */
class InteractionContext {
  /**
   * @param {Object} interaction - Interaction object
   */
  constructor(interaction) {
    this.channel_id = interaction.channel_id;
    this.id = interaction.id;
    this.token = interaction.token;
    this.data = interaction.data;
  }
}

/**
 * Service for creating essays using LLM
 */
class EssayService {
  /**
   * Creates an essay using the selected LLM model
   * @param {InteractionContext} interaction - Interaction context
   * @param {string} promptModel - Selected LLM model
   * @return {Promise} Essay content as a string
   */
  async createEssay(interaction, promptModel) {
    const essay = await askLlamaWriteEssay(
      interaction.data.options[0].value,
      null,
      promptModel
    );
    return essay;
  }
}

class BusinessPlanService {
  /**
   * Creates a business plan using the selected LLM model
   * @param {InteractionContext} interaction - Interaction context
   * @param {string} promptModel - Selected LLM model
   * @return {Promise} Business plan content as a string
   */
  async createBusinessPlan(interaction, promptModel) {
    const businessPlan = await askLlamaToWriteBusinessPlan(
      interaction.data.options[0].value,
      null,
      promptModel
    );
    return businessPlan;
  }
}

class StoryService {
  /**
   * Creates a story using the selected LLM model
   * @param {InteractionContext} interaction - Interaction context
   * @param {string} promptModel - Selected LLM model
   * @return {Promise} Story content as a string
   */
  async createStory(interaction, promptModel) {
    const story = await askLlamaToWriteStory(
      interaction.data.options[0].value,
      null,
      promptModel
    );
    return story;
  }
}

/**
 * Service for handling interactions with the LLM
 */
class InteractionService {
  /**
   * Creates an instance of the interaction service
   * @param {Object} services - Service instances
   */
  constructor(services) {
    this.services = services;
    this.services.selectModel = selectModel;
    this.services.triggerTyping = triggerTyping;
    this.services.createMessage = createMessage;
    this.services.updateInteraction = updateInteraction;
    this.services.postMessageImageAttachment = postMessageImageAttachment;
    this.services.essay = new EssayService();
    this.services.businessPlan = new BusinessPlanService();
    this.services.story = new StoryService();
  }

  /**
   * Handles an interaction with the LLM
   * @param {InteractionContext} interaction - Interaction context
   * @param {string} serviceType - Service type (essay, business, story)
   * @return {Promise} Promise resolving when the interaction is handled
   */
  async handleInteraction(interaction, serviceType) {
    const promptModel = await this.services.selectModel(
      interaction.data.options[1] ? interaction.data.options[1].value : 'Meta'
    );

    await this.services.triggerTyping(interaction.channel_id);
    await this.services.updateInteraction('This could take a while...', interaction.id, interaction.token);

    let content;
    switch (serviceType) {
      case 'essay':
        content = await this.services.essay.createEssay(interaction, promptModel);
        break;
      case 'business':
        content = await this.services.businessPlan.createBusinessPlan(interaction, promptModel);
        break;
      case 'story':
        content = await this.services.story.createStory(interaction, promptModel);
        break;
      default:
        throw new Error(`Unsupported service type: ${serviceType}`);
    }

    await this.services.postMessageImageAttachment(
      interaction.data.options[0].value,
      Buffer.from(content),
      interaction.channel_id,
      'text/html'
    );
  }
}

module.exports = {
  createInteractionService: (services) => new InteractionService(services),
  InteractionContext,
};
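
A hedged usage sketch of the refactored factory. The require path, IDs, token, and option values below are assumptions for illustration only.

// Hypothetical usage: the require path and payload values are illustrative
const { createInteractionService } = require('./discord-writing-llms')

// the constructor fills in the Discord and LLM helpers itself, so an empty object suffices
const interactionService = createInteractionService({})

// a minimal interaction payload as it might arrive from the Discord gateway;
// with no second option, the model falls back to 'Meta'
const interaction = {
  id: '123456789012345678',
  token: 'interaction-token',
  channel_id: '876543210987654321',
  data: { options: [{ name: 'premise', value: 'A mesh network that becomes self-aware' }] },
}

interactionService
  .handleInteraction(interaction, 'story')
  .catch(err => console.error('story generation failed', err))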

Code Breakdown

Import Statements

The code imports the following helpers using importer.import():

  - postMessageImageAttachment from "create message image attachments"
  - triggerTyping, createMessage, and updateInteraction from "discord api"
  - selectModel from "select llm"
  - askLlamaToWriteStory from "write creatively llm"
  - askLlamaToWriteBusinessPlan from "business plan llm"
  - askLlamaWriteEssay from "research paper llm"

Functions

The code defines three asynchronous functions:

  1. doEssay - writes a research essay on the requested topic.
  2. doBusiness - writes a business plan for the requested idea.
  3. doStory - writes a creative story from the requested premise.

Each function follows the same flow (see the example call after this list):

  1. Selects a prompt model using the selectModel function.
  2. Triggers typing in the channel using triggerTyping.
  3. Updates the interaction with a progress message using updateInteraction.
  4. Calls the respective LLM function to generate the content (essay, business plan, or story).
  5. Returns the generated content as an image attachment using postMessageImageAttachment.
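
For illustration, a direct call might look like the sketch below; the IDs, token, and option values are hypothetical. options[0] always carries the prompt, and options[1] optionally names the model (defaulting to 'Meta').

// Illustrative only: IDs, token, and option values are made up
const interaction = {
  id: '1122334455667788',
  token: 'interaction-token',
  channel_id: '9988776655443322',
  data: {
    options: [
      { name: 'topic', value: 'The economics of community solar projects' }, // options[0]: the prompt
      { name: 'model', value: 'Meta' },                                      // options[1]: optional model
    ],
  },
}

doEssay(interaction)
  .then(() => console.log('essay posted to the channel as an HTML attachment'))
  .catch(console.error)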

Module Exports

The module exports the three functions doEssay, doBusiness, and doStory.
