The provided code defines three asynchronous functions — `askLlamaToSummerize`, `askLlamaToGeneralize`, and `askLlamaToImplement` — which interact with a Large Language Model (LLM) to perform tasks such as summarizing queries, categorizing text, and improving code snippets. These functions are exported as an object and can be used in other parts of the application.
npm run import -- "ask llm to summerize"
/**
 * Ask the LLM to condense `query` into a one-or-two-sentence summary.
 * @param {string} query - The text to summarize.
 * @returns {Promise<string>} The trimmed summary returned by the model.
 */
async function askLlamaToSummerize(query) {
  const { llmPrompt } = await importer.import('create llm session')
  // Prompt typos fixed ("Summerize", "explainations") so the model sees clean instructions.
  const q1 = 'Summarize this into one or two sentences:\n' + query
    + '\nDiscard any pleasantries or explanations, only return the summary.'
  console.log("User: " + q1);
  const a1 = await llmPrompt(q1);
  console.log("AI: " + a1);
  return a1.trim()
}
/**
 * Ask the LLM to categorize `query` in two or three words.
 * @param {string} query - The text to categorize.
 * @returns {Promise<string>} The trimmed category string.
 */
async function askLlamaToGeneralize(query) {
  const { llmPrompt } = await importer.import('create llm session')
  const prompt = [
    'How would you categorize this in two or three words:',
    query,
    'Return only the category.',
  ].join('\n')
  console.log('User: ' + prompt);
  const answer = await llmPrompt(prompt);
  console.log('AI: ' + answer);
  return answer.trim()
}
/**
 * Ask the LLM to improve a code snippet, optionally targeting a language.
 * @param {string} query - The code to improve.
 * @param {string} [language] - Optional language to mention in the prompt.
 * @returns {Promise<string>} The trimmed improved code returned by the model.
 */
async function askLlamaToImplement(query, language) {
  const { llmPrompt } = await importer.import('create llm session')
  // Prompt typos fixed ("keeing", "inputs an outputs"); the language clause is
  // still only added when a language was actually supplied.
  const q2 = 'Improve this code in every way you can, keeping the same basic inputs and outputs'
    + (language ? (', in ' + language) : '') + ':\n' + query
    + '\nAdd, refactor, remove, implement TODO comments if you can. Only return the new beautiful code and nothing else.'
  console.log("User: " + q2);
  const a2 = await llmPrompt(q2);
  console.log("AI: " + a2);
  return a2.trim()
}
module.exports = {
askLlamaToSummerize,
askLlamaToGeneralize,
askLlamaToImplement
}
// Import required modules
const importer = require('./importer');
// Define a class encapsulating the Llama LLM interactions.
class Llama {
  /**
   * Initialize the Llama instance.
   * `llmPrompt` lazily loads the 'create llm session' module and returns its
   * exports; callers destructure the actual prompt function from the result.
   */
  constructor() {
    this.llmPrompt = async () => await importer.import('create llm session');
  }
  /**
   * Summarize the input query into one or two sentences.
   * @param {string} query - The input query to be summarized.
   * @returns {Promise<string>} The trimmed summary.
   */
  async summarize(query) {
    const { llmPrompt } = await this.llmPrompt();
    const question = `Summarize this into one or two sentences:\n${query}\nDiscard any pleasantries or explanations, only return the summary.`;
    console.log(`User: ${question}`);
    const response = await llmPrompt(question);
    console.log(`AI: ${response}`);
    return response.trim();
  }
  /**
   * Categorize the input query into two or three words.
   * @param {string} query - The input query to be categorized.
   * @returns {Promise<string>} The trimmed category of the query.
   */
  async generalize(query) {
    const { llmPrompt } = await this.llmPrompt();
    const question = `How would you categorize this in two or three words:\n${query}\nReturn only the category.`;
    console.log(`User: ${question}`);
    const response = await llmPrompt(question);
    console.log(`AI: ${response}`);
    return response.trim();
  }
  /**
   * Improve the given code, optionally targeting a specific language.
   * @param {string} query - The code to be improved.
   * @param {string} [language] - Optional language for the improvement.
   * @returns {Promise<string>} The trimmed improved code.
   */
  async implement(query, language = '') {
    const { llmPrompt } = await this.llmPrompt();
    // Only mention the language when one was supplied; the previous version
    // always interpolated it and produced a malformed ", in :" for the
    // default empty string. Also fixes the "keeing" typo in the prompt.
    const languageClause = language ? `, in ${language}` : '';
    const question = `Improve this code in every way you can, keeping the same basic inputs and outputs${languageClause}:\n${query}\nAdd, refactor, remove, implement TODO comments if you can. Only return the new beautiful code and nothing else.`;
    console.log(`User: ${question}`);
    const response = await llmPrompt(question);
    console.log(`AI: ${response}`);
    return response.trim();
  }
}
// Export a singleton Llama instance. The constructor takes no arguments, so
// the old pattern of passing an object of arrow functions (which referenced an
// undefined `this`) was silently ignored and dropped the legacy function
// names. Attach backward-compatible aliases directly on the instance instead.
const llama = new Llama();
llama.askLlamaToSummerize = (query) => llama.summarize(query);
llama.askLlamaToGeneralize = (query) => llama.generalize(query);
llama.askLlamaToImplement = (query, language) => llama.implement(query, language);
module.exports = llama;
The provided code defines three asynchronous functions: `askLlamaToSummerize`, `askLlamaToGeneralize`, and `askLlamaToImplement`. These functions interact with a Large Language Model (LLM) using the 'create llm session' module, imported via the `importer` object.

- `askLlamaToSummerize(query)` — takes a `query` string and asks the LLM, via the 'create llm session' module, for a one-to-two-sentence summary.
- `askLlamaToGeneralize(query)` — takes a `query` string and asks the LLM for a two-to-three-word category.
- `askLlamaToImplement(query, language)` — takes a `query` string and an optional `language` string, and asks the LLM for an improved version of the given code.

The three functions are exported as an object, allowing them to be used in other parts of the application.