From e65d914888f39d453c9643e187597e88d38aecdd Mon Sep 17 00:00:00 2001
From: lucagrippa
Date: Fri, 1 Mar 2024 17:53:15 -0500
Subject: [PATCH] Updated langchain and fixed bug

---
 manifest.json     |   2 +-
 package-lock.json |  44 ++++++-------
 src/llm.ts        |  97 +++++++++++++++++----------
 src/main.ts       | 165 +++++++++++++++++++++++++---------------------
 4 files changed, 174 insertions(+), 134 deletions(-)

diff --git a/manifest.json b/manifest.json
index 7fc1301..881af87 100644
--- a/manifest.json
+++ b/manifest.json
@@ -1,7 +1,7 @@
 {
     "id": "ai-tagger",
     "name": "AI Tagger",
-    "version": "1.0.1",
+    "version": "1.0.2",
     "minAppVersion": "0.15.0",
     "description": "Analyze and tag your document with one click for efficient note organization using AI. OpenAI API key required",
     "author": "Luca Grippa",
diff --git a/package-lock.json b/package-lock.json
index 1c2690c..8e6dbbf 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -589,11 +589,11 @@
       "peer": true
     },
     "node_modules/@langchain/community": {
-      "version": "0.0.32",
-      "resolved": "https://registry.npmjs.org/@langchain/community/-/community-0.0.32.tgz",
-      "integrity": "sha512-jN4BxGKAmLbA87hqXH5Mx1IRMMVOgcn1TY1MLOVyBcBa12EvHFx8suogtXgA2ekfc8U8nIryVb1ftSupwUBv/A==",
+      "version": "0.0.34",
+      "resolved": "https://registry.npmjs.org/@langchain/community/-/community-0.0.34.tgz",
+      "integrity": "sha512-eU3VyK7dZ3S05E4IQ3IVb3B8Ja/GaNDHaXhfjUJfZLOwyZrrLMhshGRIbbO+iMqJz8omGK761QK14v0G0/U3iw==",
       "dependencies": {
-        "@langchain/core": "~0.1.32",
+        "@langchain/core": "~0.1.36",
         "@langchain/openai": "~0.0.14",
         "flat": "^5.0.2",
         "langsmith": "~0.1.1",
@@ -945,15 +945,15 @@
       }
     },
     "node_modules/@langchain/core": {
-      "version": "0.1.33",
-      "resolved": "https://registry.npmjs.org/@langchain/core/-/core-0.1.33.tgz",
-      "integrity": "sha512-1ZRNVEgK+N+Jq1XU6DJG8wW4MjdyA5nxk2LAK4rc8VuJf7D2Hzek8Gq99ToS6B0B32VK5z2HtHDsLMoYcdK/jA==",
+      "version": "0.1.40",
+      "resolved": "https://registry.npmjs.org/@langchain/core/-/core-0.1.40.tgz",
+      "integrity": "sha512-TNX7swnPNFNXftsFMuXLkPhk+9/tE2veLFzuiiuIi0Mz/+jgPi4klHx9jxS8+DHPgMvKzmwsoE8U879iPByXjw==",
       "dependencies": {
         "ansi-styles": "^5.0.0",
         "camelcase": "6",
         "decamelize": "1.2.0",
         "js-tiktoken": "^1.0.8",
-        "langsmith": "~0.1.1",
+        "langsmith": "~0.1.7",
         "ml-distance": "^4.0.0",
         "p-queue": "^6.6.2",
         "p-retry": "4",
@@ -977,11 +977,11 @@
       }
     },
     "node_modules/@langchain/openai": {
-      "version": "0.0.14",
-      "resolved": "https://registry.npmjs.org/@langchain/openai/-/openai-0.0.14.tgz",
-      "integrity": "sha512-co6nRylPrLGY/C3JYxhHt6cxLq07P086O7K3QaZH7SFFErIN9wSzJonpvhZR07DEUq6eK6wKgh2ORxA/NcjSRQ==",
+      "version": "0.0.15",
+      "resolved": "https://registry.npmjs.org/@langchain/openai/-/openai-0.0.15.tgz",
+      "integrity": "sha512-ILecml9YopmQxfpaquYEG+KfEz7svJqpcla671J1tVngqtPwRqg9PLUOa2eDrPsunScxKUeLd8HjAGLd/eaefQ==",
       "dependencies": {
-        "@langchain/core": "~0.1.13",
+        "@langchain/core": "~0.1.39",
         "js-tiktoken": "^1.0.7",
         "openai": "^4.26.0",
         "zod": "^3.22.4",
@@ -2369,13 +2369,13 @@
       }
     },
     "node_modules/langchain": {
-      "version": "0.1.21",
-      "resolved": "https://registry.npmjs.org/langchain/-/langchain-0.1.21.tgz",
-      "integrity": "sha512-OOcCFIgx23WyyNS1VJBLbC3QL5plQBVfp2drXw1OJAarZ8yEY3cgJq8NbTY37sMnLoJ2olFEzMuAOdlTur4cwQ==",
+      "version": "0.1.25",
+      "resolved": "https://registry.npmjs.org/langchain/-/langchain-0.1.25.tgz",
+      "integrity": "sha512-sfEChvr4H2CklHdSByNBbytwBrFhgtA5kPOnwcBrxuXGg1iOaTzhVxQA0QcNcQucI3hZrsNbZjxGp+Can1ooZQ==",
       "dependencies": {
         "@anthropic-ai/sdk": "^0.9.1",
-        "@langchain/community": "~0.0.32",
-        "@langchain/core": "~0.1.32",
+        "@langchain/community": "~0.0.33",
+        "@langchain/core": "~0.1.36",
         "@langchain/openai": "~0.0.14",
         "binary-extensions": "^2.2.0",
         "expr-eval": "^2.0.2",
@@ -2383,7 +2383,7 @@
         "js-yaml": "^4.1.0",
         "jsonpointer": "^5.0.1",
         "langchainhub": "~0.0.8",
-        "langsmith": "~0.1.1",
+        "langsmith": "~0.1.7",
         "ml-distance": "^4.0.0",
         "openapi-types": "^12.1.3",
         "p-retry": "4",
@@ -2445,7 +2445,7 @@
         "web-auth-library": "^1.0.3",
         "ws": "^8.14.2",
         "youtube-transcript": "^1.0.6",
-        "youtubei.js": "^5.8.0"
+        "youtubei.js": "^9.1.0"
       },
       "peerDependenciesMeta": {
         "@aws-sdk/client-s3": {
@@ -2609,9 +2609,9 @@
       "integrity": "sha512-Woyb8YDHgqqTOZvWIbm2CaFDGfZ4NTSyXV687AG4vXEfoNo7cGQp7nhl7wL3ehenKWmNEmcxCLgOZzW8jE6lOQ=="
     },
     "node_modules/langsmith": {
-      "version": "0.1.6",
-      "resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.1.6.tgz",
-      "integrity": "sha512-pLwepjtA7ki4aK20L1KqbJi55f10KVHHOSPAqzoNnAZqWv/YlHyxHhNrY/Nkxb+rM+hKLZNBMpmjlgvceEQtvw==",
+      "version": "0.1.8",
+      "resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.1.8.tgz",
+      "integrity": "sha512-GMEPhUPmkOPUih2ho07kSMhHYpCDkavc6Zg0XgBjhLsYqYaobOxFFNyOc806jOvH2yw2tmiKLuiAdlQAVbDnHg==",
       "dependencies": {
         "@types/uuid": "^9.0.1",
         "commander": "^10.0.1",
diff --git a/src/llm.ts b/src/llm.ts
index bf4e160..22248cb 100644
--- a/src/llm.ts
+++ b/src/llm.ts
@@ -2,7 +2,7 @@ import { getAllTags, Notice } from 'obsidian';
 import { z } from "zod";
 import { zodToJsonSchema } from "zod-to-json-schema";
 import { ChatOpenAI } from "@langchain/openai";
-import { JsonOutputFunctionsParser } from "langchain/output_parsers";
+import { JsonOutputKeyToolsParser, JsonOutputKeyToolsParserParams } from "@langchain/core/output_parsers/openai_tools";
 import { Runnable } from '@langchain/core/runnables';
 import {
     ChatPromptTemplate,
@@ -35,11 +35,17 @@ export class LLM {
     constructor(modelName: string, openAIApiKey: string) {
         this.modelName = modelName;
 
-        const prompt = this.getPrompt();
+        const prompt: ChatPromptTemplate = this.getPrompt();
         const functionCallingModel = this.getModel(modelName, openAIApiKey);
 
-        const outputParser = new JsonOutputFunctionsParser();
-        this.chain = prompt.pipe(functionCallingModel).pipe(outputParser);
+        // const outputParserParams = JsonOutputKeyToolsParserParams()
+        // const outputParser = new JsonOutputKeyToolsParser(keyName=document_tagger, returnSingle=true);
+        const outputParser = new JsonOutputKeyToolsParser({
+            keyName: "document_tagger",
+            returnSingle: true,
+        });
+
+        this.chain = prompt.pipe(functionCallingModel!).pipe(outputParser);
     }
 
     getPrompt() {
@@ -54,13 +60,17 @@
 
         const humanMessage = "DOCUMENT: \n ```{document}``` \n TAGS: \n"
 
-        const prompt = new ChatPromptTemplate({
-            promptMessages: [
-                SystemMessagePromptTemplate.fromTemplate(systemMessage),
-                HumanMessagePromptTemplate.fromTemplate(humanMessage),
-            ],
-            inputVariables: ["tagsString", "document"],
-        });
+        const prompt = ChatPromptTemplate.fromMessages([
+            SystemMessagePromptTemplate.fromTemplate(systemMessage),
+            HumanMessagePromptTemplate.fromTemplate(humanMessage),
+        ]);
+        // const prompt = new ChatPromptTemplate({
+        //     promptMessages: [
+        //         SystemMessagePromptTemplate.fromTemplate(systemMessage),
+        //         HumanMessagePromptTemplate.fromTemplate(humanMessage),
+        //     ],
+        //     inputVariables: ["tagsString", "document"],
+        // });
 
         return prompt;
     }
@@ -70,26 +80,43 @@
             tags: z.array(z.string()).describe("An array of tags that best describes the text using existing tags."),
             newTags: z.array(z.string()).describe("An array of tags that best describes the text using new tags."),
         });
 
-        const llm = new ChatOpenAI({
-            temperature: 0,
-            modelName: modelName,
-            openAIApiKey: openAIApiKey,
-        });
-        // Binding "function_call" below makes the model always call the specified function.
-        // If you want to allow the model to call functions selectively, omit it.
-        const functionCallingModel = llm.bind({
-            functions: [
-                {
-                    name: "document_tagger",
-                    description: "Should always be used to tag documents.",
-                    parameters: zodToJsonSchema(zodSchema),
+        try {
+            const llm = new ChatOpenAI({
+                temperature: 0,
+                modelName: modelName,
+                openAIApiKey: openAIApiKey,
+            });
+            // Binding "function_call" below makes the model always call the specified function.
+            // If you want to allow the model to call functions selectively, omit it.
+            const llmWithTools = llm.bind({
+                tools: [
+                    {
+                        type: "function" as const,
+                        function: {
+                            name: "document_tagger",
+                            description: "Should always be used to tag documents.",
+                            parameters: zodToJsonSchema(zodSchema),
+                        },
+                    }
+                ],
+                tool_choice: {
+                    type: "function" as const,
+                    function: {
+                        name: "document_tagger",
+                    },
                 },
-            ],
-            function_call: { name: "document_tagger" },
-        });
+            });
+
+            return llmWithTools;
+        } catch (error) {
+            if (error.message.includes('OpenAI or Azure OpenAI API key not found at new ChatOpenAI')) {
+                // Notify the user about the incorrect API key
+                throw new Error('Incorrect API key. Please check your API key.');
+            }
+        }
+
 
-        return functionCallingModel;
     }
 
     formatTagsString(tags: string[], newTags: string[]) {
@@ -98,7 +125,7 @@
         tags.forEach((tag: string) => {
             tagsString += tag + " "
         });
-        
+
         // if there are new tags, add a separator
         if (newTags.length > 0) {
             tagsString += "| "
@@ -166,12 +193,12 @@
             if (error.message.includes('Incorrect API key')) {
                 // Notify the user about the incorrect API key
                 throw new Error('Incorrect API key. Please check your API key.');
-        // } else if (error.message.includes('Invalid Authentication')) {
-        //     // Notify the user about the incorrect API key
-        //     throw new Error('Incorrect API key. Please check your API key.');
-        // } else if (error.message.includes('You must be a member of an organization to use the API')) {
-        //     // Notify the user about the incorrect API key
-        //     throw new Error('Your account is not part of an organization. Contact OpenAI to get added to a new organization or ask your organization manager to invite you to an organization.');
+            // } else if (error.message.includes('Invalid Authentication')) {
+            //     // Notify the user about the incorrect API key
+            //     throw new Error('Incorrect API key. Please check your API key.');
+            // } else if (error.message.includes('You must be a member of an organization to use the API')) {
+            //     // Notify the user about the incorrect API key
+            //     throw new Error('Your account is not part of an organization. Contact OpenAI to get added to a new organization or ask your organization manager to invite you to an organization.');
             } else if (error.message.includes('Rate limit reached for requests')) {
                 // Notify the user about the incorrect API key
                 throw new Error('You are sending requests too quickly. Please pace your requests or read OpenAI\'s Rate limit guide.');
diff --git a/src/main.ts b/src/main.ts
index b6cc808..96d23ee 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -34,80 +34,59 @@ export default class AiTagger extends Plugin {
         this.llm = new LLM(this.settings.model, this.settings.openai_api_key);
     }
 
-    async tagDocument(documentContents: string, editor: Editor) {
+    async tagDocument(documentContents: string, editor: Editor, llm: LLM) {
         let { contentStart, exists, from, frontmatter, to } = getFrontMatterInfo(documentContents);
         let content: string = documentContents.substring(contentStart);
 
         console.log("Content:", content.substring(0, 30) + "...")
 
-        // generate tags for the document using an LLM
-        const generatedTags: string = await this.llm.generateTags(content);
-        console.log("Generated Tags:", generatedTags)
+        try {
+            // generate tags for the document using an LLM
+            const generatedTags: string = await llm.generateTags(content);
+            console.log("Generated Tags:", generatedTags)
 
-        if (exists) {
-            let yamlFrontMatter = yaml.load(frontmatter);
+            if (exists) {
+                let yamlFrontMatter = yaml.load(frontmatter);
 
-            // Update existing "tags" property with generated tags
-            if (yamlFrontMatter.tags === undefined) {
-                yamlFrontMatter.tags = generatedTags;
+                // Update existing "tags" property with generated tags
+                if (yamlFrontMatter.tags === undefined) {
+                    yamlFrontMatter.tags = generatedTags;
+                } else {
+                    yamlFrontMatter.tags = yamlFrontMatter.tags + " " + generatedTags;
+                }
+
+                // write the frontmatter to the top of the document in the editor
+                const updatedFrontMatter = yaml.dump(yamlFrontMatter);
+                editor.replaceRange(
+                    updatedFrontMatter,
+                    editor.offsetToPos(from),
+                    editor.offsetToPos(to)
+                );
             } else {
-                yamlFrontMatter.tags = yamlFrontMatter.tags + " " + generatedTags;
+                // create front matter
+                const newFrontmatter = `---\ntags: ${generatedTags}\n---\n`
+                // write the frontmatter to the top of the document in the editor
+                editor.replaceRange(newFrontmatter, { line: 0, ch: 0 });
             }
-
-            // write the frontmatter to the top of the document in the editor
-            const updatedFrontMatter = yaml.dump(yamlFrontMatter);
-            editor.replaceRange(
-                updatedFrontMatter,
-                editor.offsetToPos(from),
-                editor.offsetToPos(to)
-            );
-        } else {
-            // create front matter
-            const newFrontmatter = `---\ntags: ${generatedTags}\n---\n`
-            // write the frontmatter to the top of the document in the editor
-            editor.replaceRange(newFrontmatter, { line: 0, ch: 0 });
+        } catch (error) {
+            new Notice(error.message);
+            console.error('Error while generating tags:', error);
         }
     }
 
-    async tagSelection(selection: string, editor: Editor) {
+    async tagSelection(selection: string, editor: Editor, llm: LLM) {
         let { contentStart, exists, from, frontmatter, to } = getFrontMatterInfo(selection);
         let content: string = selection.substring(contentStart);
 
         console.log("Content:", content.substring(0, 30) + "...")
 
-        // generate tags for the document using an LLM
-        const generatedTags: string = await this.llm.generateTags(content);
-        console.log("Generated Tags:", generatedTags)
-
-        if (exists) {
-            let yamlFrontMatter = yaml.load(frontmatter);
+        try {
+            // generate tags for the document using an LLM
+            const generatedTags: string = await llm.generateTags(content);
+            console.log("Generated Tags:", generatedTags)
 
-            // Update existing "tags" property with generated tags
-            if (yamlFrontMatter.tags === undefined) {
-                yamlFrontMatter.tags = generatedTags;
-            } else {
-                yamlFrontMatter.tags = yamlFrontMatter.tags + " " + generatedTags;
-            }
-
-            // write the frontmatter to the top of the document in the editor
-            const updatedFrontMatter = yaml.dump(yamlFrontMatter);
-            editor.replaceRange(
-                updatedFrontMatter,
-                editor.offsetToPos(from),
-                editor.offsetToPos(to)
-            );
-        } else {
-            // check to see if this is a selection that doesn't include the frontmatter of the document
-            // if so, we don't need to create new frontmatter we just need to add the tags to the current frontmatter
-            console.log("Selection with no frontmatter:", selection.substring(0, 30) + "...")
-            let fileContents: string = editor.getValue();
-            console.log("File Contents:", fileContents.substring(0, 30) + "...")
-            let { contentStart, exists, from, frontmatter, to } = getFrontMatterInfo(fileContents);
-
-            // if there is frontmatter, we need to add it
             if (exists) {
                 let yamlFrontMatter = yaml.load(frontmatter);
-                console.log("Overall document has frontmatter:", yamlFrontMatter)
 
                 // Update existing "tags" property with generated tags
                 if (yamlFrontMatter.tags === undefined) {
@@ -124,12 +103,43 @@
                     editor.offsetToPos(to)
                 );
             } else {
-                console.log("Overall document doesn't have frontmatter:")
-                // create front matter
-                const newFrontmatter = `---\ntags: ${generatedTags}\n---\n`
-                // write the frontmatter to the top of the document in the editor
-                editor.replaceRange(newFrontmatter, { line: 0, ch: 0 });
+                // check to see if this is a selection that doesn't include the frontmatter of the document
+                // if so, we don't need to create new frontmatter we just need to add the tags to the current frontmatter
+                console.log("Selection with no frontmatter:", selection.substring(0, 30) + "...")
+                let fileContents: string = editor.getValue();
+                console.log("File Contents:", fileContents.substring(0, 30) + "...")
+                let { contentStart, exists, from, frontmatter, to } = getFrontMatterInfo(fileContents);
+
+                // if there is frontmatter, we need to add it
+                if (exists) {
+                    let yamlFrontMatter = yaml.load(frontmatter);
+                    console.log("Overall document has frontmatter:", yamlFrontMatter)
+
+                    // Update existing "tags" property with generated tags
+                    if (yamlFrontMatter.tags === undefined) {
+                        yamlFrontMatter.tags = generatedTags;
+                    } else {
+                        yamlFrontMatter.tags = yamlFrontMatter.tags + " " + generatedTags;
+                    }
+
+                    // write the frontmatter to the top of the document in the editor
+                    const updatedFrontMatter = yaml.dump(yamlFrontMatter);
+                    editor.replaceRange(
+                        updatedFrontMatter,
+                        editor.offsetToPos(from),
+                        editor.offsetToPos(to)
+                    );
+                } else {
+                    console.log("Overall document doesn't have frontmatter:")
+                    // create front matter
+                    const newFrontmatter = `---\ntags: ${generatedTags}\n---\n`
+                    // write the frontmatter to the top of the document in the editor
+                    editor.replaceRange(newFrontmatter, { line: 0, ch: 0 });
+                }
             }
+        } catch (error) {
+            new Notice(error.message);
+            console.error('Error while generating tags:', error);
         }
     }
 
@@ -140,26 +150,26 @@
         // This adds a settings tab so the user can configure various aspects of the plugin
         this.addSettingTab(new AiTaggerSettingTab(this.app, this));
 
-        // instantiate LLM class
-        this.llm = new LLM(this.settings.model, this.settings.openai_api_key);
-
         // This creates an icon in the left ribbon.
         this.addRibbonIcon('wand-2', 'Generate tags!', async () => {
-            // Called when the user clicks the icon.
-            const markdownView = this.app.workspace.getActiveViewOfType(MarkdownView);
-            if (markdownView) {
-                try {
+            try {
+                // instantiate LLM class
+                let llm = new LLM(this.settings.model, this.settings.openai_api_key);
+
+                // Called when the user clicks the icon.
+                const markdownView = this.app.workspace.getActiveViewOfType(MarkdownView);
+                if (markdownView) {
                     // get current document as a string
                     let fileContents: string = markdownView.editor.getValue();
-                    this.tagDocument(fileContents, markdownView.editor);
-                } catch (error) {
-                    new Notice(error.message);
-                    console.error('Error while generating tags:', error);
+                    this.tagDocument(fileContents, markdownView.editor, llm);
+                } else {
+                    const message = "Open and select a document to use the AI Tagger"
+                    new Notice(message);
+                    console.info(message);
                 }
-            } else {
-                const message = "Open and select a document to use the AI Tagger"
-                new Notice(message);
-                console.info(message);
+            } catch (error) {
+                new Notice(error.message);
+                console.error('Error while generating tags:', error);
             }
         });
 
@@ -170,15 +180,18 @@
             name: 'Generate tags',
             editorCallback: async (editor: Editor, view: MarkdownView) => {
                 try {
+                    // instantiate LLM class
+                    let llm = new LLM(this.settings.model, this.settings.openai_api_key);
+
                     // get current selection as a string
                     let selection = editor.getSelection();
 
                     if (selection === "") {
                         // if selection is empty, use the entire document
                         let fileContents: string = editor.getValue();
-                        this.tagDocument(fileContents, editor);
+                        this.tagDocument(fileContents, editor, llm);
                     } else {
-                        this.tagSelection(selection, editor);
+                        this.tagSelection(selection, editor, llm);
                     }
                 } catch (error) {
                     new Notice(error.message);