From 86126767043291698df73fa6a408328c5c613f7d Mon Sep 17 00:00:00 2001
From: Adam Debreceni
Date: Mon, 6 Jan 2025 15:05:28 +0100
Subject: [PATCH] Rename extension

---
 cmake/MiNiFiOptions.cmake                  |  1 +
 extensions/{ai => llamacpp}/CMakeLists.txt | 12 ++++++------
 .../processors/LlamaCppProcessor.cpp}      | 15 +++++++++------
 .../processors/LlamaCppProcessor.h}        | 10 +++++-----
 4 files changed, 21 insertions(+), 17 deletions(-)
 rename extensions/{ai => llamacpp}/CMakeLists.txt (71%)
 rename extensions/{ai/processors/AiProcessor.cpp => llamacpp/processors/LlamaCppProcessor.cpp} (96%)
 rename extensions/{ai/processors/AiProcessor.h => llamacpp/processors/LlamaCppProcessor.h} (94%)

diff --git a/cmake/MiNiFiOptions.cmake b/cmake/MiNiFiOptions.cmake
index 60cf179a9e..f0a8813627 100644
--- a/cmake/MiNiFiOptions.cmake
+++ b/cmake/MiNiFiOptions.cmake
@@ -114,6 +114,7 @@ add_minifi_option(ENABLE_GRPC_FOR_LOKI "Enable gRPC for Grafana Loki extension"
 add_minifi_option(ENABLE_COUCHBASE "Enable Couchbase support" OFF)
 add_minifi_option(ENABLE_EXECUTE_PROCESS "Enable ExecuteProcess processor" OFF)
 add_minifi_option(ENABLE_CONTROLLER "Enables the build of MiNiFi controller binary." ON)
+add_minifi_option(ENABLE_LLAMACPP "Enables LlamaCpp support." ON)
 
 set_minifi_cache_variable(CUSTOM_MALLOC OFF "Overwrite malloc implementation.")
 set_property(CACHE CUSTOM_MALLOC PROPERTY STRINGS "jemalloc" "mimalloc" "rpmalloc" OFF)
diff --git a/extensions/ai/CMakeLists.txt b/extensions/llamacpp/CMakeLists.txt
similarity index 71%
rename from extensions/ai/CMakeLists.txt
rename to extensions/llamacpp/CMakeLists.txt
index 56443e7f8c..e995c8d5de 100644
--- a/extensions/ai/CMakeLists.txt
+++ b/extensions/llamacpp/CMakeLists.txt
@@ -17,7 +17,7 @@
 # under the License.
 #
 
-if (NOT (ENABLE_ALL OR ENABLE_AI))
+if (NOT (ENABLE_ALL OR ENABLE_LLAMACPP))
   return()
 endif()
 
@@ -28,11 +28,11 @@ include(${CMAKE_SOURCE_DIR}/extensions/ExtensionHeader.txt)
 
 file(GLOB SOURCES "processors/*.cpp")
 
-add_minifi_library(minifi-ai-processors SHARED ${SOURCES})
-target_include_directories(minifi-ai-processors PUBLIC "${CMAKE_SOURCE_DIR}/extensions/ai")
+add_minifi_library(minifi-llamacpp SHARED ${SOURCES})
+target_include_directories(minifi-llamacpp PUBLIC "${CMAKE_SOURCE_DIR}/extensions/llamacpp")
 
-target_link_libraries(minifi-ai-processors ${LIBMINIFI} llamacpp)
+target_link_libraries(minifi-llamacpp ${LIBMINIFI} llamacpp)
 
-register_extension(minifi-ai-processors "AI PROCESSORS" AI-PROCESSORS "Provides AI processors")
+register_extension(minifi-llamacpp "AI PROCESSORS" AI-PROCESSORS "Provides AI processors")
 
-register_extension_linter(minifi-ai-processors-linter)
\ No newline at end of file
+register_extension_linter(minifi-llamacpp-linter)
\ No newline at end of file
diff --git a/extensions/ai/processors/AiProcessor.cpp b/extensions/llamacpp/processors/LlamaCppProcessor.cpp
similarity index 96%
rename from extensions/ai/processors/AiProcessor.cpp
rename to extensions/llamacpp/processors/LlamaCppProcessor.cpp
index 380e45a129..14786b8548 100644
--- a/extensions/ai/processors/AiProcessor.cpp
+++ b/extensions/llamacpp/processors/LlamaCppProcessor.cpp
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-#include "AiProcessor.h"
+#include "LlamaCppProcessor.h"
 #include "core/ProcessContext.h"
 #include "core/ProcessSession.h"
 #include "Resource.h"
@@ -49,12 +49,12 @@ struct LlamaChatMessage {
 
 }  // namespace
 
-void AiProcessor::initialize() {
+void LlamaCppProcessor::initialize() {
   setSupportedProperties(Properties);
   setSupportedRelationships(Relationships);
 }
 
-void AiProcessor::onSchedule(core::ProcessContext& context, core::ProcessSessionFactory&) {
+void LlamaCppProcessor::onSchedule(core::ProcessContext& context, core::ProcessSessionFactory&) {
   context.getProperty(ModelName, model_name_);
   context.getProperty(SystemPrompt, system_prompt_);
   context.getProperty(Prompt, prompt_);
@@ -150,12 +150,15 @@ void AiProcessor::onSchedule(core::ProcessContext& context, core::ProcessSession
   llama_sampler_chain_add(llama_sampler_, llama_sampler_init_dist(1234));
 }
 
-void AiProcessor::onTrigger(core::ProcessContext& context, core::ProcessSession& session) {
+void LlamaCppProcessor::onTrigger(core::ProcessContext& context, core::ProcessSession& session) {
   auto input_ff = session.get();
   if (!input_ff) {
     context.yield();
     return;
   }
+  auto ff_guard = gsl::finally([&] {
+    session.remove(input_ff);
+  });
 
   auto read_result = session.readBuffer(input_ff);
   std::string input_content{reinterpret_cast<const char*>(read_result.buffer.data()), read_result.buffer.size()};
@@ -312,7 +315,7 @@ void AiProcessor::onTrigger(core::ProcessContext& context, core::ProcessSession&
   }
 }
 
-void AiProcessor::notifyStop() {
+void LlamaCppProcessor::notifyStop() {
   llama_sampler_free(llama_sampler_);
   llama_sampler_ = nullptr;
   llama_free(llama_ctx_);
@@ -322,6 +325,6 @@ void AiProcessor::notifyStop() {
   llama_backend_free();
 }
 
-REGISTER_RESOURCE(AiProcessor, Processor);
+REGISTER_RESOURCE(LlamaCppProcessor, Processor);
 
 }  // namespace org::apache::nifi::minifi::processors
diff --git a/extensions/ai/processors/AiProcessor.h b/extensions/llamacpp/processors/LlamaCppProcessor.h
similarity index 94%
rename from extensions/ai/processors/AiProcessor.h
rename to extensions/llamacpp/processors/LlamaCppProcessor.h
index ed29d7333e..c22d17ae00 100644
--- a/extensions/ai/processors/AiProcessor.h
+++ b/extensions/llamacpp/processors/LlamaCppProcessor.h
@@ -26,7 +26,7 @@
 
 namespace org::apache::nifi::minifi::processors {
 
-class AiProcessor : public core::Processor {
+class LlamaCppProcessor : public core::Processor {
   static constexpr const char* DEFAULT_SYSTEM_PROMPT = R"(You are a helpful assistant or otherwise called an AI processor.
 You are part of a flow based pipeline helping the user transforming and routing data (encapsulated in what is called flowfiles).
 The user will provide the data, it will have attributes (name and value) and a content.
@@ -49,12 +49,12 @@ What now follows is a description of how the user would like you to transform/ro
 
  public:
-  explicit AiProcessor(std::string_view name, const utils::Identifier& uuid = {})
+  explicit LlamaCppProcessor(std::string_view name, const utils::Identifier& uuid = {})
       : core::Processor(name, uuid) {
   }
 
-  ~AiProcessor() override = default;
+  ~LlamaCppProcessor() override = default;
 
-  EXTENSIONAPI static constexpr const char* Description = "AI processor";
+  EXTENSIONAPI static constexpr const char* Description = "LlamaCpp processor";
 
   EXTENSIONAPI static constexpr auto ModelName = core::PropertyDefinitionBuilder<>::createProperty("Model Name")
       .withDescription("The name of the model")
@@ -103,7 +103,7 @@ What now follows is a description of how the user would like you to transform/ro
   void notifyStop() override;
 
  private:
-  std::shared_ptr<core::logging::Logger> logger_ = core::logging::LoggerFactory<AiProcessor>::getLogger(uuid_);
+  std::shared_ptr<core::logging::Logger> logger_ = core::logging::LoggerFactory<LlamaCppProcessor>::getLogger(uuid_);
 
   double temperature_{0};
   std::string model_name_;
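
Aside from the rename, the one behavioral change in this patch is the gsl::finally guard added in onTrigger, which removes the input flow file on every exit path out of the function, early returns and exceptions included. Below is a minimal, self-contained sketch of that scope-guard pattern; FinalAction and make_finally are hypothetical stand-ins for gsl::finally, not MiNiFi or GSL code.

#include <cstdio>
#include <utility>

// Minimal scope guard in the style of gsl::finally: stores a callable and
// invokes it exactly once when the guard goes out of scope.
template <typename F>
class FinalAction {
 public:
  explicit FinalAction(F f) : f_(std::move(f)) {}
  FinalAction(const FinalAction&) = delete;
  FinalAction& operator=(const FinalAction&) = delete;
  ~FinalAction() { f_(); }

 private:
  F f_;
};

// Factory so the callable's type is deduced; C++17 guaranteed copy elision
// lets the non-copyable guard be returned by value.
template <typename F>
FinalAction<F> make_finally(F f) {
  return FinalAction<F>(std::move(f));
}

int main() {
  auto guard = make_finally([] { std::puts("cleanup: runs on scope exit"); });
  std::puts("work: happens first");
  // "cleanup" prints when main returns; it would also print if an early
  // return or an exception unwound this scope, which is what the ff_guard
  // in onTrigger above relies on.
}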