Rename extension
adamdebreceni committed Jan 6, 2025
1 parent ee482bc commit 8612676
Showing 4 changed files with 21 additions and 17 deletions.
1 change: 1 addition & 0 deletions cmake/MiNiFiOptions.cmake
@@ -114,6 +114,7 @@ add_minifi_option(ENABLE_GRPC_FOR_LOKI "Enable gRPC for Grafana Loki extension"
add_minifi_option(ENABLE_COUCHBASE "Enable Couchbase support" OFF)
add_minifi_option(ENABLE_EXECUTE_PROCESS "Enable ExecuteProcess processor" OFF)
add_minifi_option(ENABLE_CONTROLLER "Enables the build of MiNiFi controller binary." ON)
+add_minifi_option(ENABLE_LLAMACPP "Enables LlamaCpp support." ON)

set_minifi_cache_variable(CUSTOM_MALLOC OFF "Overwrite malloc implementation.")
set_property(CACHE CUSTOM_MALLOC PROPERTY STRINGS "jemalloc" "mimalloc" "rpmalloc" OFF)
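With this option in place, the extension can be enabled at configure time in the usual CMake way, for example by passing -DENABLE_LLAMACPP=ON (or -DENABLE_ALL=ON, which the guard in the extension's CMakeLists below also honors). This assumes add_minifi_option defines a regular CMake cache option; note the new option is added with a default of ON.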
12 changes: 6 additions & 6 deletions extensions/{ai → llamacpp}/CMakeLists.txt
@@ -17,7 +17,7 @@
# under the License.
#

-if (NOT (ENABLE_ALL OR ENABLE_AI))
+if (NOT (ENABLE_ALL OR ENABLE_LLAMACPP))
return()
endif()

@@ -28,11 +28,11 @@ include(${CMAKE_SOURCE_DIR}/extensions/ExtensionHeader.txt)

file(GLOB SOURCES "processors/*.cpp")

-add_minifi_library(minifi-ai-processors SHARED ${SOURCES})
-target_include_directories(minifi-ai-processors PUBLIC "${CMAKE_SOURCE_DIR}/extensions/ai")
+add_minifi_library(minifi-llamacpp SHARED ${SOURCES})
+target_include_directories(minifi-llamacpp PUBLIC "${CMAKE_SOURCE_DIR}/extensions/llamacpp")

-target_link_libraries(minifi-ai-processors ${LIBMINIFI} llamacpp)
+target_link_libraries(minifi-llamacpp ${LIBMINIFI} llamacpp)

-register_extension(minifi-ai-processors "AI PROCESSORS" AI-PROCESSORS "Provides AI processors")
+register_extension(minifi-llamacpp "AI PROCESSORS" AI-PROCESSORS "Provides AI processors")

-register_extension_linter(minifi-ai-processors-linter)
+register_extension_linter(minifi-llamacpp-linter)
15 changes: 9 additions & 6 deletions extensions/{ai/processors/AiProcessor.cpp → llamacpp/processors/LlamaCppProcessor.cpp}
@@ -15,7 +15,7 @@
* limitations under the License.
*/

#include "AiProcessor.h"
#include "LlamaCppProcessor.h"
#include "core/ProcessContext.h"
#include "core/ProcessSession.h"
#include "Resource.h"
@@ -49,12 +49,12 @@ struct LlamaChatMessage {

} // namespace

-void AiProcessor::initialize() {
+void LlamaCppProcessor::initialize() {
setSupportedProperties(Properties);
setSupportedRelationships(Relationships);
}

-void AiProcessor::onSchedule(core::ProcessContext& context, core::ProcessSessionFactory&) {
+void LlamaCppProcessor::onSchedule(core::ProcessContext& context, core::ProcessSessionFactory&) {
context.getProperty(ModelName, model_name_);
context.getProperty(SystemPrompt, system_prompt_);
context.getProperty(Prompt, prompt_);
@@ -150,12 +150,15 @@ void AiProcessor::onSchedule(core::ProcessContext& context, core::ProcessSession
llama_sampler_chain_add(llama_sampler_, llama_sampler_init_dist(1234));
}

-void AiProcessor::onTrigger(core::ProcessContext& context, core::ProcessSession& session) {
+void LlamaCppProcessor::onTrigger(core::ProcessContext& context, core::ProcessSession& session) {
auto input_ff = session.get();
if (!input_ff) {
context.yield();
return;
}
+auto ff_guard = gsl::finally([&] {
+  session.remove(input_ff);
+});

auto read_result = session.readBuffer(input_ff);
std::string input_content{reinterpret_cast<const char*>(read_result.buffer.data()), read_result.buffer.size()};
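The three added lines above introduce a scope guard: gsl::finally (from the Microsoft GSL) returns an object whose destructor invokes the given callable, so session.remove(input_ff) runs when ff_guard leaves scope, whether onTrigger returns normally or unwinds via an exception. A minimal self-contained sketch of the idiom follows; it is a hand-rolled stand-in for gsl::finally, not MiNiFi code, and the names are illustrative:

    #include <cstdio>
    #include <utility>

    // Scope guard: stores a callable and runs it from the destructor,
    // i.e. on every path out of the enclosing scope.
    template <typename F>
    class FinalAction {
     public:
      explicit FinalAction(F f) : f_(std::move(f)) {}
      ~FinalAction() { f_(); }
      FinalAction(const FinalAction&) = delete;
      FinalAction& operator=(const FinalAction&) = delete;
     private:
      F f_;
    };

    int main() {
      // CTAD (C++17) deduces F from the lambda type.
      FinalAction guard{[] { std::puts("cleanup, e.g. session.remove(input_ff)"); }};
      std::puts("work that may return early or throw");
      return 0;  // the guard's destructor runs after this, printing the cleanup line
    }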
@@ -312,7 +315,7 @@ void AiProcessor::onTrigger(core::ProcessContext& context, core::ProcessSession&
}
}

-void AiProcessor::notifyStop() {
+void LlamaCppProcessor::notifyStop() {
llama_sampler_free(llama_sampler_);
llama_sampler_ = nullptr;
llama_free(llama_ctx_);
@@ -322,6 +325,6 @@ void AiProcessor::notifyStop() {
llama_backend_free();
}

-REGISTER_RESOURCE(AiProcessor, Processor);
+REGISTER_RESOURCE(LlamaCppProcessor, Processor);

} // namespace org::apache::nifi::minifi::processors
10 changes: 5 additions & 5 deletions extensions/{ai/processors/AiProcessor.h → llamacpp/processors/LlamaCppProcessor.h}
@@ -26,7 +26,7 @@

namespace org::apache::nifi::minifi::processors {

-class AiProcessor : public core::Processor {
+class LlamaCppProcessor : public core::Processor {
static constexpr const char* DEFAULT_SYSTEM_PROMPT = R"(You are a helpful assistant or otherwise called an AI processor.
You are part of a flow based pipeline helping the user transforming and routing data (encapsulated in what is called flowfiles).
The user will provide the data, it will have attributes (name and value) and a content.
@@ -49,12 +49,12 @@ What now follows is a description of how the user would like you to transform/ro


public:
-explicit AiProcessor(std::string_view name, const utils::Identifier& uuid = {})
+explicit LlamaCppProcessor(std::string_view name, const utils::Identifier& uuid = {})
: core::Processor(name, uuid) {
}
-~AiProcessor() override = default;
+~LlamaCppProcessor() override = default;

EXTENSIONAPI static constexpr const char* Description = "AI processor";
EXTENSIONAPI static constexpr const char* Description = "LlamaCpp processor";

EXTENSIONAPI static constexpr auto ModelName = core::PropertyDefinitionBuilder<>::createProperty("Model Name")
.withDescription("The name of the model")
@@ -103,7 +103,7 @@ What now follows is a description of how the user would like you to transform/ro
void notifyStop() override;

private:
-std::shared_ptr<core::logging::Logger> logger_ = core::logging::LoggerFactory<AiProcessor>::getLogger(uuid_);
+std::shared_ptr<core::logging::Logger> logger_ = core::logging::LoggerFactory<LlamaCppProcessor>::getLogger(uuid_);

double temperature_{0};
std::string model_name_;
