Keyboard shortcuts

Press ← or → to navigate between chapters

Press S or / to search in the book

Press ? to show this help

Press Esc to hide this help

API Reference

Quick reference for the main dataflow-rs types and methods.

Type Aliases

Dataflow-rs provides rules-engine aliases alongside the original workflow terminology:

| Rules Engine | Workflow Engine | Import                          |
|--------------|-----------------|----------------------------------|
| RulesEngine  | Engine          | use dataflow_rs::RulesEngine;    |
| Rule         | Workflow        | use dataflow_rs::Rule;           |
| Action       | Task            | use dataflow_rs::Action;         |

Both names refer to the same types — use whichever fits your mental model.

Engine (RulesEngine)

The central component that evaluates rules and processes messages.

#![allow(unused)]
fn main() {
use dataflow_rs::Engine;  // or: use dataflow_rs::RulesEngine;
}

Constructor

#![allow(unused)]
fn main() {
pub fn new(
    workflows: Vec<Workflow>,
    custom_functions: Option<HashMap<String, Box<dyn AsyncFunctionHandler + Send + Sync>>>
) -> Engine
}

Creates a new engine with the given rules. All JSONLogic is compiled at creation.

Methods

#![allow(unused)]
fn main() {
// Process a message through all matching rules
pub async fn process_message(&self, message: &mut Message) -> Result<()>

// Process with execution trace for debugging
pub async fn process_message_with_trace(&self, message: &mut Message) -> Result<ExecutionTrace>

// Process only workflows on a specific channel (O(1) lookup)
pub async fn process_message_for_channel(&self, channel: &str, message: &mut Message) -> Result<()>

// Channel routing with execution trace
pub async fn process_message_for_channel_with_trace(&self, channel: &str, message: &mut Message) -> Result<ExecutionTrace>

// Get registered rules (sorted by priority)
pub fn workflows(&self) -> &Arc<Vec<Workflow>>

// Find a workflow by ID
pub fn workflow_by_id(&self, id: &str) -> Option<&Workflow>

// Create a new engine with different workflows, preserving custom functions
pub fn with_new_workflows(&self, workflows: Vec<Workflow>) -> Self
}

Workflow (Rule)

A collection of actions with optional conditions and priority.

#![allow(unused)]
fn main() {
use dataflow_rs::Workflow;  // or: use dataflow_rs::Rule;
}

Constructors

#![allow(unused)]
fn main() {
// Parse from JSON string
pub fn from_json(json: &str) -> Result<Workflow>

// Load from file
pub fn from_file(path: &str) -> Result<Workflow>

// Convenience constructor for rules-engine pattern
pub fn rule(id: &str, name: &str, condition: Value, tasks: Vec<Task>) -> Self
}

JSON Schema

{
    "id": "string (required)",
    "name": "string (optional)",
    "priority": "number (optional, default: 0)",
    "condition": "JSONLogic (optional, evaluated against full context)",
    "continue_on_error": "boolean (optional, default: false)",
    "tasks": "array of Task (required)",
    "channel": "string (optional, default: 'default')",
    "version": "number (optional, default: 1)",
    "status": "'active' | 'paused' | 'archived' (optional, default: 'active')",
    "tags": "array of string (optional, default: [])",
    "created_at": "ISO 8601 datetime (optional)",
    "updated_at": "ISO 8601 datetime (optional)"
}

Task (Action)

An individual processing unit within a rule.

#![allow(unused)]
fn main() {
use dataflow_rs::Task;  // or: use dataflow_rs::Action;
}

Constructor

#![allow(unused)]
fn main() {
// Convenience constructor for rules-engine pattern
pub fn action(id: &str, name: &str, function: FunctionConfig) -> Self
}

JSON Schema

{
    "id": "string (required)",
    "name": "string (optional)",
    "condition": "JSONLogic (optional, evaluated against full context)",
    "continue_on_error": "boolean (optional)",
    "function": {
        "name": "string (required)",
        "input": "object (required)"
    }
}

Message

The data container that flows through rules.

#![allow(unused)]
fn main() {
use dataflow_rs::Message;
}

Constructors

#![allow(unused)]
fn main() {
// Create from data (goes to context.data)
pub fn new(data: &Value) -> Message

// Create from full context value
pub fn from_value(context: &Value) -> Message
}

Fields

#![allow(unused)]
fn main() {
pub struct Message {
    pub id: Uuid,
    pub payload: Arc<Value>,
    pub context: Value,  // Contains data, metadata, temp_data
    pub audit_trail: Vec<AuditTrail>,
    pub errors: Vec<ErrorInfo>,
}
}

Methods

#![allow(unused)]
fn main() {
// Get context as Arc for efficient sharing
pub fn get_context_arc(&mut self) -> Arc<Value>

// Invalidate context cache after modifications
pub fn invalidate_context_cache(&mut self)
}

AsyncFunctionHandler

Trait for implementing custom action handlers.

#![allow(unused)]
fn main() {
use dataflow_rs::engine::AsyncFunctionHandler;
}

Trait Definition

#![allow(unused)]
fn main() {
#[async_trait]
pub trait AsyncFunctionHandler: Send + Sync {
    async fn execute(
        &self,
        message: &mut Message,
        config: &FunctionConfig,
        datalogic: Arc<DataLogic>,
    ) -> Result<(usize, Vec<Change>)>;
}
}

Return Value

  • usize - Status code (200 = success, 400 = validation error, 500 = execution error)
  • Vec<Change> - List of changes for audit trail

FunctionConfig

Configuration for function execution.

#![allow(unused)]
fn main() {
pub struct FunctionConfig {
    pub name: String,
    pub input: Value,
}
}

Change

Represents a single data modification.

#![allow(unused)]
fn main() {
pub struct Change {
    pub path: Arc<str>,
    pub old_value: Arc<Value>,
    pub new_value: Arc<Value>,
}
}

AuditTrail

Records changes made by an action.

#![allow(unused)]
fn main() {
pub struct AuditTrail {
    pub task_id: String,
    pub workflow_id: String,
    pub timestamp: DateTime<Utc>,
    pub changes: Vec<Change>,
}
}

ErrorInfo

Error information recorded in the message.

#![allow(unused)]
fn main() {
pub struct ErrorInfo {
    pub code: String,
    pub message: String,
    pub workflow_id: Option<String>,
    pub task_id: Option<String>,
}
}

DataflowError

Main error type for the library.

#![allow(unused)]
fn main() {
use dataflow_rs::engine::error::DataflowError;
}

Variants

#![allow(unused)]
fn main() {
pub enum DataflowError {
    Validation(String),
    Execution(String),
    Logic(String),
    Io(String),
    // ... other variants
}
}

WorkflowStatus

Lifecycle status for workflows.

#![allow(unused)]
fn main() {
use dataflow_rs::WorkflowStatus;
}

Variants

#![allow(unused)]
fn main() {
pub enum WorkflowStatus {
    Active,    // Default — workflow executes normally
    Paused,    // Excluded from channel routing
    Archived,  // Permanently retired
}
}

Built-in Functions

map

Data transformation using JSONLogic.

{
    "name": "map",
    "input": {
        "mappings": [
            {
                "path": "string",
                "logic": "JSONLogic expression"
            }
        ]
    }
}

validation

Rule-based data validation.

{
    "name": "validation",
    "input": {
        "rules": [
            {
                "logic": "JSONLogic expression",
                "message": "string"
            }
        ]
    }
}

filter

Pipeline control flow — halt workflow or skip task.

{
    "name": "filter",
    "input": {
        "condition": "JSONLogic expression",
        "on_reject": "halt | skip (default: halt)"
    }
}

Status codes: 200 (pass), 298 (skip), 299 (halt).

log

Structured logging with JSONLogic expressions.

{
    "name": "log",
    "input": {
        "level": "trace | debug | info | warn | error (default: info)",
        "message": "JSONLogic expression",
        "fields": {
            "key": "JSONLogic expression"
        }
    }
}

Always returns (200, []) — never modifies the message.

WASM API (dataflow-wasm)

For browser/JavaScript usage.

import init, { WasmEngine, process_message } from 'dataflow-wasm';

// Initialize
await init();

// Create engine
const engine = new WasmEngine(workflowsJson);

// Process with payload string (returns Promise)
const result = await engine.process(payloadStr);

// One-off convenience function (no engine needed)
const result2 = await process_message(workflowsJson, payloadStr);

// Get rule info
const count = engine.workflow_count();
const ids = engine.workflow_ids();

Full API Documentation

For complete API documentation, run:

cargo doc --open

This generates detailed documentation from the source code comments.