Skip to content

Flowise

Flowise is a powerful AI workflow builder that enables you to create, manage, and deploy complex AI workflows in the Local AI Cyber Lab environment.

Architecture Overview

graph TB
    subgraph Frontend["Frontend"]
        designer["Flow Designer"]
        templates["Template Library"]
        debug["Debug Console"]
        settings["Settings Panel"]
    end

    subgraph Backend["Backend"]
        engine["Workflow Engine"]
        executor["Flow Executor"]
        scheduler["Task Scheduler"]

        subgraph Components["Component System"]
            nodes["Node Types"]
            connectors["Connectors"]
            tools["Tool Integration"]
        end
    end

    subgraph Storage["Persistent Storage"]
        flows["Flow Definitions"]
        credentials["Credentials"]
        cache["Execution Cache"]
    end

    Frontend --> Backend
    Backend --> Storage
    Components --> engine

    classDef primary fill:#f9f,stroke:#333,stroke-width:2px
    classDef secondary fill:#bbf,stroke:#333,stroke-width:1px
    class designer,engine primary
    class nodes,tools secondary

Workflow Structure

graph LR
    subgraph Input["Input Nodes"]
        text["Text Input"]
        file["File Upload"]
        api["API Request"]
    end

    subgraph Processing["Processing Nodes"]
        llm["LLM Node"]
        chain["Chain Node"]
        tool["Tool Node"]
        memory["Memory Node"]
    end

    subgraph Output["Output Nodes"]
        response["Response"]
        file_out["File Output"]
        api_out["API Response"]
    end

    Input --> Processing
    Processing --> Output

    classDef input fill:green,stroke:#333
    classDef process fill:blue,stroke:#333
    classDef output fill:red,stroke:#333

    class text,file,api input
    class llm,chain,tool,memory process
    class response,file_out,api_out output

Installation

Flowise is included in the Local AI Cyber Lab. For manual setup:

# Update Flowise
docker-compose pull flowise

# Start the service
docker-compose up -d flowise

Configuration

Environment Variables

# .env file
FLOWISE_PORT=3000
FLOWISE_USERNAME=admin
FLOWISE_PASSWORD=secure-password
FLOWISE_DATABASE_PATH=/data/flowise.db

Security Settings

# docker-compose.yml
services:
  flowise:
    environment:
      - FLOWISE_USERNAME=${FLOWISE_USERNAME}
      - FLOWISE_PASSWORD=${FLOWISE_PASSWORD}
      - FLOWISE_SECRETKEY=${FLOWISE_SECRETKEY}

Component Types

Input Components

  1. Text Input:

    {
      "name": "Text Input",
      "type": "input",
      "category": "Input",
      "fields": [
        {
          "name": "text",
          "type": "string",
          "required": true
        }
      ]
    }
    

  2. File Input:

    {
      "name": "File Input",
      "type": "input",
      "category": "Input",
      "fields": [
        {
          "name": "file",
          "type": "file",
          "allowedTypes": ["text/*", "application/pdf"]
        }
      ]
    }
    

Processing Components

  1. LLM Node:

    {
      "name": "LLM",
      "type": "llm",
      "category": "AI",
      "fields": [
        {
          "name": "model",
          "type": "select",
          "options": ["gpt-3.5-turbo", "llama2", "mistral"]
        },
        {
          "name": "temperature",
          "type": "number",
          "default": 0.7
        }
      ]
    }
    

  2. Chain Node:

    {
      "name": "Chain",
      "type": "chain",
      "category": "Flow",
      "fields": [
        {
          "name": "steps",
          "type": "array",
          "items": {
            "type": "node"
          }
        }
      ]
    }
    

Workflow Examples

Basic Chat Flow

// Minimal chat pipeline: user text feeds an LLM, which feeds a plain-text response.
const chatFlow = {
  // Three nodes: one input, one model, one output.
  nodes: [
    { id: "input", type: "text_input", data: { name: "User Input" } },
    { id: "llm", type: "llm", data: { model: "llama2", temperature: 0.7 } },
    { id: "output", type: "response", data: { format: "text" } }
  ],
  // Linear wiring: input -> llm -> output.
  edges: [
    { source: "input", target: "llm" },
    { source: "llm", target: "output" }
  ]
}

Advanced Processing Flow

// Document-analysis pipeline: file upload -> markdown parsing -> short-term
// memory -> LLM with an analysis system prompt. (Edges are omitted in this
// example; only the node definitions are shown.)
const advancedFlow = {
  nodes: [
    { id: "input", type: "file_input", data: { allowedTypes: ["text/*"] } },
    { id: "parser", type: "text_parser", data: { format: "markdown" } },
    { id: "memory", type: "memory", data: { type: "buffer", size: 5 } },
    {
      id: "llm",
      type: "llm",
      data: { model: "llama2", systemPrompt: "Analyze the following text:" }
    }
  ]
}

Integration

API Integration

import requests

def create_workflow(workflow_data):
    """Create a new workflow via the Flowise REST API.

    Args:
        workflow_data: Workflow definition (nodes/edges dict) sent as the
            JSON request body.

    Returns:
        The decoded JSON response describing the created workflow.

    Raises:
        requests.HTTPError: If the server returns a 4xx/5xx status.
        requests.Timeout: If the server does not respond within 30 seconds.
    """
    response = requests.post(
        "http://localhost:3000/api/v1/workflows",
        json=workflow_data,
        headers={
            "Authorization": f"Bearer {API_KEY}"
        },
        # requests has no default timeout; without one this call can hang forever.
        timeout=30,
    )
    # Fail loudly on HTTP errors instead of silently parsing an error body.
    response.raise_for_status()
    return response.json()

def execute_workflow(workflow_id, inputs):
    """Execute an existing workflow via the Flowise REST API.

    Args:
        workflow_id: Identifier of the workflow to run (interpolated into the URL).
        inputs: Input values for the workflow, sent as ``{"inputs": inputs}``.

    Returns:
        The decoded JSON execution result.

    Raises:
        requests.HTTPError: If the server returns a 4xx/5xx status.
        requests.Timeout: If the server does not respond within 30 seconds.
    """
    response = requests.post(
        f"http://localhost:3000/api/v1/workflows/{workflow_id}/execute",
        json={"inputs": inputs},
        headers={
            "Authorization": f"Bearer {API_KEY}"
        },
        # requests has no default timeout; without one this call can hang forever.
        timeout=30,
    )
    # Fail loudly on HTTP errors instead of silently parsing an error body.
    response.raise_for_status()
    return response.json()

Component Development

import { INode, INodeData } from '../../../src/Interface'

/**
 * Minimal example of a custom Flowise node.
 *
 * Implements the INode contract: metadata fields describe how the node is
 * rendered in the flow designer, and `init` is invoked when the node executes.
 */
class CustomNode implements INode {
    label: string
    name: string
    type: string
    icon: string
    category: string
    baseClasses: string[]

    constructor() {
        this.label = 'Custom Node'
        this.name = 'customNode'
        this.type = 'customNode'
        this.icon = 'custom.svg'
        this.category = 'Custom'
        this.baseClasses = ['custom']
    }

    /**
     * Execute the node.
     *
     * @param nodeData - Runtime data for this node instance; `inputs.input`
     *   carries the wired-in value and may be undefined if unconnected.
     * @returns The processed output (this example passes the input through).
     */
    async init(nodeData: INodeData): Promise<any> {
        const input = nodeData.inputs?.input
        // Custom processing logic goes here. The original example returned an
        // undeclared `processedOutput` (a ReferenceError); declare it so the
        // example actually runs — replace the pass-through with real logic.
        const processedOutput = input
        return processedOutput
    }
}

Monitoring

Health Checks

# docker-compose.yml
services:
  flowise:
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:3000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

Performance Monitoring

// Monitor workflow execution
/**
 * Execute a workflow and report timing metrics.
 *
 * @param workflowId - Identifier of the workflow to execute.
 * @returns The workflow execution result together with the collected metrics
 *   (the original example discarded both).
 */
const monitorWorkflow = async (workflowId) => {
    const metrics = {
        executionTime: 0,
        // NOTE(review): memoryUsage and nodeCount are placeholders — nothing
        // in this example populates them; collect real values or drop them.
        memoryUsage: 0,
        nodeCount: 0
    }

    // Wall-clock time for the whole execution, in milliseconds.
    const start = Date.now()
    const result = await executeWorkflow(workflowId)
    metrics.executionTime = Date.now() - start

    // Report metrics to the monitoring backend before returning.
    await reportMetrics(metrics)

    return { result, metrics }
}

Troubleshooting

Common Issues

  1. Workflow Execution Issues:

    # Check logs
    docker-compose logs flowise
    
    # Check workflow status
    curl -H "Authorization: Bearer ${API_KEY}" \
      http://localhost:3000/api/v1/workflows/${WORKFLOW_ID}/status
    

  2. Component Issues:

    # Verify component installation
    docker-compose exec flowise npm list
    
    # Check component logs
    docker-compose exec flowise cat /data/logs/components.log
    

Additional Resources

  1. User Guide
  2. Component Development
  3. API Documentation
  4. Best Practices

Best Practices

  1. Workflow Design:
     - Keep workflows modular
     - Use appropriate error handling
     - Implement logging
     - Test thoroughly

  2. Security:
     - Secure API endpoints
     - Manage credentials safely
     - Monitor access
     - Apply updates regularly

  3. Performance:
     - Optimize node connections
     - Use caching when appropriate
     - Monitor resource usage
     - Perform regular maintenance