Preparers Auto Processing Example

These examples show how to use preparer auto processing with the ChunkText operation in AI Accelerator.

Preparer with table data source

-- Create source test table
CREATE TABLE source_table__1628
(
    id      INT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY,
    content TEXT NOT NULL
);

SELECT aidb.create_preparer_for_table(
    name => 'preparer__1628',
    operation => 'ChunkText',
    source_table => 'source_table__1628',
    source_data_column => 'content',
    destination_table => 'chunked_data__1628',
    destination_data_column => 'chunks',
    source_key_column => 'id',
    destination_key_column => 'id',
    options => '{"desired_length": 1, "max_length": 1000}'::JSONB  -- Configuration for the ChunkText operation
);
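As an optional sanity check (not part of the original example, and assuming the preparer creates the destination table when it's defined), you can confirm that chunked_data__1628 is still empty before any source rows exist:

-- Optional check: the destination table should exist and contain no rows yet
SELECT count(*) FROM chunked_data__1628;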

-- Enable live auto processing: changes to the source table are processed as they happen
SELECT aidb.set_preparer_auto_processing('preparer__1628', 'Live');

-- Insert a long text row; with live auto processing it's chunked into the destination table right away
INSERT INTO source_table__1628
VALUES (1, 'This is a significantly longer text example that might require splitting into smaller chunks. The purpose of this function is to partition text data into segments of a specified maximum length, for example, this sentence is 145 characters. This enables processing or storage of data in manageable parts.');

-- View the generated chunks
SELECT * FROM chunked_data__1628;

-- Insert a second row; each short sentence becomes its own chunk
INSERT INTO source_table__1628
VALUES (2, 'This sentence should be its own chunk. This too.');
SELECT * FROM chunked_data__1628;

-- Delete a source row; its chunks are removed from the destination table as well
DELETE FROM source_table__1628 WHERE id = 1;
SELECT * FROM chunked_data__1628;

-- Disable auto processing; later changes to the source table are no longer picked up automatically
SELECT aidb.set_preparer_auto_processing('preparer__1628', 'Disabled');
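As a further check (hypothetical, not part of the original example), inserting another row while auto processing is disabled should leave the destination table unchanged, since the preparer no longer reacts to source-table changes:

-- Hypothetical follow-up: with auto processing disabled,
-- new source rows are not chunked automatically
INSERT INTO source_table__1628
VALUES (3, 'This row is added while auto processing is disabled.');
SELECT * FROM chunked_data__1628;  -- no chunks appear for id = 3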
