path: root/src/arrow/js/bin/print-buffer-alignment.js
#! /usr/bin/env node

// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

// @ts-check
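
// Reads an Arrow IPC stream from a file (first CLI argument) or from stdin,
// and prints the byte offset and length of every record batch, dictionary
// batch, and buffer region so buffer alignment and padding can be inspected.
//
// Example usage (the file name here is just a placeholder for any
// stream-format Arrow file):
//
//   node bin/print-buffer-alignment.js ./table.arrow
//   cat ./table.arrow | node bin/print-buffer-alignment.js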

const fs = require('fs');
const path = require('path');
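// With ARROW_JS_DEBUG=src, require the TypeScript sources directly; otherwise use the compiled output.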
const extension = process.env.ARROW_JS_DEBUG === 'src' ? '.ts' : '';
const { RecordBatch, AsyncMessageReader } = require(`../index${extension}`);
const { VectorLoader } = require(`../targets/apache-arrow/visitor/vectorloader`);

(async () => {

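    // Read from the file passed as the first CLI argument, or from stdin when no argument is given.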
    const readable = process.argv.length < 3 ? process.stdin : fs.createReadStream(path.resolve(process.argv[2]));
    const reader = new AsyncMessageReader(readable);

    let schema, recordBatchIndex = 0, dictionaryBatchIndex = 0;

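    // Walk the stream one message at a time: an IPC stream begins with a Schema
    // message, followed by record batches and dictionary batches.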
    for await (const message of reader) {

        let bufferRegions = [];

        if (message.isSchema()) {
            schema = message.header();
            continue;
        } else if (message.isRecordBatch()) {
            const header = message.header();
            bufferRegions = header.buffers;
            const body = await reader.readMessageBody(message.bodyLength);
            const recordBatch = loadRecordBatch(schema, header, body);
            console.log(`record batch ${++recordBatchIndex}: ${JSON.stringify({
                offset: body.byteOffset,
                length: body.byteLength,
                numRows: recordBatch.length,
            })}`);
        } else if (message.isDictionaryBatch()) {
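            // A DictionaryBatch message wraps a RecordBatch in its `data` field, keyed by dictionary id.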
            const header = message.header();
            bufferRegions = header.data.buffers;
            const type = schema.dictionaries.get(header.id);
            const body = await reader.readMessageBody(message.bodyLength);
            const recordBatch = loadDictionaryBatch(header.data, body, type);
            console.log(`dictionary batch ${++dictionaryBatchIndex}: ${JSON.stringify({
                offset: body.byteOffset,
                length: body.byteLength,
                numRows: recordBatch.length,
                dictionaryId: header.id,
            })}`);
        }

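        // Report the offset and length of each buffer region in this message's body.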
        bufferRegions.forEach(({ offset, length }, i) => {
            console.log(`\tbuffer ${i + 1}: { offset: ${offset}, length: ${length} }`);
        });
    }

    await reader.return();

})().catch((e) => { console.error(e); process.exit(1); });

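// Materializes a RecordBatch from the header metadata and raw body bytes so its row count can be reported.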
function loadRecordBatch(schema, header, body) {
    return new RecordBatch(schema, header.length, new VectorLoader(body, header.nodes, header.buffers, new Map()).visitMany(schema.fields));
}

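// Loads the dictionary values for a DictionaryBatch the same way, as a single-column RecordBatch.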
function loadDictionaryBatch(header, body, dictionaryType) {
    return RecordBatch.new(new VectorLoader(body, header.nodes, header.buffers, new Map()).visitMany([dictionaryType]));
}