Async Chunk Reader

This library lets you read large amounts of data in chunks: it takes a file path, a stream, or a string as input and returns an async iterator over the chunks.

Install

npm install --save async-chunk-reader

Changelog

  • Added reading from zip files (v1.0.6)
  • Added specialChunkSize (v1.0.8)
  • skipRows to be added in the future (v1.0.9)

API


init(parameters : InitParameters)

input : InitParameters

  • chunkSize : Number
  • inputFile : String | Stream
  • encoding : String
  • selectedFileName : String (file to read inside a zip archive)
  • specialChunkSize : Object
  • skipRows : Number (to be added in v1.0.9)

get()

output : Async Iterator
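
Because get() resolves to an async iterable, you can also pull chunks by hand through the async iterator protocol instead of for await. A minimal sketch, assuming chunks are arrays of { value } rows as in the Usage examples below:

const reader = require('async-chunk-reader')

async function main(){

    const data = await reader
        .init({
            chunkSize: 100000,
            inputFile: 'input/mobile_network_201805.csv.gz'
        })
        .get()

    // Obtain the iterator and pull a single chunk manually
    const iterator = data[Symbol.asyncIterator]()
    const first = await iterator.next()

    if(!first.done){
        console.log(first.value.length) // number of rows in the first chunk
    }
}

main();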

Import

with require:

const reader = require('async-chunk-reader')

with import:

import * as reader from "async-chunk-reader"

Usage


with path:

async function main(){

    const data = await reader
        .init({
            chunkSize: 100000,
            inputFile: 'input/mobile_network_201805.csv.gz'
        })
        .get()

    for await(const chunk of data){
        console.log(chunk.map(d => d.value))
    }
}

main();

with stream:

async function main(){

    const data = await reader
        .init({
            inputFile: process.stdin
        })
        .get()

    for await(const chunk of data){
        console.log(chunk.map(d => d.value))
    }
}

main();

with string:

async function main(){

    const data = await reader
        .init({
            inputFile: "Some string"
        })
        .get()

    for await(const chunk of data){
        console.log(chunk.map(d => d.value))
    }
}

main();

with zip file:

async function main(){

    const data = await reader
        .init({
            chunkSize: 100000,
            inputFile: 'example/Archive.zip',
            selectedFileName: 'avocado.csv' // file inside the zip archive
        })
        .get()

    for await(const chunk of data){
        console.log(chunk.map(d => d.value))
    }
}

main();

specialChunkSize:

async function main(){

    const data = await reader
        .init({
            chunkSize: 100000,
            inputFile: 'example/Archive.zip',
            specialChunkSize: { 0: 10000, 1: 40000 }
        })
        .get()

    for await(const chunk of data){
        console.log(chunk.map(d => d.value))
    }
}

main();
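
Here specialChunkSize appears to override chunkSize per chunk index: the first chunk (key 0) would hold 10000 rows and the second (key 1) 40000, with later chunks presumably falling back to chunkSize (an assumption based on this example, not documented behaviour).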

To be added in the future (v1.0.9):

skipRows:

async function main(){

    const data = await reader
        .init({
            chunkSize: 100000,
            inputFile: 'example/Archive.zip',
            skipRows: 40000
        })
        .get()

    for await(const chunk of data){
        console.log(chunk.map(d => d.value))
    }
}

main();
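
Presumably skipRows will skip the first 40000 rows of the input before the first chunk is produced; since the option is slated for v1.0.9, the example above is illustrative only.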