@sounisi5011/stream-transform-from

Supported Node.js version: ^12.17.x || 14.x || 15.x || 16.x || 17.x || >=18.x

Create a transform stream from an async iterator. This is the last piece needed to convert between streams and async iterators/generators.

Features

  • No dependencies

    This package uses only the Node.js built-in stream.Transform class.

  • Strict type definition

    The exact type definitions for the arguments and the return value are derived from the objectMode option (see the sketch after this list).

  • Encoding arguments can be used

    You can use the encoding that is passed as the second argument of the transform._transform() method. This allows you to safely convert a string to a Buffer object.
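
A minimal TypeScript sketch of what this looks like in practice (ES module syntax; the variable names are illustrative only, and the types follow the description in the API section below):

import { transformFrom } from '@sounisi5011/stream-transform-from';

// Without objectMode, each chunk received from `source` is typed as a Buffer,
// so Buffer methods are available without any narrowing.
const bufferTransform = transformFrom(async function*(source) {
  for await (const { chunk } of source) {
    yield chunk.toString('utf8').toUpperCase();
  }
});

// With `objectMode: true` (or `writableObjectMode: true`), each chunk is
// typed as `unknown`, so you narrow it yourself before using it.
const objectTransform = transformFrom(
  async function*(source) {
    for await (const { chunk } of source) {
      if (typeof chunk === 'number') {
        yield chunk * 2;
      }
    }
  },
  { objectMode: true }
);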

Installation

npm install @sounisi5011/stream-transform-from
yarn add @sounisi5011/stream-transform-from
pnpm add @sounisi5011/stream-transform-from

Usage

Convert Buffer objects

const fs = require('fs');
const stream = require('stream');

const { transformFrom } = require('@sounisi5011/stream-transform-from');

stream.pipeline(
  fs.createReadStream('input.txt', 'utf8'),
  transformFrom(async function*(source) {
    for await (const { chunk } of source) {
      yield chunk.toString('utf8').toUpperCase();
    }
  }),
  fs.createWriteStream('output.txt'),
  error => {
    if (error) {
      console.error(error);
    } else {
      console.log('done!');
    }
  }
);

Convert any type value

const stream = require('stream');

const { transformFrom } = require('@sounisi5011/stream-transform-from');

stream.pipeline(
  stream.Readable.from([1, 2, 3]),
  transformFrom(
    async function*(source) {
      for await (const { chunk } of source) {
        yield chunk + 2;
      }
    },
    { objectMode: true }
  ),
  // ...
  error => {
    if (error) {
      console.error(error);
    } else {
      console.log('done!');
    }
  }
);

Convert string to Buffer using encoding

const stream = require('stream');

const { transformFrom } = require('@sounisi5011/stream-transform-from');

stream.pipeline(
  // ...
  transformFrom(
    async function*(source) {
      for await (const { chunk, encoding } of source) {
        if (typeof chunk === 'string') {
          yield Buffer.from(chunk, encoding);
        }
      }
    },
    { writableObjectMode: true }
  ),
  // ...
  error => {
    if (error) {
      console.error(error);
    } else {
      console.log('done!');
    }
  }
);

API

const { transformFrom } = require('@sounisi5011/stream-transform-from');

// The return value is a Transform stream.
const transformStream = transformFrom(
  async function*(source) {
    // `source` is `AsyncIterableIterator<{ chunk: Buffer, encoding: BufferEncoding }>`
    //          or `AsyncIterableIterator<{ chunk: unknown, encoding: BufferEncoding }>` type

    // Each yielded value is passed as the first argument of the `transform.push()` method.
  },

  // The second argument is the options object for the Transform stream.
  // The options are passed to the constructor function of the Transform class.
  // However, the following fields are not allowed:
  // + `construct`
  // + `read`
  // + `write`
  // + `writev`
  // + `final`
  // + `destroy`
  // + `transform`
  // + `flush`
  // The fields listed above will be ignored if specified.
  {}
);
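
Because the returned Transform stream is itself async iterable on its readable side, the conversion works in both directions. The following is a minimal TypeScript sketch (not taken from the package documentation; the numbers() generator and variable names are illustrative only):

import { Readable } from 'stream';
import { transformFrom } from '@sounisi5011/stream-transform-from';

async function* numbers() {
  yield 1;
  yield 2;
  yield 3;
}

// Feed an async generator through the transform stream...
const doubled = Readable.from(numbers()).pipe(
  transformFrom(
    async function*(source) {
      for await (const { chunk } of source) {
        yield (chunk as number) * 2;
      }
    },
    { objectMode: true }
  )
);

// ...and read the transformed values back as an async iterator.
(async () => {
  for await (const value of doubled) {
    console.log(value); // 2, 4, 6
  }
})();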

Related