
ml-matrix

Matrix manipulation and computation library.


Installation

$ npm install ml-matrix

Usage

As an ES module

import { Matrix } from 'ml-matrix';

const matrix = Matrix.ones(5, 5);

As a CommonJS module

const { Matrix } = require('ml-matrix');

const matrix = Matrix.ones(5, 5);

API Documentation

Examples

Standard operations

const { Matrix } = require('ml-matrix');

var A = new Matrix([
  [1, 1],
  [2, 2],
]);

var B = new Matrix([
  [3, 3],
  [1, 1],
]);

var C = new Matrix([
  [3, 3],
  [1, 1],
]);

Operations

const addition       = Matrix.add(A, B);   // addition       = Matrix [[4, 4], [3, 3], rows: 2, columns: 2]
const subtraction    = Matrix.sub(A, B);   // subtraction    = Matrix [[-2, -2], [1, 1], rows: 2, columns: 2]
const multiplication = A.mmul(B);          // multiplication = Matrix [[4, 4], [8, 8], rows: 2, columns: 2]
const mulByNumber    = Matrix.mul(A, 10);  // mulByNumber    = Matrix [[10, 10], [20, 20], rows: 2, columns: 2]
const divByNumber    = Matrix.div(A, 10);  // divByNumber    = Matrix [[0.1, 0.1], [0.2, 0.2], rows: 2, columns: 2]
const modulo         = Matrix.mod(B, 2);   // modulo         = Matrix [[1, 1], [1, 1], rows: 2, columns: 2]
const maxMatrix      = Matrix.max(A, B);   // maxMatrix      = Matrix [[3, 3], [2, 2], rows: 2, columns: 2]
const minMatrix      = Matrix.min(A, B);   // minMatrix      = Matrix [[1, 1], [1, 1], rows: 2, columns: 2]

Inplace Operations

C.add(A);   // => C = C + A
C.sub(A);   // => C = C - A
C.mul(10);  // => C = 10 * C
C.div(10);  // => C = C / 10
C.mod(2);   // => C = C % 2

Math Operations

// Standard Math operations: (abs, cos, round, etc.)
var A = new Matrix([
  [ 1,  1],
  [-1, -1],
]);

var exponential = Matrix.exp(A);  // exponential = Matrix [[Math.exp(1), Math.exp(1)], [Math.exp(-1), Math.exp(-1)], rows: 2, columns: 2].
var cosinus     = Matrix.cos(A);  // cosinus     = Matrix [[Math.cos(1), Math.cos(1)], [Math.cos(-1), Math.cos(-1)], rows: 2, columns: 2].
var absolute    = Matrix.abs(A);  // absolute    = Matrix [[1, 1], [1, 1], rows: 2, columns: 2].
// Note: these operations can also be applied in place, e.g. A.abs()

Available Methods:

abs, acos, acosh, asin, asinh, atan, atanh, cbrt, ceil, clz32, cos, cosh, exp, expm1, floor, fround, log, log1p, log10, log2, round, sign, sin, sinh, sqrt, tan, tanh, trunc
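
As with abs above, each of these is available both as a static method that returns a new matrix and as an in-place instance method. A minimal sketch with sqrt:

var S = new Matrix([
  [4, 9],
  [16, 25],
]);

var sqrtS = Matrix.sqrt(S); // sqrtS = Matrix [[2, 3], [4, 5], rows: 2, columns: 2]
S.sqrt();                   // in-place variant: S itself now holds the square roots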

Manipulation of the matrix

// remember: A = Matrix [[1, 1], [-1, -1], rows: 2, columns: 2]

var numberRows     = A.rows;             // A has 2 rows
var numberCols     = A.columns;          // A has 2 columns
var firstValue     = A.get(0, 0);        // get(rowIndex, columnIndex)
var numberElements = A.size;             // 2 * 2 = 4 elements
var isRow          = A.isRowVector();    // false because A has more than 1 row
var isColumn       = A.isColumnVector(); // false because A has more than 1 column
var isSquare       = A.isSquare();       // true, because A is 2 * 2 matrix
var isSym          = A.isSymmetric();    // false, because A is not symmetric
A.set(1, 0, 10);                         // A = Matrix [[1, 1], [10, -1], rows: 2, columns: 2]. The value at row index 1, column index 0 is now 10
var diag           = A.diag();           // diag = [1, -1] (values in the diagonal)
var m              = A.mean();           // m = 2.75
var product        = A.prod();           // product = -10 (product of all values of the matrix)
var norm           = A.norm();           // norm = 10.14889156509222 (Frobenius norm of the matrix)
var transpose      = A.transpose();      // transpose = Matrix [[1, 10], [1, -1], rows: 2, columns: 2]

Instantiation of matrix

var z = Matrix.zeros(3, 2); // z = Matrix [[0, 0], [0, 0], [0, 0], rows: 3, columns: 2]
var z = Matrix.ones(2, 3);  // z = Matrix [[1, 1, 1], [1, 1, 1], rows: 2, columns: 3]
var z = Matrix.eye(3, 4);   // z = Matrix [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], rows: 3, columns: 4]. Ones appear only on the main diagonal
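
A few other factory methods follow the same pattern (columnVector also appears in the least-squares example below; rowVector and rand are assumed here from the same family, so check the API documentation):

var v = Matrix.columnVector([1, 2, 3]); // v = Matrix [[1], [2], [3], rows: 3, columns: 1]
var w = Matrix.rowVector([1, 2, 3]);    // w = Matrix [[1, 2, 3], rows: 1, columns: 3]
var r = Matrix.rand(2, 2);              // r = a 2 x 2 matrix filled with values from Math.random()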

Maths

const {
  Matrix,
  inverse,
  solve,
  linearDependencies,
  QrDecomposition,
  LuDecomposition,
  CholeskyDecomposition,
  EigenvalueDecomposition,
} = require('ml-matrix');

Inverse and Pseudo-inverse

var A = new Matrix([
  [2, 3, 5],
  [4, 1, 6],
  [1, 3, 0],
]);

var inverseA = inverse(A);
var B = A.mmul(inverseA); // B = A * inverse(A), so B ~= Identity
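
// Sanity check (a sketch using only calls shown above): B should be close to the 3 x 3 identity matrix.
var residual = Matrix.sub(B, Matrix.eye(3)).norm(); // residual ~= 0 when the inversion succeeded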


// if A is singular, you can use SVD :
var A = new Matrix([
  [1, 2, 3],
  [4, 5, 6],
  [7, 8, 9],
]); 
// A is singular, so the standard inverse computation will fail

var inverseA = inverse(A, true); // the second argument enables the Singular Value Decomposition, so inverseA is only an approximation of the inverse
var B = A.mmul(inverseA); // B = A * inverse(A), but since inverse(A) is only approximate, B is not exactly the identity
// if you want the pseudo-inverse of a matrix:
var A = new Matrix([
  [1, 2],
  [3, 4],
  [5, 6],
]);

var pseudoInverseA = A.pseudoInverse();
var B = A.mmul(pseudoInverseA).mmul(A); // with pseudo inverse, A*pseudo-inverse(A)*A ~= A. It's the case here
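
You can check this property numerically with the calls already shown above:

var residual = Matrix.sub(B, A).norm(); // residual ~= 0, i.e. A * pseudoInverse(A) * A recovers A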

Least squares

Least squares is the following problem: find x such that A.x = B (where A is a matrix and x and B are matrices or vectors). The examples below show how to solve it with the solve function.

// If A is non singular :
var A = new Matrix([
  [3,    1],
  [4.25, 1],
  [5.5,  1],
  [8,    1],
]);

var B = Matrix.columnVector([4.5, 4.25, 5.5, 5.5]);
var x = solve(A, B);
var error = Matrix.sub(B, A.mmul(x)); // The residual error lets you evaluate how good the solution x is.
// If A is singular:
var A = new Matrix([
  [1, 2, 3],
  [4, 5, 6],
  [7, 8, 9],
]);

var B = Matrix.columnVector([8, 20, 32]);
var x = solve(A, B, true); // the third argument enables SVD; there are many solutions. x can be [1, 2, 1].transpose(), or [1.33, 1.33, 1.33].transpose(), etc.
var error = Matrix.sub(B, A.mmul(x)); // The residual error lets you evaluate how good the solution x is.
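
In both cases you can summarize the residual with a single number, for instance its Frobenius norm:

var errorNorm = error.norm(); // a small value means x fits A.x = B well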

Decompositions

QR Decomposition

var A = new Matrix([
  [2, 3, 5],
  [4, 1, 6],
  [1, 3, 0],
]);

var QR = new QrDecomposition(A);
var Q = QR.orthogonalMatrix;
var R = QR.upperTriangularMatrix;
// So you have the QR decomposition. If you multiply Q by R, you'll see that A = Q.R, with Q orthogonal and R upper triangular
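
You can verify the factorization numerically: multiplying Q by R should reproduce A up to floating-point error (a sketch using only calls shown above).

var reconstructed = Q.mmul(R);                      // reconstructed ~= A
var residual = Matrix.sub(A, reconstructed).norm(); // residual ~= 0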

LU Decomposition

var A = new Matrix([
  [2, 3, 5],
  [4, 1, 6],
  [1, 3, 0],
]);

var LU = new LuDecomposition(A);
var L = LU.lowerTriangularMatrix;
var U = LU.upperTriangularMatrix;
var P = LU.pivotPermutationVector;
// So you have the LU decomposition. P holds the row permutation. Here P = [1, 2, 0], i.e. the first row of L.U is the second row of A, the second row of L.U is the third row of A and the third row of L.U is the first row of A.
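
The factorization can also be reused to solve linear systems. The sketch below assumes the solve method exposed by LuDecomposition (the routine that the top-level solve function relies on for square systems):

var b = Matrix.columnVector([1, 2, 3]);
var x = LU.solve(b); // x solves A.x = b, reusing the existing factorization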

Cholesky Decomposition

var A = new Matrix([
  [2, 3, 5],
  [4, 1, 6],
  [1, 3, 0],
]);

var cholesky = new CholeskyDecomposition(A);
var L = cholesky.lowerTriangularMatrix;
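
Note that the Cholesky decomposition is only defined for symmetric positive-definite matrices, which the A above is not. A sketch with a matrix that is, assuming the isPositiveDefinite check on the decomposition object:

var S = new Matrix([
  [4, 2],
  [2, 3],
]); // symmetric positive-definite

var cholS = new CholeskyDecomposition(S);
if (cholS.isPositiveDefinite()) {
  var LS = cholS.lowerTriangularMatrix;
  var residual = Matrix.sub(S, LS.mmul(LS.transpose())).norm(); // residual ~= 0, since S = L * L^T
}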

Eigenvalues & eigenvectors

var A = new Matrix([
  [2, 3, 5],
  [4, 1, 6],
  [1, 3, 0],
]);

var e = new EigenvalueDecomposition(A);
var real = e.realEigenvalues;
var imaginary = e.imaginaryEigenvalues;
var vectors = e.eigenvectorMatrix;
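
For a real eigenvalue you can check the pair against the defining relation A.v = lambda.v (a sketch, assuming the eigenvalues are listed in the same order as the columns of eigenvectorMatrix):

if (imaginary[0] === 0) {
  var v0 = vectors.getColumnVector(0);        // eigenvector associated with real[0]
  var lhs = A.mmul(v0);                       // A.v
  var rhs = Matrix.mul(v0, real[0]);          // lambda.v
  var residual = Matrix.sub(lhs, rhs).norm(); // residual ~= 0
}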

Linear dependencies

var A = new Matrix([
  [2, 0, 0, 1],
  [0, 1, 6, 0],
  [0, 3, 0, 1],
  [0, 0, 1, 0],
  [0, 1, 2, 0],
]);

var dependencies = linearDependencies(A);
// dependencies is a matrix describing how the rows of A depend on each other. Reading it row by row: the first row is [0, 0, 0, 0, 0], so the first row of A is independent; the second row is [0, 0, 0, 4, 1], meaning the second row of A equals 4 times the 4th row plus the 5th row.
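
You can check the stated dependency directly (row indices are zero-based, so the second row of A is A.getRowVector(1)):

var reconstructedRow = Matrix.add(
  Matrix.mul(A.getRowVector(3), 4), // 4 times the 4th row
  A.getRowVector(4),                // plus the 5th row
); // reconstructedRow = Matrix [[0, 1, 6, 0], rows: 1, columns: 4], identical to A.getRowVector(1)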

License

MIT
