Take a look at this abomination of JSON:

{
  "status": "ok",
  "ch": "market.btcusdt.trade.detail",
  "ts": 1530026484015,
  "tick": {
    "id": 10539491734,
    "ts": 1530026483694,
    "data": [
      {
        "amount": 0.0281,
        "ts": 1530026483694,
        "id": 105394917346608176980,
        "price": 6176.38,
        "direction": "sell"
      }
    ]
  }
}

In particular, we have a nice 21-digit number. An unsigned 64-bit integer maxes out at 20 digits, and JavaScript numbers can only safely represent integers up to 2^53 - 1 (about 16 digits), so this is problematic.
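
To see the problem in isolation, feed that id to the default parser (the exact rounded value printed depends on the nearest representable double):

// JSON.parse turns the 21-digit id into a double, which cannot hold it exactly.
const lossy = JSON.parse('{"id": 105394917346608176980}');
console.log(Number.MAX_SAFE_INTEGER); // 9007199254740991, the largest exactly-representable integer
console.log(lossy.id);                // a rounded value, not 105394917346608176980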

There are really only two options for addressing this without precision loss:

  1. Use a parsing library that supports bignumbers
  2. Treat numbers as strings

Fortunately there are a couple of libraries for the first option: json-bignum and json-bigint.
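
With json-bigint, for example, oversized integers come back as BigNumber objects instead of rounded doubles. A minimal sketch, assuming the default json-bigint configuration:

const bigintJSON = require("json-bigint");

// Big integers are returned as BigNumber objects, so the digits survive intact.
const safe = bigintJSON.parse('{"id": 105394917346608176980}');
console.log(safe.id.toString()); // "105394917346608176980"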

For the second option, the built-in JSON parser is going to be faster than any user-land library, so we can preprocess the raw string to quote the numbers before handing it to JSON.parse.
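
The sketch below uses the same regex as the regexbig function in the benchmark further down: quote any integer with 15 or more digits before the string reaches JSON.parse.

// Quote 15+ digit integers so JSON.parse keeps them as strings.
const sample = '{"id":105394917346608176980,"price":6176.38}';
const parsed = JSON.parse(sample.replace(/:([0-9]{15,}),/g, ':"$1",'));
console.log(parsed.id);    // "105394917346608176980" -- a string, full precision
console.log(parsed.price); // 6176.38 -- still a regular number

Note the regex leans on the trailing comma, so a big integer sitting in the last property of an object would slip through unquoted; that never happens in this particular feed.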

The test code looks like this:

const bignumJSON = require("json-bignum");
const bigintJSON = require("json-bigint");

// prettier-ignore
let input = '{"status":"ok","ch":"market.btcusdt.trade.detail","ts":1530026484015,"tick":{"id":10539491734,"ts":1530026483694,"data":[{"amount":0.028100000000000000,"ts":1530026483694,"id":105394917346608176980,"price":6176.380000000000000000,"direction":"sell"},{"amount":0.040000000000000000,"ts":1530026483694,"id":105394917346608178014,"price":6176.380000000000000000,"direction":"sell"},{"amount":0.369900000000000000,"ts":1530026483694,"id":105394917346608178019,"price":6176.380000000000000000,"direction":"sell"},{"amount":0.369900000000000000,"ts":1530026483694,"id":105394917346608180766,"price":6176.380000000000000000,"direction":"sell"},{"amount":0.097200000000000000,"ts":1530026483694,"id":105394917346608184328,"price":6176.380000000000000000,"direction":"sell"}]}}';

// Parse the payload 100,000 times with the given function and report the elapsed time.
function runner(fn) {
  console.time(fn.name);
  for (let i = 0; i < 100000; i++) {
    fn();
  }
  console.timeEnd(fn.name);
}

runner(raw);
runner(regexall);
runner(regexbig);
runner(bignum);
runner(bigint);

// raw: built-in JSON.parse, big ints silently lose precision
function raw() {
  JSON.parse(input);
}

// regex all: quote every number (ints and floats) as a string
function regexall() {
  JSON.parse(input.replace(/([0-9]{1,}\.{0,1}[0-9]{0,})/g, '"$1"'));
}

// regex big: quote only integers with 15 or more digits
function regexbig() {
  JSON.parse(input.replace(/:([0-9]{15,}),/g, ':"$1",'));
}

// json-bignum
function bignum() {
  bignumJSON.parse(input);
}

// json-bigint
function bigint() {
  bigintJSON.parse(input);
}

Running this code produces the following results:

$ node test.js 
raw: 633.721ms
regexall: 749.117ms
regexbig: 794.041ms
bignum: 3262.449ms
bigint: 4164.216ms

Not surprisingly, native JSON.parse is the fastest, even with the regex preprocessing added on top.

The first regex quotes all numbers (ints and floats) as strings and comes in about 18% slower than raw parsing. The second regex quotes only the big integers (15 or more digits) and is about 25% slower than raw, but it guarantees we don't lose precision on those ids.
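
As a quick sanity check on that claim (using the input string from the test code, outside the timing run), you can confirm the quoted parse hands back every id digit-for-digit:

// Pull the big ids straight out of the raw string and compare them with
// the ids produced by the quote-then-parse approach.
const ids = input.match(/"id":([0-9]{15,})/g).map(m => m.slice(5));
const safeIds = JSON.parse(input.replace(/:([0-9]{15,}),/g, ':"$1",')).tick.data.map(d => d.id);
console.log(ids.every((id, i) => id === safeIds[i])); // true -- no precision lost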

Fun times.