diff --git a/ProtoDef b/ProtoDef
index 2c023c4..8e07785 160000
--- a/ProtoDef
+++ b/ProtoDef
@@ -1 +1 @@
-Subproject commit 2c023c4f5ee141a5ff88aa002f7743f78a9b99aa
+Subproject commit 8e07785e94626882fa3333184bb366e3f6625356
diff --git a/example.js b/example.js
index 3d3f940..e9dc715 100644
--- a/example.js
+++ b/example.js
@@ -1,7 +1,12 @@
+const assert = require('assert')
 const ProtoDef = require('protodef').ProtoDef
 const Serializer = require('protodef').Serializer
 const Parser = require('protodef').Parser
 
+BigInt.prototype.toJSON = function () { // eslint-disable-line -- Allow serializing BigIntegers
+  return this.toString()
+}
+
 // the protocol can be in a separate json file
 const exampleProtocol = {
   container: 'native',
@@ -9,6 +14,7 @@ const exampleProtocol = {
   byte: 'native',
   bool: 'native',
   switch: 'native',
+  bitflags: 'native',
   entity_look: [
     'container',
     [
@@ -24,10 +30,11 @@ const exampleProtocol = {
         name: 'pitch',
         type: 'i8'
       },
-      {
-        name: 'onGround',
-        type: 'bool'
-      }
+      { name: 'flags', type: ['bitflags', { type: 'u8', flags: ['onGround'] }] },
+      { name: 'longId', type: 'varint64' },
+      { name: 'longerId', type: 'varint128' },
+      { name: 'zigzagId', type: 'zigzag32' },
+      { name: 'zigzagBig', type: 'zigzag64' }
     ]
   ],
   packet: [
@@ -71,12 +78,19 @@ serializer.write({
   params: {
     entityId: 1,
     yaw: 1,
-    pitch: 1,
-    onGround: true
+    pitch: 6,
+    flags: {
+      onGround: true
+    },
+    longId: 13n,
+    longerId: 2n ** 68n, // 9 bytes integer, 10 over wire
+    zigzagId: -3,
+    zigzagBig: 4294967296n
   }
 })
 serializer.pipe(parser)
 
 parser.on('data', function (chunk) {
-  console.log(JSON.stringify(chunk, null, 2))
+  console.dir(chunk, { depth: null })
+  assert.deepEqual([...chunk.buffer], [22, 1, 1, 6, 1, 13, 128, 128, 128, 128, 128, 128, 128, 128, 128, 32, 5, 128, 128, 128, 128, 32])
 })
diff --git a/src/datatypes/compiler-utils.js b/src/datatypes/compiler-utils.js
index 824cae3..2c3d5a8 100644
--- a/src/datatypes/compiler-utils.js
+++ b/src/datatypes/compiler-utils.js
@@ -58,6 +58,27 @@ module.exports = {
     code += 'return { value: { ' + names.join(', ') + ` }, size: ${totalBytes} }`
     return compiler.wrapCode(code)
   }],
+  bitflags: ['parametrizable', (compiler, { type, flags, shift, big }) => {
+    let fstr = JSON.stringify(flags)
+    if (Array.isArray(flags)) {
+      fstr = '{'
+      for (const [k, v] of Object.entries(flags)) fstr += `"${v}": ${big ? (1n << BigInt(k)) : (1 << k)}` + (big ? 'n,' : ',')
+      fstr += '}'
+    } else if (shift) {
+      fstr = '{'
+      for (const key in flags) fstr += `"${key}": ${1 << flags[key]}${big ? 'n,' : ','}`
+      fstr += '}'
+    }
+    return compiler.wrapCode(`
+const { value: _value, size } = ${compiler.callType(type, 'offset')}
+const value = { _value }
+const flags = ${fstr}
+for (const key in flags) {
+  value[key] = (_value & flags[key]) == flags[key]
+}
+return { value, size }
+    `.trim())
+  }],
   mapper: ['parametrizable', (compiler, mapper) => {
     let code = 'const { value, size } = ' + compiler.callType(mapper.type) + '\n'
     code += 'return { value: ' + JSON.stringify(sanitizeMappings(mapper.mappings)) + '[value] || value, size }'
@@ -116,6 +137,26 @@ module.exports = {
     code += 'return offset'
     return compiler.wrapCode(code)
   }],
+  bitflags: ['parametrizable', (compiler, { type, flags, shift, big }) => {
+    let fstr = JSON.stringify(flags)
+    if (Array.isArray(flags)) {
+      fstr = '{'
+      for (const [k, v] of Object.entries(flags)) fstr += `"${v}": ${big ? (1n << BigInt(k)) : (1 << k)}` + (big ? 'n,' : ',')
+      fstr += '}'
+    } else if (shift) {
+      fstr = '{'
+      for (const key in flags) fstr += `"${key}": ${1 << flags[key]}${big ? 'n,' : ','}`
+      fstr += '}'
+    }
+    return compiler.wrapCode(`
+const flags = ${fstr}
+let val = value._value ${big ? '|| 0n' : ''}
+for (const key in flags) {
+  if (value[key]) val |= flags[key]
+}
+return (ctx.${type})(val, buffer, offset)
+    `.trim())
+  }],
   mapper: ['parametrizable', (compiler, mapper) => {
     const mappings = JSON.stringify(swapMappings(mapper.mappings))
     const code = 'return ' + compiler.callType(`${mappings}[value] || value`, mapper.type)
@@ -148,6 +189,26 @@ module.exports = {
     const totalBytes = Math.ceil(values.reduce((acc, { size }) => acc + size, 0) / 8)
     return `${totalBytes}`
   }],
+  bitflags: ['parametrizable', (compiler, { type, flags, shift, big }) => {
+    let fstr = JSON.stringify(flags)
+    if (Array.isArray(flags)) {
+      fstr = '{'
+      for (const [k, v] of Object.entries(flags)) fstr += `"${v}": ${big ? (1n << BigInt(k)) : (1 << k)}` + (big ? 'n,' : ',')
+      fstr += '}'
+    } else if (shift) {
+      fstr = '{'
+      for (const key in flags) fstr += `"${key}": ${1 << flags[key]}${big ? 'n,' : ','}`
+      fstr += '}'
+    }
+    return compiler.wrapCode(`
+const flags = ${fstr}
+let val = value._value ${big ? '|| 0n' : ''}
+for (const key in flags) {
+  if (value[key]) val |= flags[key]
+}
+return (ctx.${type})(val)
+    `.trim())
+  }],
   mapper: ['parametrizable', (compiler, mapper) => {
     const mappings = JSON.stringify(swapMappings(mapper.mappings))
     const code = 'return ' + compiler.callType(`${mappings}[value] || value`, mapper.type)
diff --git a/src/datatypes/utils.js b/src/datatypes/utils.js
index 8cc1d52..2f1722b 100644
--- a/src/datatypes/utils.js
+++ b/src/datatypes/utils.js
@@ -1,14 +1,15 @@
 const { getCount, sendCount, calcCount, PartialReadError } = require('../utils')
 
 module.exports = {
-  varint: [readVarInt, writeVarInt, sizeOfVarInt, require('../../ProtoDef/schemas/utils.json').varint],
   bool: [readBool, writeBool, 1, require('../../ProtoDef/schemas/utils.json').bool],
   pstring: [readPString, writePString, sizeOfPString, require('../../ProtoDef/schemas/utils.json').pstring],
   buffer: [readBuffer, writeBuffer, sizeOfBuffer, require('../../ProtoDef/schemas/utils.json').buffer],
   void: [readVoid, writeVoid, 0, require('../../ProtoDef/schemas/utils.json').void],
   bitfield: [readBitField, writeBitField, sizeOfBitField, require('../../ProtoDef/schemas/utils.json').bitfield],
+  bitflags: [readBitflags, writeBitflags, sizeOfBitflags, require('../../ProtoDef/schemas/utils.json').bitflags],
   cstring: [readCString, writeCString, sizeOfCString, require('../../ProtoDef/schemas/utils.json').cstring],
-  mapper: [readMapper, writeMapper, sizeOfMapper, require('../../ProtoDef/schemas/utils.json').mapper]
+  mapper: [readMapper, writeMapper, sizeOfMapper, require('../../ProtoDef/schemas/utils.json').mapper],
+  ...require('./varint')
 }
 
 function mapperEquality (a, b) {
@@ -58,47 +59,6 @@ function sizeOfMapper (value, { type, mappings }, rootNode) {
   return this.sizeOf(mappedValue, type, rootNode)
 }
 
-function readVarInt (buffer, offset) {
-  let result = 0
-  let shift = 0
-  let cursor = offset
-
-  while (true) {
-    if (cursor + 1 > buffer.length) { throw new PartialReadError() }
-    const b = buffer.readUInt8(cursor)
-    result |= ((b & 0x7f) << shift) // Add the bits to our number, except MSB
-    cursor++
-    if (!(b & 0x80)) { // If the MSB is not set, we return the number
-      return {
-        value: result,
-        size: cursor - offset
-      }
-    }
-    shift += 7 // we only have 7 bits, MSB being the return-trigger
-    if (shift > 64) throw new PartialReadError(`varint is too big: ${shift}`) // Make sure our shift don't overflow.
-  }
-}
-
-function sizeOfVarInt (value) {
-  let cursor = 0
-  while (value & ~0x7F) {
-    value >>>= 7
-    cursor++
-  }
-  return cursor + 1
-}
-
-function writeVarInt (value, buffer, offset) {
-  let cursor = 0
-  while (value & ~0x7F) {
-    buffer.writeUInt8((value & 0xFF) | 0x80, offset + cursor)
-    cursor++
-    value >>>= 7
-  }
-  buffer.writeUInt8(value, offset + cursor)
-  return offset + cursor + 1
-}
-
 function readPString (buffer, offset, typeArgs, rootNode) {
   const { size, count } = getCount.call(this, buffer, offset, typeArgs, rootNode)
   const cursor = offset + size
@@ -258,3 +218,65 @@ function sizeOfCString (value) {
   const length = Buffer.byteLength(value, 'utf8')
   return length + 1
 }
+
+function readBitflags (buffer, offset, { type, flags, shift, big }, rootNode) {
+  const { size, value } = this.read(buffer, offset, type, rootNode)
+  let f = {}
+  if (Array.isArray(flags)) {
+    for (const [k, v] of Object.entries(flags)) {
+      f[v] = big ? (1n << BigInt(k)) : (1 << k)
+    }
+  } else if (shift) {
+    for (const k in flags) {
+      f[k] = big ? (1n << BigInt(flags[k])) : (1 << flags[k])
+    }
+  } else {
+    f = flags
+  }
+  const result = { _value: value }
+  for (const key in f) {
+    result[key] = (value & f[key]) === f[key]
+  }
+  return { value: result, size }
+}
+
+function writeBitflags (value, buffer, offset, { type, flags, shift, big }, rootNode) {
+  let f = {}
+  if (Array.isArray(flags)) {
+    for (const [k, v] of Object.entries(flags)) {
+      f[v] = big ? (1n << BigInt(k)) : (1 << k)
+    }
+  } else if (shift) {
+    for (const k in flags) {
+      f[k] = big ? (1n << BigInt(flags[k])) : (1 << flags[k])
+    }
+  } else {
+    f = flags
+  }
+  let val = value._value || (big ? 0n : 0)
+  for (const key in f) {
+    if (value[key]) val |= f[key]
+  }
+  return this.write(val, buffer, offset, type, rootNode)
+}
+
+function sizeOfBitflags (value, { type, flags, shift, big }, rootNode) {
+  if (!value) throw new Error('Missing field')
+  let f = {}
+  if (Array.isArray(flags)) {
+    for (const [k, v] of Object.entries(flags)) {
+      f[v] = big ? (1n << BigInt(k)) : (1 << k)
+    }
+  } else if (shift) {
+    for (const k in flags) {
+      f[k] = big ? (1n << BigInt(flags[k])) : (1 << flags[k])
+    }
+  } else {
+    f = flags
+  }
+  let mappedValue = value._value || (big ? 0n : 0)
+  for (const key in f) {
+    if (value[key]) mappedValue |= f[key]
+  }
+  return this.sizeOf(mappedValue, type, rootNode)
+}
diff --git a/src/datatypes/varint.js b/src/datatypes/varint.js
new file mode 100644
index 0000000..4abf8b6
--- /dev/null
+++ b/src/datatypes/varint.js
@@ -0,0 +1,138 @@
+const { PartialReadError } = require('../utils')
+
+module.exports = {
+  varint: [readVarInt, writeVarInt, sizeOfVarInt, require('../../ProtoDef/schemas/numeric.json').varint],
+  varint64: [readVarLong, writeVarLong, sizeOfVarLong, require('../../ProtoDef/schemas/numeric.json').varint64],
+  varint128: [readVarLong128, writeVarLong, sizeOfVarLong, require('../../ProtoDef/schemas/numeric.json').varint128],
+  zigzag32: [readSignedVarInt, writeSignedVarInt, sizeOfSignedVarInt, require('../../ProtoDef/schemas/numeric.json').zigzag32],
+  zigzag64: [readSignedVarLong, writeSignedVarLong, sizeOfSignedVarLong, require('../../ProtoDef/schemas/numeric.json').zigzag64]
+}
+
+// u32
+
+function readVarInt (buffer, offset) {
+  let result = 0
+  let shift = 0
+  let cursor = offset
+
+  while (true) {
+    if (cursor >= buffer.length) throw new PartialReadError('Unexpected buffer end while reading VarInt')
+    const byte = buffer.readUInt8(cursor)
+    result |= (byte & 0x7F) << shift // Add the bits, excluding the MSB
+    cursor++
+    if (!(byte & 0x80)) { // If MSB is not set, return result
+      return { value: result, size: cursor - offset }
+    }
+    shift += 7
+    if (shift > 64) throw new PartialReadError(`varint is too big: ${shift}`) // Make sure our shift don't overflow.
+  }
+}
+
+function sizeOfVarInt (value) {
+  let cursor = 0
+  while (value & ~0x7F) {
+    value >>>= 7
+    cursor++
+  }
+  return cursor + 1
+}
+
+function writeVarInt (value, buffer, offset) {
+  let cursor = 0
+  while (value & ~0x7F) {
+    buffer.writeUInt8((value & 0xFF) | 0x80, offset + cursor)
+    cursor++
+    value >>>= 7
+  }
+  buffer.writeUInt8(value, offset + cursor)
+  return offset + cursor + 1
+}
+
+// u64
+
+function readVarLong (buffer, offset) {
+  let result = 0n
+  let shift = 0n
+  let cursor = offset
+
+  while (true) {
+    if (cursor >= buffer.length) throw new PartialReadError('Unexpected buffer end while reading VarLong')
+    const byte = buffer.readUInt8(cursor)
+    result |= (BigInt(byte) & 0x7Fn) << shift // Add the bits, excluding the MSB
+    cursor++
+    if (!(byte & 0x80)) { // If MSB is not set, return result
+      return { value: result, size: cursor - offset }
+    }
+    shift += 7n
+    if (shift > 63n) throw new Error(`varint is too big: ${shift}`)
+  }
+}
+
+function readVarLong128 (buffer, offset) {
+  let result = 0n
+  let shift = 0n
+  let cursor = offset
+
+  while (true) {
+    if (cursor >= buffer.length) throw new PartialReadError('Unexpected buffer end while reading VarLong')
+    const byte = buffer.readUInt8(cursor)
+    result |= (BigInt(byte) & 0x7Fn) << shift // Add the bits, excluding the MSB
+    cursor++
+    if (!(byte & 0x80)) { // If MSB is not set, return result
+      return { value: result, size: cursor - offset }
+    }
+    shift += 7n
+    if (shift > 127n) throw new Error(`varint is too big: ${shift}`)
+  }
+}
+
+function sizeOfVarLong (value) {
+  value = BigInt(value)
+  let size = 0
+  do {
+    value >>= 7n
+    size++
+  } while (value !== 0n)
+  return size
+}
+
+function writeVarLong (value, buffer, offset) {
+  value = BigInt(value)
+  let cursor = offset
+  do {
+    const byte = value & 0x7Fn
+    value >>= 7n
+    buffer.writeUInt8(Number(byte) | (value ? 0x80 : 0), cursor++)
+  } while (value)
+  return cursor
+}
+
+// Zigzag 32
+
+function readSignedVarInt (buffer, offset) {
+  const { value, size } = readVarInt(buffer, offset)
+  return { value: (value >>> 1) ^ -(value & 1), size }
+}
+
+function sizeOfSignedVarInt (value) {
+  return sizeOfVarInt((value << 1) ^ (value >> 31))
+}
+
+function writeSignedVarInt (value, buffer, offset) {
+  return writeVarInt((value << 1) ^ (value >> 31), buffer, offset)
+}
+
+// Zigzag 64
+
+function readSignedVarLong (buffer, offset) {
+  const { value, size } = readVarLong(buffer, offset)
+  return { value: (value >> 1n) ^ -(value & 1n), size }
+}
+
+function sizeOfSignedVarLong (value) {
+  return sizeOfVarLong((BigInt(value) << 1n) ^ (BigInt(value) >> 63n))
+}
+
+function writeSignedVarLong (value, buffer, offset) {
+  return writeVarLong((BigInt(value) << 1n) ^ (BigInt(value) >> 63n), buffer, offset)
+}
diff --git a/test/dataTypes/datatypes.js b/test/dataTypes/datatypes.js
index 51bef45..35a50b9 100644
--- a/test/dataTypes/datatypes.js
+++ b/test/dataTypes/datatypes.js
@@ -13,7 +13,12 @@ function testValue (type, value, buffer) {
   })
   it('reads', function () {
     const actualResult = proto.parsePacketBuffer(type, buffer)
-    if (value === null) { assert.ok(actualResult.data === undefined) } else { expect(actualResult.data).to.deep.equal(value) }
+    if (typeof actualResult.data === 'bigint') value = BigInt(value)
+    if (value === null) {
+      assert.ok(actualResult.data === undefined)
+    } else {
+      expect(actualResult.data).to.deep.equal(value)
+    }
     expect(actualResult.metadata.size).to.deep.equal(buffer.length)
   })
   it('writes (compiled)', function () {
diff --git a/test/misc.js b/test/misc.js
new file mode 100644
index 0000000..83200bc
--- /dev/null
+++ b/test/misc.js
@@ -0,0 +1,5 @@
+/* eslint-env mocha */
+
+it('example works', () => {
+  require('../example')
+})
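
For reference, a minimal standalone sketch (plain Node, only the built-in `assert` module) of the zigzag transform that `zigzag32`/`zigzag64` apply before varint encoding. The expressions mirror `writeSignedVarInt`/`readSignedVarInt` and `writeSignedVarLong` in `src/datatypes/varint.js` above; the helper names (`zigzag32Encode`, etc.) are illustrative only and not part of the library's API.

```js
const assert = require('assert')

// Zigzag encode/decode for 32-bit values, mirroring writeSignedVarInt/readSignedVarInt.
const zigzag32Encode = v => (v << 1) ^ (v >> 31)
const zigzag32Decode = v => (v >>> 1) ^ -(v & 1)

// Small-magnitude negatives stay small on the wire: -3 maps to 5, a single varint byte.
assert.strictEqual(zigzag32Encode(-3), 5)
assert.strictEqual(zigzag32Decode(5), -3)

// The 64-bit variant does the same over BigInt, as in writeSignedVarLong.
const zigzag64Encode = v => (BigInt(v) << 1n) ^ (BigInt(v) >> 63n)
assert.strictEqual(zigzag64Encode(4294967296n), 8589934592n)
```

Zigzag interleaves signed values as 0, -1, 1, -2, 2, … so numbers close to zero of either sign fit in few 7-bit varint groups; that is why `zigzagId: -3` in `example.js` contributes only the single byte `5` to the asserted output buffer.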