Skip to content

Commit

Permalink
Add additional varint types and bitflags (#163)
Browse files Browse the repository at this point in the history
* Implement more varint variations

* clean

* varint128

* Update varint.js

* Add bitflags

* logging fix

* fix BigInt handling for bitflags.shift

* run CI against protodef fork

* update test bigint handling

* update ProtoDef submodule

* update submod

* Update package.json
  • Loading branch information
extremeheat authored Dec 8, 2024
1 parent 67b411a commit 89c2588
Show file tree
Hide file tree
Showing 7 changed files with 297 additions and 52 deletions.
2 changes: 1 addition & 1 deletion ProtoDef
28 changes: 21 additions & 7 deletions example.js
Original file line number Diff line number Diff line change
@@ -1,14 +1,20 @@
const assert = require('assert')
const ProtoDef = require('protodef').ProtoDef
const Serializer = require('protodef').Serializer
const Parser = require('protodef').Parser

// Let JSON.stringify serialize BigInt values as decimal strings.
BigInt.prototype.toJSON = function () { // eslint-disable-line -- Allow serializing BigIntegers
  return String(this)
}

// the protocol can be in a separate json file
const exampleProtocol = {
container: 'native',
varint: 'native',
byte: 'native',
bool: 'native',
switch: 'native',
bitflags: 'native',
entity_look: [
'container',
[
Expand All @@ -24,10 +30,11 @@ const exampleProtocol = {
name: 'pitch',
type: 'i8'
},
{
name: 'onGround',
type: 'bool'
}
{ name: 'flags', type: ['bitflags', { type: 'u8', flags: ['onGround'] }] },
{ name: 'longId', type: 'varint64' },
{ name: 'longerId', type: 'varint128' },
{ name: 'zigzagId', type: 'zigzag32' },
{ name: 'zigzagBig', type: 'zigzag64' }
]
],
packet: [
Expand Down Expand Up @@ -71,12 +78,19 @@ serializer.write({
params: {
entityId: 1,
yaw: 1,
pitch: 1,
onGround: true
pitch: 6,
flags: {
onGround: true
},
longId: 13n,
longerId: 2n ** 68n, // 9 bytes integer, 10 over wire
zigzagId: -3,
zigzagBig: 4294967296n
}
})
serializer.pipe(parser)

parser.on('data', function (chunk) {
console.log(JSON.stringify(chunk, null, 2))
console.dir(chunk, { depth: null })
assert.deepEqual([...chunk.buffer], [22, 1, 1, 6, 1, 13, 128, 128, 128, 128, 128, 128, 128, 128, 128, 32, 5, 128, 128, 128, 128, 32])
})
61 changes: 61 additions & 0 deletions src/datatypes/compiler-utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,27 @@ module.exports = {
code += 'return { value: { ' + names.join(', ') + ` }, size: ${totalBytes} }`
return compiler.wrapCode(code)
}],
bitflags: ['parametrizable', (compiler, { type, flags, shift, big }) => {
let fstr = JSON.stringify(flags)
if (Array.isArray(flags)) {
fstr = '{'
for (const [k, v] of Object.entries(flags)) fstr += `"${v}": ${big ? (1n << BigInt(k)) : (1 << k)}` + (big ? 'n,' : ',')
fstr += '}'
} else if (shift) {
fstr = '{'
for (const key in flags) fstr += `"${key}": ${1 << flags[key]}${big ? 'n,' : ','}`
fstr += '}'
}
return compiler.wrapCode(`
const { value: _value, size } = ${compiler.callType(type, 'offset')}
const value = { _value }
const flags = ${fstr}
for (const key in flags) {
value[key] = (_value & flags[key]) == flags[key]
}
return { value, size }
`.trim())
}],
mapper: ['parametrizable', (compiler, mapper) => {
let code = 'const { value, size } = ' + compiler.callType(mapper.type) + '\n'
code += 'return { value: ' + JSON.stringify(sanitizeMappings(mapper.mappings)) + '[value] || value, size }'
Expand Down Expand Up @@ -116,6 +137,26 @@ module.exports = {
code += 'return offset'
return compiler.wrapCode(code)
}],
bitflags: ['parametrizable', (compiler, { type, flags, shift, big }) => {
let fstr = JSON.stringify(flags)
if (Array.isArray(flags)) {
fstr = '{'
for (const [k, v] of Object.entries(flags)) fstr += `"${v}": ${big ? (1n << BigInt(k)) : (1 << k)}` + (big ? 'n,' : ',')
fstr += '}'
} else if (shift) {
fstr = '{'
for (const key in flags) fstr += `"${key}": ${1 << flags[key]}${big ? 'n,' : ','}`
fstr += '}'
}
return compiler.wrapCode(`
const flags = ${fstr}
let val = value._value ${big ? '|| 0n' : ''}
for (const key in flags) {
if (value[key]) val |= flags[key]
}
return (ctx.${type})(val, buffer, offset)
`.trim())
}],
mapper: ['parametrizable', (compiler, mapper) => {
const mappings = JSON.stringify(swapMappings(mapper.mappings))
const code = 'return ' + compiler.callType(`${mappings}[value] || value`, mapper.type)
Expand Down Expand Up @@ -148,6 +189,26 @@ module.exports = {
const totalBytes = Math.ceil(values.reduce((acc, { size }) => acc + size, 0) / 8)
return `${totalBytes}`
}],
bitflags: ['parametrizable', (compiler, { type, flags, shift, big }) => {
let fstr = JSON.stringify(flags)
if (Array.isArray(flags)) {
fstr = '{'
for (const [k, v] of Object.entries(flags)) fstr += `"${v}": ${big ? (1n << BigInt(k)) : (1 << k)}` + (big ? 'n,' : ',')
fstr += '}'
} else if (shift) {
fstr = '{'
for (const key in flags) fstr += `"${key}": ${1 << flags[key]}${big ? 'n,' : ','}`
fstr += '}'
}
return compiler.wrapCode(`
const flags = ${fstr}
let val = value._value ${big ? '|| 0n' : ''}
for (const key in flags) {
if (value[key]) val |= flags[key]
}
return (ctx.${type})(val)
`.trim())
}],
mapper: ['parametrizable', (compiler, mapper) => {
const mappings = JSON.stringify(swapMappings(mapper.mappings))
const code = 'return ' + compiler.callType(`${mappings}[value] || value`, mapper.type)
Expand Down
108 changes: 65 additions & 43 deletions src/datatypes/utils.js
Original file line number Diff line number Diff line change
@@ -1,14 +1,15 @@
const { getCount, sendCount, calcCount, PartialReadError } = require('../utils')

module.exports = {
varint: [readVarInt, writeVarInt, sizeOfVarInt, require('../../ProtoDef/schemas/utils.json').varint],
bool: [readBool, writeBool, 1, require('../../ProtoDef/schemas/utils.json').bool],
pstring: [readPString, writePString, sizeOfPString, require('../../ProtoDef/schemas/utils.json').pstring],
buffer: [readBuffer, writeBuffer, sizeOfBuffer, require('../../ProtoDef/schemas/utils.json').buffer],
void: [readVoid, writeVoid, 0, require('../../ProtoDef/schemas/utils.json').void],
bitfield: [readBitField, writeBitField, sizeOfBitField, require('../../ProtoDef/schemas/utils.json').bitfield],
bitflags: [readBitflags, writeBitflags, sizeOfBitflags, require('../../ProtoDef/schemas/utils.json').bitflags],
cstring: [readCString, writeCString, sizeOfCString, require('../../ProtoDef/schemas/utils.json').cstring],
mapper: [readMapper, writeMapper, sizeOfMapper, require('../../ProtoDef/schemas/utils.json').mapper]
mapper: [readMapper, writeMapper, sizeOfMapper, require('../../ProtoDef/schemas/utils.json').mapper],
...require('./varint')
}

function mapperEquality (a, b) {
Expand Down Expand Up @@ -58,47 +59,6 @@ function sizeOfMapper (value, { type, mappings }, rootNode) {
return this.sizeOf(mappedValue, type, rootNode)
}

// Reads a LEB128-style unsigned varint: 7 payload bits per byte, the MSB
// acting as a continuation flag. Returns { value, size } where size is the
// number of bytes consumed starting at `offset`.
function readVarInt (buffer, offset) {
  let value = 0
  let bitPos = 0
  for (let pos = offset; ;) {
    if (pos + 1 > buffer.length) { throw new PartialReadError() }
    const byte = buffer.readUInt8(pos)
    pos++
    value |= (byte & 0x7f) << bitPos // fold in the 7 payload bits
    if ((byte & 0x80) === 0) {
      // continuation bit clear -> last byte of the varint
      return { value, size: pos - offset }
    }
    bitPos += 7
    // Guard against runaway input. NOTE(review): Number shifts wrap at 32
    // bits, so values wider than 31 bits are not decoded correctly here.
    if (bitPos > 64) throw new PartialReadError(`varint is too big: ${bitPos}`)
  }
}

// Number of bytes writeVarInt will emit for `value` (one byte per 7 bits).
function sizeOfVarInt (value) {
  let rest = value
  let bytes = 1
  while (rest & ~0x7F) { // more than 7 significant bits remain
    rest >>>= 7
    bytes++
  }
  return bytes
}

// Writes `value` as an unsigned varint starting at `offset`; returns the
// offset just past the last byte written.
function writeVarInt (value, buffer, offset) {
  let rest = value
  let pos = offset
  while (rest & ~0x7F) {
    // low 7 bits plus the continuation flag in the MSB
    buffer.writeUInt8((rest & 0xFF) | 0x80, pos)
    pos++
    rest >>>= 7
  }
  buffer.writeUInt8(rest, pos)
  return pos + 1
}

function readPString (buffer, offset, typeArgs, rootNode) {
const { size, count } = getCount.call(this, buffer, offset, typeArgs, rootNode)
const cursor = offset + size
Expand Down Expand Up @@ -258,3 +218,65 @@ function sizeOfCString (value) {
const length = Buffer.byteLength(value, 'utf8')
return length + 1
}

// Reads the underlying integer `type`, then expands it into an object of
// booleans keyed by flag name; the raw integer is kept under `_value`.
// `flags` forms: array (index = bit position), map + `shift` (value = bit
// position), or a plain name->mask map. `big` builds BigInt masks.
function readBitflags (buffer, offset, { type, flags, shift, big }, rootNode) {
  const { size, value } = this.read(buffer, offset, type, rootNode)
  let masks = {}
  if (Array.isArray(flags)) {
    flags.forEach((name, bit) => {
      masks[name] = big ? (1n << BigInt(bit)) : (1 << bit)
    })
  } else if (shift) {
    for (const name of Object.keys(flags)) {
      masks[name] = big ? (1n << BigInt(flags[name])) : (1 << flags[name])
    }
  } else {
    masks = flags // already a name -> mask map
  }
  const decoded = { _value: value }
  for (const name in masks) {
    decoded[name] = (value & masks[name]) === masks[name]
  }
  return { value: decoded, size }
}

// ORs the named boolean flags (plus any raw `_value` passthrough) into a
// single integer and writes it with the underlying `type`'s writer.
// Returns the new offset from that writer.
function writeBitflags (value, buffer, offset, { type, flags, shift, big }, rootNode) {
  let masks = {}
  if (Array.isArray(flags)) {
    flags.forEach((name, bit) => {
      masks[name] = big ? (1n << BigInt(bit)) : (1 << bit)
    })
  } else if (shift) {
    for (const name of Object.keys(flags)) {
      masks[name] = big ? (1n << BigInt(flags[name])) : (1 << flags[name])
    }
  } else {
    masks = flags // already a name -> mask map
  }
  let acc = value._value || (big ? 0n : 0) // BigInt and Number don't mix under |=
  for (const name in masks) {
    if (value[name]) acc |= masks[name]
  }
  return this.write(acc, buffer, offset, type, rootNode)
}

// Folds the flags into the integer value exactly like writeBitflags, then
// asks the underlying `type` for its encoded size. Throws if `value` is
// missing entirely.
function sizeOfBitflags (value, { type, flags, shift, big }, rootNode) {
  if (!value) throw new Error('Missing field')
  let masks = {}
  if (Array.isArray(flags)) {
    flags.forEach((name, bit) => {
      masks[name] = big ? (1n << BigInt(bit)) : (1 << bit)
    })
  } else if (shift) {
    for (const name of Object.keys(flags)) {
      masks[name] = big ? (1n << BigInt(flags[name])) : (1 << flags[name])
    }
  } else {
    masks = flags // already a name -> mask map
  }
  let acc = value._value || (big ? 0n : 0) // BigInt and Number don't mix under |=
  for (const name in masks) {
    if (value[name]) acc |= masks[name]
  }
  return this.sizeOf(acc, type, rootNode)
}
Loading

0 comments on commit 89c2588

Please sign in to comment.