V2.1.0 (#6)
* 2.1 - fixed docs, added preloaded header for partials

* updated perf with travis'

* added schema.sizes, applyBlank, updated bench

* updated perf scores
fed135 authored Jun 1, 2017
1 parent c9b20d0 commit 058d6f7
Showing 12 changed files with 204 additions and 89 deletions.
32 changes: 13 additions & 19 deletions README.md
@@ -63,36 +63,30 @@ const buffer = userSchema.bytes();
const content = userSchema.read(buffer);

// Decoding (partial)
-const content = userSchema.readHeader(header).readContent(partial);
+const content = userSchema.readContent(partial);
```


## Performances

```
-[Array] JSON x 84.04 ops/sec ±0.77% (64 runs sampled)
-[Array] Compactr x 88.12 ops/sec ±0.99% (66 runs sampled)
+[Array] JSON x 188 ops/sec ±2.47% (73 runs sampled)
+[Array] Compactr x 248 ops/sec ±3.16% (72 runs sampled)
-[Boolean] JSON x 99.29 ops/sec ±0.88% (64 runs sampled)
-[Boolean] Compactr x 190 ops/sec ±1.17% (75 runs sampled)
+[Boolean] JSON x 220 ops/sec ±5.04% (71 runs sampled)
+[Boolean] Compactr x 731 ops/sec ±7.57% (74 runs sampled)
-[Float] JSON x 66.76 ops/sec ±1.10% (61 runs sampled)
-[Float] Compactr x 112 ops/sec ±1.67% (70 runs sampled)
+[Float] JSON x 159 ops/sec ±3.41% (70 runs sampled)
+[Float] Compactr x 476 ops/sec ±1.58% (85 runs sampled)
-[Integer] JSON x 103 ops/sec ±1.41% (66 runs sampled)
-[Integer] Compactr x 202 ops/sec ±1.74% (74 runs sampled)
+[Integer] JSON x 264 ops/sec ±1.79% (79 runs sampled)
+[Integer] Compactr x 885 ops/sec ±1.36% (84 runs sampled)
-[Integer (negative)] JSON x 109 ops/sec ±1.24% (69 runs sampled)
-[Integer (negative)] Compactr x 203 ops/sec ±1.55% (74 runs sampled)
-[Object] JSON x 139 ops/sec ±1.89% (76 runs sampled)
-[Object] Compactr x 169 ops/sec ±1.52% (80 runs sampled)
+[Object] JSON x 61.91 ops/sec ±1.32% (58 runs sampled)
+[Object] Compactr x 44.78 ops/sec ±2.19% (54 runs sampled)
-[String] JSON x 68.89 ops/sec ±1.48% (63 runs sampled)
-[String] Compactr x 79.16 ops/sec ±2.04% (61 runs sampled)
-[String (special characters)] JSON x 71.65 ops/sec ±1.15% (65 runs sampled)
-[String (special characters)] Compactr x 144 ops/sec ±1.37% (71 runs sampled)
+[String] JSON x 107 ops/sec ±6.86% (64 runs sampled)
+[String] Compactr x 167 ops/sec ±4.86% (72 runs sampled)
```


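The documentation change above relies on the new preloaded header: when a schema is built, `applyBlank()` (added in src/schema.js below) fills the header with each key's declared or default size, so a content-only payload can be decoded without a prior `readHeader` call. A minimal sketch of both paths, assuming this version of the package and illustrative `id`/`name` fields:

```js
const Compactr = require('compactr');

// Sizes are declared up front, so the header preloaded by applyBlank()
// already knows how many bytes each key occupies.
const userSchema = Compactr.schema({
  id: { type: 'int32', size: 4 },
  name: { type: 'char8', size: 6 }
});

// Full payload: header bytes + content bytes, decoded with read()
const full = userSchema.write({ id: 23, name: 'Jamie ' }).array();
const whole = userSchema.read(full);

// Partial payload: content bytes only, decoded against the preloaded header
const partial = userSchema.write({ id: 24, name: 'Casey ' }).contentArray();
const content = userSchema.readContent(partial);

console.log(whole, content);
```

This mirrors the benchmark updates below, which switch from `array()`/`read()` to the content-only `contentArray()`/`readContent()` pair.
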
6 changes: 3 additions & 3 deletions benchmarks/array.js
@@ -12,7 +12,7 @@ const Compactr = require('../');

let User = Compactr.schema({
id: { type: 'int32', size: 4 },
-arr: { type: 'array', items: { type: 'char8' }}
+arr: { type: 'array', size: 6, items: { type: 'char8', size: 1 }}
});

const mult = 32;
@@ -39,7 +39,7 @@ function arrCompactr() {
let packed, unpacked;

for(let i = 0; i<mult*mult; i++) {
-packed = User.write({ id: i, arr: ['a', 'b', 'c'] }).array();
-unpacked = User.read(packed);
+packed = User.write({ id: i, arr: ['a', 'b', 'c'] }).contentArray();
+unpacked = User.readContent(packed);
}
}
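
Only fragments of the benchmark appear in the diff; for context, this is roughly how each suite is put together, using the Benchmark.js calls and the `mult` loop visible above (the `arrJSON` name and the suite wiring outside the shown hunks are assumptions):

```js
const Benchmark = require('benchmark');
const Compactr = require('compactr');

const User = Compactr.schema({
  id: { type: 'int32', size: 4 },
  arr: { type: 'array', size: 6, items: { type: 'char8', size: 1 } }
});

const mult = 32;
const arrSuite = new Benchmark.Suite();

// Baseline: plain JSON round trip
function arrJSON() {
  let packed, unpacked;
  for (let i = 0; i < mult * mult; i++) {
    packed = new Buffer(JSON.stringify({ id: i, arr: ['a', 'b', 'c'] }));
    unpacked = JSON.parse(packed.toString());
  }
}

// Compactr round trip, content-only as of this commit
function arrCompactr() {
  let packed, unpacked;
  for (let i = 0; i < mult * mult; i++) {
    packed = User.write({ id: i, arr: ['a', 'b', 'c'] }).contentArray();
    unpacked = User.readContent(packed);
  }
}

arrSuite.add('[Array] JSON', arrJSON)
  .add('[Array] Compactr', arrCompactr)
  .on('cycle', e => console.log(String(e.target)))
  .run({ 'async': true });
```
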
4 changes: 2 additions & 2 deletions benchmarks/boolean.js
@@ -39,7 +39,7 @@ function boolCompactr() {
let packed, unpacked;

for(let i = 0; i<mult*mult; i++) {
-packed = User.write({ id: i, bool: !!Math.random() }).array();
-unpacked = User.read(packed);
+packed = User.write({ id: i, bool: !!Math.random() }).contentArray();
+unpacked = User.readContent(packed);
}
}
5 changes: 2 additions & 3 deletions benchmarks/double.js
@@ -12,7 +12,6 @@ const Compactr = require('../');

let User = Compactr.schema({
id: { type: 'int32', size: 4 },
-str: { type: 'char8', size: 6 },
int: { type: 'double', size: 8 }
});

@@ -40,7 +39,7 @@ function floatCompactr() {
let packed, unpacked;

for(let i = 0; i<mult*mult; i++) {
-packed = User.write({ id: i, int: Math.random() }).array();
-unpacked = User.read(packed);
+packed = User.write({ id: i, int: Math.random() }).contentArray();
+unpacked = User.readContent(packed);
}
}
25 changes: 2 additions & 23 deletions benchmarks/integer.js
@@ -12,7 +12,6 @@ const Compactr = require('../');

let User = Compactr.schema({
id: { type: 'int32', size: 4 },
-str: { type: 'char8', size: 6 },
int: { type: 'int32', size: 8 }
});

@@ -23,9 +22,7 @@ const intSuite = new Benchmark.Suite();
/* Float suite ---------------------------------------------------------------*/

intSuite.add('[Integer] JSON', intJSON)
-.add('[Integer] JSON (negative)', negativeIntJSON)
.add('[Integer] Compactr', intCompactr)
-.add('[Integer] Compactr (negative)', negativeIntCompactr)
.on('cycle', e => console.log(String(e.target)))
.run({ 'async': true });

@@ -42,25 +39,7 @@ function intCompactr() {
let packed, unpacked;

for(let i = 0; i<mult*mult; i++) {
-packed = User.write({ id: i, int: Math.round(Math.random() * 1000000) }).array();
-unpacked = User.read(packed);
+packed = User.write({ id: i, int: Math.round(Math.random() * 1000000) }).contentArray();
+unpacked = User.readContent(packed);
}
}

-function negativeIntJSON() {
-let packed, unpacked;
-
-for(let i = 0; i<mult*mult; i++) {
-packed = new Buffer(JSON.stringify({ id: i, int: Math.round(Math.random() * -1000000) }));
-unpacked = JSON.parse(packed.toString());
-}
-}
-
-function negativeIntCompactr() {
-let packed, unpacked;
-
-for(let i = 0; i<mult*mult; i++) {
-packed = User.write({ id: i, int: Math.round(Math.random() * -1000000) }).array();
-unpacked = User.read(packed);
-}
-}
6 changes: 3 additions & 3 deletions benchmarks/object.js
@@ -12,7 +12,7 @@ const Compactr = require('../');

let User = Compactr.schema({
id: { type: 'int32', size: 4 },
-obj: { type: 'object', schema: { str: { type: 'string' } } },
+obj: { type: 'object', size: 9, schema: { str: { type: 'string', size: 6 } } },
});

const mult = 32;
@@ -40,7 +40,7 @@ function objCompactr() {
let packed, unpacked;

for(let i = 0; i<mult*mult; i++) {
-packed = User.write({ id: i, obj: { str: '' + (Math.random()*0xffffff) } }).array();
-unpacked = User.read(packed);
+packed = User.write({ id: i, obj: { str: '' + (Math.random()*0xffffff) } }).contentArray();
+unpacked = User.readContent(packed);
}
}
26 changes: 3 additions & 23 deletions benchmarks/string.js
@@ -23,9 +23,7 @@ const stringSuite = new Benchmark.Suite();
/* Float suite ---------------------------------------------------------------*/

stringSuite.add('[String] JSON', strJSON)
-.add('[String] JSON special characters', strSpecialJSON)
.add('[String] Compactr', strCompactr)
-.add('[String] Compactr special characters', strSpecialCompactr)
.on('cycle', e => console.log(String(e.target)))
.run({ 'async': true });

@@ -34,7 +32,7 @@ function strJSON() {
let packed, unpacked;

for(let i = 0; i<mult*mult; i++) {
-packed = new Buffer(JSON.stringify({ id: i, str: '' + (Math.random()*0xffffff) }));
+packed = new Buffer(JSON.stringify({ id: i, str: '' + (Math.random()*0xffffff), special: String.fromCharCode(Math.random()*0xffff) }));
unpacked = JSON.parse(packed.toString());
}
}
@@ -43,25 +41,7 @@ function strCompactr() {
let packed, unpacked;

for(let i = 0; i<mult*mult; i++) {
-packed = User.write({ id: i, str: '' + (Math.random()*0xffffff) }).array();
-unpacked = User.read(packed);
-}
-}
-
-function strSpecialJSON() {
-let packed, unpacked;
-
-for(let i = 0; i<mult*mult; i++) {
-packed = new Buffer(JSON.stringify({ id: i, special: String.fromCharCode(Math.random()*0xffff) }));
-unpacked = JSON.parse(packed.toString());
-}
-}
-
-function strSpecialCompactr() {
-let packed, unpacked;
-
-for(let i = 0; i<mult*mult; i++) {
-packed = User.write({ id: i, special: String.fromCharCode(Math.random()*0xffff) }).array();
-unpacked = User.read(packed);
+packed = User.write({ id: i, str: '' + (Math.random()*0xffffff), special: String.fromCharCode(Math.random()*0xffff) }).contentArray();
+unpacked = User.readContent(packed);
}
}
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "compactr",
"version": "2.0.0",
"version": "2.1.0",
"description": "Schema based serialization made easy",
"main": "index.js",
"scripts": {
8 changes: 4 additions & 4 deletions src/reader.js
@@ -12,11 +12,11 @@ function Reader(scope) {

function read(bytes) {
readHeader(bytes);
-return readContent(bytes);
+return readContent(bytes, scope.contentBegins);
}

function readHeader(bytes) {
-scope.header.length = 0;
+scope.header = [];
let caret = 1;
const keys = bytes[0];
for (let i = 0; i < keys; i++) {
@@ -43,8 +43,8 @@ }
}
}

-function readContent(bytes) {
-let caret = scope.contentBegins;
+function readContent(bytes, caret) {
+caret = caret || 0;
const ret = {};
for (let i = 0; i < scope.header.length; i++) {
ret[scope.header[i].key.name] = scope.header[i].key.transformOut(bytes.slice(caret, caret + scope.header[i].size));
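
The reader changes are what make the partial path work: `read()` now forwards `scope.contentBegins` explicitly, while a direct `readContent()` call starts at offset 0 and relies on the preloaded header sizes. That also enables a send-the-header-once pattern; a hedged sketch, with illustrative fields and the assumption that `headerArray()`/`readHeader()` pair up this way:

```js
const Compactr = require('compactr');

const userSchema = Compactr.schema({
  id: { type: 'int32', size: 4 },
  name: { type: 'char8', size: 6 }
});

// Sender: emit the header once, then content-only frames.
const first = userSchema.write({ id: 1, name: 'Robin ' });
const header = first.headerArray();
const frame1 = first.contentArray();
const frame2 = userSchema.write({ id: 2, name: 'Terry ' }).contentArray();

// Receiver: apply the header once, then decode each frame on its own.
userSchema.readHeader(header);
console.log(userSchema.readContent(frame1));
console.log(userSchema.readContent(frame2));
```
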
40 changes: 36 additions & 4 deletions src/schema.js
@@ -13,7 +13,25 @@ const Converter = require('./converter');
/* Methods -------------------------------------------------------------------*/

function Schema(schema) {

+const sizeRef = {
+boolean: 1,
+number: 8,
+int8: 1,
+int16: 2,
+int32: 4,
+double: 8,
+string: 2,
+char8: 1,
+char16: 2,
+char32: 4,
+array: 2,
+object: 1,
+unsigned: 8,
+unsigned8: 1,
+unsigned16: 2,
+unsigned32: 4
+};

const scope = {
schema,
indices: {},
@@ -23,8 +41,11 @@ function Schema(schema) {
header: [],
contentBegins: 0
};

scope.indices = preformat(schema);
+const writer = Writer(scope);
+const reader = Reader(scope);

+applyBlank(); // Pre-load header for easy streaming

function preformat(schema) {
const ret = {};
@@ -38,18 +59,29 @@ function Schema(schema) {
ret[key] = {
name: key,
index,
+type: keyType,
transformIn: (childSchema !== undefined) ? Encoder[keyType].bind(null, childSchema) : Encoder[keyType],
transformOut: (childSchema !== undefined) ? Decoder[keyType].bind(null, childSchema) : Decoder[keyType],
coerse: Converter[keyType],
getSize: Encoder.getSize.bind(null, count),
size: schema[key].size || null,
-count
+count,
+nested: childSchema
};
});

return ret;
}

+function applyBlank() {
+for (let key in scope.schema) {
+scope.header.push({
+key: scope.indices[key],
+size: scope.indices[key].size || sizeRef[scope.indices[key].type]
+});
+}
+}

function computeNested(schema, key) {
const keyType = schema[key].type;
const isObject = (keyType === 'object');
@@ -73,7 +105,7 @@ function Schema(schema) {
return childSchema;
}

-return Object.assign({}, Writer(scope), Reader(scope));
+return Object.assign({}, writer, reader);
}

/* Exports -------------------------------------------------------------------*/
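
The `sizeRef` table gives `applyBlank()` a fallback byte length per type, so keys declared without an explicit `size` still get an entry in the preloaded header. A small sketch of that assumption (field names are illustrative, and it presumes fixed-width types fall back cleanly to the defaults):

```js
const Compactr = require('compactr');

// No explicit sizes: the preloaded header falls back to the sizeRef
// defaults (double -> 8 bytes, int32 -> 4 bytes).
const pointSchema = Compactr.schema({
  x: { type: 'double' },
  y: { type: 'double' },
  id: { type: 'int32' }
});

const packed = pointSchema.write({ x: 1.5, y: -2.25, id: 7 }).contentArray();
console.log(pointSchema.readContent(packed)); // expected: { x: 1.5, y: -2.25, id: 7 }
```
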
21 changes: 17 additions & 4 deletions src/writer.js
@@ -34,8 +34,21 @@ function Writer(scope) {
}

function clear() {
-scope.headerBytes.length = 0;
-scope.contentBytes.length = 0;
+scope.headerBytes = [0];
+scope.contentBytes = [];
}

+function sizes(data) {
+const s = {};
+for (let key in data) {
+if (data[key] instanceof Object) {
+s[key] = scope.indices[key].nested.sizes(data[key]);
+s.size = scope.indices[key].transformIn(data[key]).length;
+}
+else s[key] = scope.indices[key].transformIn(data[key]).length;
+}
+
+return s;
+}

function filterKeys(data) {
@@ -51,7 +64,7 @@ function Writer(scope) {
res.push.apply(res, header);
res.push.apply(res, content);
return res;
}
}

function headerBuffer() {
return Buffer.from(scope.headerBytes);
@@ -77,7 +90,7 @@ function Writer(scope) {
return concat(scope.headerBytes, scope.contentBytes);
}

-return { write, headerBuffer, headerArray, contentBuffer, contentArray, buffer, array };
+return { write, headerBuffer, headerArray, contentBuffer, contentArray, buffer, array, sizes };
}

/* Exports -------------------------------------------------------------------*/
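
`sizes()` is the other addition called out in the commit message: it runs each value through the same `transformIn` encoders used for writing and reports the resulting byte length per key, without assembling a payload. A short usage sketch (field names and the exact output are illustrative):

```js
const Compactr = require('compactr');

const userSchema = Compactr.schema({
  id: { type: 'int32', size: 4 },
  name: { type: 'char8', size: 6 }
});

// Encoded byte length of each value for this particular payload.
const byteSizes = userSchema.sizes({ id: 23, name: 'Jamie ' });
console.log(byteSizes); // e.g. { id: 4, name: 6 }
```
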