// Apply a batch of put/del operations sequentially against this store.
// Fix: the del branch previously passed the raw array[i].key to _del even
// though the normalized `key` had just been computed and validated — the put
// branch used the normalized key, so del now does the same for consistency.
ReDOWN.prototype._batch = function (array, options, callback) {
  var err
  var key
  var value

  if (Array.isArray(array)) {
    for (var i = 0; i < array.length; i++) {
      if (!array[i]) continue

      // Normalize the key to a buffer or string, then validate it.
      key = bops.is(array[i].key) ? array[i].key : String(array[i].key)
      err = this._checkKey(key, 'key')
      if (err) return setImmediate(function () { callback(err) })

      if (array[i].type === 'del') {
        this._del(key, options, noop)
      } else if (array[i].type === 'put') {
        value = bops.is(array[i].value) ? array[i].value : String(array[i].value)
        err = this._checkKey(value, 'value')
        if (err) return setImmediate(function () { callback(err) })
        this._put(key, value, options, noop)
      }
    }
  }

  // Individual ops are fire-and-forget; signal completion asynchronously.
  callback && setImmediate(callback);
}
// Write a document to the database. Flexible call signatures are supported:
// put(doc, cb), put(doc, opts, cb), put(buffer, opts, cb), and
// put(doc, buffer, opts, cb) — the bops.is() checks below rotate the
// arguments into place. Columns found on the doc (or passed via opts.columns)
// that are new to the schema are registered before the row is stored.
// cb(err, updatedDoc) fires once the row write completes.
Database.prototype.put = function (rawDoc, buffer, opts, cb) {
  var self = this
  var doc = rawDoc
  var updated
  // Called with a pre-encoded buffer first: shift args right and start from
  // an empty doc object.
  if (bops.is(rawDoc)) {
    cb = opts
    opts = buffer
    buffer = rawDoc
    doc = {}
    rawDoc = undefined
  }
  // No buffer supplied: the second argument is really opts (or the callback).
  if (!bops.is(buffer)) {
    cb = opts
    opts = buffer
    buffer = undefined
  } else {
    doc = {}
  }
  if (!cb) {
    cb = opts
    opts = {}
  }
  // Register any columns not yet present in the stored schema metadata.
  var columns = Object.keys(doc)
  if (opts.columns) columns = columns.concat(opts.columns)
  var newColumns = this.meta.getNewColumns(columns)
  // getNewColumns signals rejection via an .error property on its result.
  if (newColumns.error) return cb(newColumns)
  if (newColumns.length === 0) return store()
  self.meta.addColumns(newColumns, function(err) {
    if (err) return cb(err)
    store()
  })
  // Persist the (possibly revision-bumped) doc under its seq and row keys.
  function store() {
    // opts.overwrite skips revision generation and stores the doc as-is.
    if (!opts.overwrite) updated = docUtils.updateRevision(doc, buffer, self.meta.json.columns)
    else updated = doc
    var seq = self.seq = self.seq + 1
    var keys = docUtils.rowKeys(self.keys, self.sep, updated._id, updated._rev, seq, updated._deleted)
    opts.valueEncoding = 'binary'
    if (!buffer) buffer = jsonBuffStream.encode(updated, self.meta.json.columns)
    var seqVal = [seq, updated._id, updated._rev]
    if (updated._deleted) seqVal.push(true) // 4th spot in seqVal array is a deleted boolean
    // The seq-index write is fire-and-forget; the row write drives the callback.
    self.mutex.put(keys.seq, JSON.stringify(seqVal), noop)
    self.mutex.put(keys.row, buffer, opts, afterUpdate)
    function afterUpdate(err) {
      if (err) return cb(err)
      cb(null, updated)
    }
  }
}
// Apply a batch of operations as a single Azure Table Storage batch:
// beginBatch(), queue each put as an insertEntity, then commitBatch().
// NOTE(review): 'del' entries are validated but currently ignored — only the
// todo comment marks where the delete should happen; confirm this is intended.
// NOTE(review): the early validation returns leave the begun batch
// uncommitted — verify tableService tolerates an abandoned batch.
AzureDown.prototype._batch = function (array, options, callback) {
  var err
    , key
    , value
  this.tableService.beginBatch();
  if (Array.isArray(array)) {
    for (var i = 0; i < array.length; i++) {
      if (array[i]) {
        // Normalize key to buffer-or-string, then validate it.
        key = bops.is(array[i].key) ? array[i].key : String(array[i].key)
        err = this._checkKeyValue(key, 'key')
        if (err) return setImmediate(function () { callback(err) })
        if (array[i].type === 'del') {
          // todo, we should be this._del(array[i].key, options, noop)
        } else if (array[i].type === 'put') {
          value = bops.is(array[i].value) ? array[i].value : String(array[i].value)
          err = this._checkKeyValue(value, 'value')
          if (err) return setImmediate(function () { callback(err) })
          // All rows share one partition; the level key becomes the RowKey.
          var entity = {
            PartitionKey: this.config.partitionKey,
            RowKey: array[i].key,
            value: value
          }
          this.tableService.insertEntity(this.config.table, entity);
        }
      }
    }
  }
  // Commit everything queued since beginBatch and report via callback.
  this.tableService.commitBatch(callback);
}
// Write a document. Same flexible signatures as the other put variants:
// the bops.is() checks rotate (doc|buffer, [buffer], [opts], cb) into place.
// New columns are added to the schema metadata before storing.
// NOTE(review): unlike sibling versions, getNewColumns errors are not checked
// here — verify getNewColumns cannot fail in this codebase.
Database.prototype.put = function (rawDoc, buffer, opts, cb) {
  var self = this
  var doc = rawDoc
  var updated
  // Pre-encoded buffer passed first: shift args right, start from empty doc.
  if (bops.is(rawDoc)) {
    cb = opts
    opts = buffer
    buffer = rawDoc
    doc = {}
    rawDoc = undefined
  }
  // No buffer supplied: second argument is opts (or the callback).
  if (!bops.is(buffer)) {
    cb = opts
    opts = buffer
    buffer = undefined
  } else {
    doc = {}
  }
  if (!cb) {
    cb = opts
    opts = {}
  }
  var columns = Object.keys(doc)
  if (opts.columns) columns = columns.concat(opts.columns)
  var newColumns = this.meta.getNewColumns(columns)
  if (newColumns.length === 0) return store()
  self.meta.addColumns(newColumns, function(err) {
    if (err) return cb(err)
    store()
  })
  // Persist the doc under its seq and row keys.
  function store() {
    // opts.overwrite stores the doc as-is without a new revision.
    if (!opts.overwrite) updated = self.updateRevision(doc, buffer)
    else updated = doc
    // updateRevision returning a falsy value means nothing to write;
    // still complete asynchronously with the original doc.
    if (!updated) return setImmediate(function() { cb(null, doc) })
    var seq = self.seq = self.seq + 1
    var keys = self.rowKeys(updated._id, updated._rev, seq)
    opts.valueEncoding = 'binary'
    if (!buffer) buffer = jsonbuff.encode(updated, self.meta.json.columns)
    // Seq-index write is fire-and-forget; the row write drives the callback.
    self.mutex.put(keys.seq, [seq, updated._id, updated._rev], noop)
    self.mutex.put(keys.row, buffer, opts, function (e) {
      if (e) return cb(e)
      cb(null, doc)
    })
  }
}
// Write a document, with optional JSON value encoding. Same flexible
// signatures as the other put variants — the bops.is() checks rotate
// (doc|buffer, [buffer], [opts], cb) into place. When opts.valueEncoding is
// 'json' the doc itself is stored and the column/schema step is skipped.
Database.prototype.put = function (rawDoc, buffer, opts, cb) {
  var self = this
  var doc = rawDoc
  // Pre-encoded buffer passed first: shift args right, start from empty doc.
  if (bops.is(rawDoc)) {
    cb = opts
    opts = buffer
    buffer = rawDoc
    doc = {}
    rawDoc = undefined
  }
  // No buffer supplied: second argument is opts (or the callback).
  if (!bops.is(buffer)) {
    cb = opts
    opts = buffer
    buffer = undefined
  } else {
    doc = {}
  }
  if (!cb) {
    cb = opts
    opts = {}
  }
  // opts.overwrite stores the doc as-is without a new revision.
  if (!opts.overwrite) doc = this.updateRevision(doc, buffer)
  var keys = this.rowKeys(doc._id, doc._seq, doc._rev)
  var isJSON = false
  if (opts.valueEncoding && opts.valueEncoding === 'json') isJSON = true
  if (!isJSON) opts.valueEncoding = 'binary'
  // JSON docs bypass the column/schema bookkeeping entirely.
  if (isJSON) return store()
  var columns = Object.keys(doc)
  if (opts.columns) columns = columns.concat(opts.columns)
  var newColumns = this.getNewColumns(columns, this.meta.columns)
  if (newColumns.length === 0) return store()
  self.addColumns(newColumns, function(err) {
    if (err) return cb(err)
    store()
  })
  // Persist the doc under its seq and row keys.
  function store() {
    if (!buffer && isJSON) buffer = doc
    if (!buffer) buffer = jsonbuff.encode(doc, self.meta.columns)
    // Seq-index write is fire-and-forget; the row write drives the callback.
    self.mutex.put(keys.seq, [doc._seq, doc._id, doc._rev], noop)
    self.mutex.put(keys.row, buffer, opts, function (e) {
      if (e) return cb(e)
      cb(null, doc)
    })
  }
}
// Issue an HTTP(S) request. `opts` follows http.request options plus:
//   opts.tls  — use the https module instead of http
//   opts.body — request body: a buffer or string is written directly; any
//               other truthy value is assumed to expose .read(cb) and is
//               drained chunk by chunk until it yields undefined (EOF).
// callback(err, statusCode, headers, stream) receives the wrapped response.
function request(opts, callback) {
  var base = opts.tls ? require('https') : require('http');
  if (trace) trace("request", null, { method: opts.method, host: opts.hostname, port: opts.port, path: opts.path, headers: opts.headers });
  var req = base.request(opts, function (res) {
    if (trace) trace("response", null, { code: res.statusCode, headers: res.headers });
    callback(null, res.statusCode, res.headers, wrapStream(res));
  });
  var body = opts.body;
  if (body) {
    if (bops.is(body) || typeof body === "string") {
      req.end(body);
    } else {
      // Pull-based body: request the first chunk; onRead re-arms itself.
      body.read(onRead);
    }
  } else req.end();
  // Streams one chunk per call; undefined marks end-of-body.
  function onRead(err, item) {
    if (err) return callback(err);
    if (item === undefined) {
      return req.end();
    }
    req.write(item);
    body.read(onRead);
  }
}
exports.stringify = function stringify (o) { if(o && bops.is(o)) return JSON.stringify(':base64:' + bops.to(o, 'base64')) if(o && o.toJSON) o = o.toJSON() if(o && 'object' === typeof o) { var s = '' var array = Array.isArray(o) s = array ? '[' : '{' var first = true for(var k in o) { var isFunction = 'function' == typeof o[k] if(Object.hasOwnProperty.call(o, k) && o[k] !== void(0) && !isFunction) { if(!first) s += ',' first = false s += array ? stringify(o[k]) : stringify(k) + ':' + stringify(o[k]) } } s += array ? ']' : '}' return s } else if ('string' === typeof o) { return JSON.stringify(/^:/.test(o) ? ':' + o : o) } else return JSON.stringify(o) }
// Write an HTTP response from a handler result object ({ statusCode, headers,
// body }). Streams are piped; strings/objects are converted to buffers
// (objects as JSON) and sent with content-length. Returns a Promise that
// settles when the response ends or errors.
// Fix: the stream branch set a misspelled 'conent-length' header, so the
// content-length was never actually sent for streamed bodies with a length.
function responder (result, response) {
  // Errors without an explicit statusCode default to 500; everything else 200.
  var statusCode = result.statusCode || (result instanceof Error ? 500 : 200);
  var headers = result.headers || {};
  var body = result.body;
  if (isStream(body)) {
    if (!headers['content-type']) {
      headers['content-type'] = 'application/octet-stream';
    }
    if (body.length) {
      headers['content-length'] = body.length;
    }
    response.writeHead(statusCode, headers);
    body.pipe(response);
  } else {
    if (typeof body === 'string') {
      body = bops.from(body);
    } else if (!bops.is(body)) {
      // Anything that isn't a string or buffer is serialized as JSON.
      body = bops.from(JSON.stringify(body));
      headers['content-type'] = 'application/json';
    }
    if (!headers['content-type']) {
      headers['content-type'] = 'text/plain';
    }
    headers['content-length'] = body.length;
    response.writeHead(statusCode, headers);
    response.end(body);
  }
  return new Promise(function (pass, fail) {
    response.on('end', pass).on('error', fail);
  });
}
// Collapse the buffered chunks into a single value, dispatching on the type
// of the first chunk: strings are joined, arrays are concatenated, buffers
// are joined via bops, and anything else is returned as the raw chunk list.
// An empty buffer list yields undefined.
ConcatStream.prototype.getBody = function () {
  var chunks = this.body
  if (chunks.length === 0) return
  var head = chunks[0]
  if (typeof head === "string") return chunks.join('')
  if (this.isArray(head)) return this.arrayConcat(chunks)
  if (bops.is(head)) return bops.join(chunks)
  return chunks
}
// Record each row's primary-key value, then forward the row downstream.
// Array rows are indexed by column position, buffer rows are sliced with
// bufferAt, and plain-object rows are read by column name.
var onRow = function (row) {
  var keyVal
  if (Array.isArray(row)) {
    keyVal = row[primaryIndex]
  } else if (bops.is(row)) {
    keyVal = bufferAt(row, primaryIndex)
  } else {
    keyVal = row[primary]
  }
  primaryKeys.push(keyVal)
  if (this.queue) this.queue(row)
}
// Build the string primary key for row i: compound keys are joined with the
// configured separator; the result is optionally md5-hashed.
// Fixes: (1) the non-array branch tested bops.is(part), but `part` is only
// assigned inside the array branch and was always undefined there — it must
// test the key value itself; (2) the inner loop redeclared `i`, shadowing
// the method parameter.
WriteStream.prototype.primaryKeyAt = function(i) {
  var pkey = ''
  var sep = this.options.separator || ''
  var next = this.primaryKeys[i]
  if (Array.isArray(next)) {
    for (var j = 0; j < next.length; j++) {
      var part = next[j]
      if (pkey) pkey += sep
      if (bops.is(part)) pkey += bops.to(part)
      else pkey += part.toString()
    }
  } else {
    if (bops.is(next)) pkey = bops.to(next)
    else pkey = next.toString()
  }
  if (this.options.hash) {
    pkey = crypto.createHash('md5').update(pkey).digest("hex")
  }
  return pkey
}
// Asynchronously look up `key` in the in-memory store. Missing keys yield a
// 'NotFound' error (matching the LevelDOWN contract); present values are
// coerced to a buffer unless options.asBuffer === false.
MemDOWN.prototype._get = function (key, options, callback) {
  var found = this._store[toKey(key)]
  if (found === undefined) {
    // 'NotFound' error, consistent with LevelDOWN API
    return setImmediate(function () {
      callback(new Error('NotFound'))
    })
  }
  var wantBuffer = options.asBuffer !== false
  if (wantBuffer && !bops.is(found)) {
    found = bops.from(String(found))
  }
  setImmediate(function () {
    callback(null, found)
  })
}
// Build the { _id, _rev } pair for primary key i: compound key parts are
// joined with the configured separator (falsy parts skipped); the id is
// optionally md5-hashed.
// Fixes: (1) the non-array branch tested bops.is(part), but `part` is only
// assigned inside the array branch and was always undefined there — it must
// test `keys` itself; (2) the inner loop redeclared `i`, shadowing the
// method parameter.
WriteStream.prototype.primaryKeyAt = function(i) {
  var id = ''
  var sep = this.options.separator || ''
  var pkey = this.primaryKeys[i]
  var keys = pkey.key
  if (Array.isArray(keys)) {
    for (var j = 0; j < keys.length; j++) {
      var part = keys[j]
      if (!part) continue
      if (id) id += sep
      if (bops.is(part)) id += bops.to(part)
      else id += part.toString()
    }
  } else {
    if (bops.is(keys)) id = bops.to(keys)
    else id = keys.toString()
  }
  if (this.options.hash) {
    id = crypto.createHash('md5').update(id).digest("hex")
  }
  return { _id: id, _rev: pkey.rev }
}
// Dispatch the matched route handler and normalize its result into a
// { statusCode, headers, body } response object. Bare return values —
// non-objects, buffers, streams, or objects without a `body` key — are
// wrapped so they become the response body.
return this.run(matchedRoute.fn).then(function (result) {
  if (typeof result !== 'object' || bops.is(result) || isStream(result) || !result.hasOwnProperty('body')) {
    result = { body: result };
  }
  return {
    statusCode: result.statusCode || 200,
    headers: result.headers || {},
    body: result.body || ''
  };
});
// Unpack a multibuffer of column values back into a { header: value } object.
// Values whose first byte looks like JSON (91 = '[', 123 = '{') are parsed;
// remaining buffers are stringified; empty values are dropped entirely.
function decode(headers, vals) {
  var parts = multibuffer.unpack(vals)
  var row = {}
  headers.forEach(function (name, idx) {
    var val = parts[idx]
    var firstByte = val[0]
    if (firstByte === 91 || firstByte === 123) { // '[' or '{'
      try {
        val = JSON.parse(val)
      } catch (e) {}
    }
    if (bops.is(val)) val = val.toString()
    if (val.length === 0) return
    row[name] = val
  })
  return row
}
// Extract the primary key value(s) for a row. primary/primaryIndex may be
// scalars or parallel arrays (compound keys). Array rows are read by index,
// buffer rows via bufferAt, object rows by column name. Missing '_id' parts
// get a fresh uuid. A single-part key is unwrapped from its array.
WriteStream.prototype.getPrimaryKey = function(row) {
  var indexes = Array.isArray(this.primaryIndex) ? this.primaryIndex : [this.primaryIndex]
  var names = Array.isArray(this.primary) ? this.primary : [this.primary]
  var parts = []
  for (var n = 0; n < indexes.length; n++) {
    var colIdx = indexes[n]
    var colName = names[n]
    var val
    if (colName === '_id' || colIdx > -1) {
      if (Array.isArray(row)) val = row[colIdx]
      else if (bops.is(row)) val = bufferAt(row, colIdx)
      else val = row[colName]
    }
    parts.push(colName === '_id' && !val ? docUtils.uuid() : val)
  }
  return parts.length === 1 ? parts[0] : parts
}
exports.toBuffer = function (data) { if (typeof data === 'string') { return bops.from(data).buffer } else if (bops.is(data)) { // If data is a TypedArrayView (Uint8Array) then copy the buffer, so the // underlying buffer will be exactly the right size. We care about this // because the Chrome `sendTo` function will be passed the underlying // ArrayBuffer. var newBuf = bops.create(data.length) bops.copy(data, newBuf, 0, 0, data.length) return newBuf.buffer } else if (data.buffer) { return data.buffer } else if (data instanceof ArrayBuffer) { return data } else { throw new Error('Cannot convert data to ArrayBuffer type') } }
// LevelDOWN hook: report whether `obj` is a binary buffer. Delegates to bops
// so it covers both Node Buffers and browser typed arrays.
ReDOWN.prototype._isBuffer = function (obj) {
  return bops.is(obj)
}
// Guard: taken only when every element of `value` is a binary buffer.
if (value.every(function(e) { return bops.is(e) })) {
// Write a document with schema merging and version-conflict detection.
// Flexible signatures: the bops.is() checks rotate (doc|buffer, [buffer],
// [opts], cb) into place. Unless opts.skipSchemaCheck, the doc (or
// opts.columns) is merged into the schema first. Existing docs are fetched
// to detect version conflicts; opts.force / opts.meta.version bypass the
// conflict check. cb(err, updatedDoc) fires after the `cur` key write.
Database.prototype.put = function (rawDoc, buffer, opts, cb) {
  var self = this
  var doc = rawDoc
  var updated
  var isNew = false
  // Pre-encoded buffer passed first: shift args right, start from empty doc.
  if (bops.is(rawDoc)) {
    cb = opts
    opts = buffer
    buffer = rawDoc
    doc = {}
    rawDoc = undefined
  }
  // No buffer supplied: second argument is opts (or the callback).
  if (!bops.is(buffer)) {
    cb = opts
    opts = buffer
    buffer = undefined
  } else {
    doc = {}
  }
  if (!cb) {
    cb = opts
    opts = {}
  }
  opts.meta = opts.meta || {}
  // Merge the doc's columns (or an explicit column list) into the schema,
  // then continue in check().
  if (!opts.skipSchemaCheck) {
    if (opts.columns) {
      self.schema.merge(self.schema.normalize(opts.columns), check)
    } else {
      self.schema.mergeFromObject(doc, check)
    }
  } else {
    check()
  }
  // Decide whether this is a brand-new doc or an update, enforcing version
  // ordering against any existing row.
  function check(err) {
    if (err) return cb(err)
    // TODO implement primary + hash options from writeStream here (see writeStream.writeBatch)
    if (opts.meta.id) doc.id = opts.meta.id
    if (!doc.id) {
      isNew = true
      return store()
    }
    debug('check', doc.id)
    self.get(doc.id, function(err, existing) {
      if (err) {
        isNew = true
        return store()
      }
      // force causes a forced upgrade (ignores version conflicts, makes new revision)
      if (opts.meta.version || opts.force) {
        doc = existing
        return store()
      }
      if (!doc.version || doc.version[0] < existing.version[0]) return cb(self.errors.conflict())
      store()
    })
  }
  // Persist the doc under its change, row, and cur keys, updating the
  // pending row count for new/deleted docs.
  function store() {
    // A new doc that already carries a version is stored as-is.
    if (isNew && doc.version) updated = doc
    else updated = docUtils.updateVersion(doc, buffer, self.meta.json.columns)
    var change = self.change = self.change + 1
    var keys = docUtils.rowKeys(self.keys, self.sep, updated.id, updated.version, change, updated._deleted)
    opts.valueEncoding = 'binary'
    if (!buffer) buffer = self.schema.encode(updated)
    var changeVal = [change, updated.id, updated.version]
    if (updated._deleted) {
      changeVal.push(true) // 4th spot in changeVal array is a deleted boolean
    }
    var curVal = docUtils.pack(+updated.version)
    if (updated._deleted) curVal += self.sep + '1'
    // Empty rows are stored as whitespace so the key still exists.
    if (buffer.length === 0) buffer = whiteSpace
    // todo handle errors
    self.mutex.put(keys.change, JSON.stringify(changeVal), noop)
    self.mutex.put(keys.row, buffer, opts, noop)
    self.mutex.put(keys.cur, curVal, function (err) {
      cb(err, updated)
    })
    if (isNew)
      self.pendingRowCount++
    if (updated._deleted) self.pendingRowCount--
  }
}
// Encode an arbitrary JS value into a tagged byte representation (tag() pairs
// a type marker with an optional payload). Natives are unboxed via valueOf;
// numbers and dates are split into pre/post-zero types so encoded bytes sort
// naturally. Throws TypeError on NaN / Invalid Date and Error on
// unsupported types (RegExp is not implemented yet).
function encode(source) {
  if (source === void 0) return tag(UNDEFINED);
  if (source === null) return tag(NULL);
  // Unbox possible natives
  var value = source != null && source.valueOf ? source.valueOf() : source;
  var type;
  // NaN and Invalid Date not permitted (NaN is the only value !== itself)
  if (value !== value) {
    if (source instanceof Date) throw new TypeError('Invalid Date not permitted');
    throw new TypeError('NaN not permitted');
  }
  if (value === false) return tag(FALSE);
  if (value === true) return tag(TRUE);
  if (source instanceof Date) {
    // Normalize -0 values to 0
    if (Object.is(value, -0)) value = 0;
    type = value < 0 ? DATE_PRE_EPOCH : DATE_POST_EPOCH;
    return tag(type, encodeNumber(value));
  }
  if (typeof value === 'number') {
    if (value === Number.NEGATIVE_INFINITY) return tag(NEGATIVE_INFINITY);
    if (value === Number.POSITIVE_INFINITY) return tag(POSITIVE_INFINITY);
    // Normalize -0 values to 0
    if (Object.is(value, -0)) value = 0;
    type = value < 0 ? NEGATIVE_NUMBER : POSITIVE_NUMBER;
    return tag(type, encodeNumber(value));
  }
  if (bops.is(value)) {
    return tag(BUFFER, value);
  }
  if (typeof value === 'string') {
    return tag(STRING, bops.from(value, 'utf8'));
  }
  // RegExp
  if (value instanceof RegExp) {
    // TODO
    throw new Error('Not Implemented Yet');
  }
  // Function
  if (typeof value === 'function') {
    return tag(FUNCTION, encodeList(_type['function'].serialize(value)));
  }
  // Array
  // TODO better handling for sparse arrays
  if (Array.isArray(value)) {
    return tag(ARRAY, encodeList(value));
  }
  // Map
  if (value instanceof Map) {
    // Packs into an array, e.g. [ k1, v1, k2, v2, ... ]
    var items = [];
    getCollectionKeys(value).forEach(function(key) {
      items.push(key);
      items.push(value.get(key));
    });
    return tag(MAP, encodeList(items));
  }
  // Set — members are byte-sorted so equal sets encode identically.
  if (value instanceof Set) {
    var set = getCollectionKeys(value);
    // encode, sort, and then decode the result array
    set = decode(set.map(encode).sort(compare));
    // TODO we should be able to build a list by concatenating buffers -- bypass this decode/encodeList dance
    return tag(SET, encodeList(set));
  }
  // Object (plain objects only, by toString brand check)
  if (typeof value === 'object' && Object.prototype.toString.call(value) === '[object Object]') {
    // Packs into an array, e.g. [ k1, v1, k2, v2, ... ]
    var items = [];
    Object.keys(value).forEach(function(key) {
      items.push(key);
      items.push(value[key]);
    });
    return tag(OBJECT, encodeList(items));
  }
  // TODO RegExp and other types from Structured Clone algorithm (Blob, File, FileList)
  throw new Error('Cannot encode unknown type: ' + source);
}
// Compute the exact number of bytes encode() will emit for `value`, so the
// caller can pre-allocate the output buffer. Must stay in sync with the
// marker layout used by encode().
function sizeof(value) {
  var type = typeof value;
  var length, size;
  // Raw Bytes
  if (type === "string") {
    // TODO: this creates a throw-away buffer which is probably expensive on browsers.
    length = bops.from(value).length;
    if (length < 0x20) { return 1 + length; } // fixstr
    if (length < 0x100) { return 2 + length; } // str 8
    if (length < 0x10000) { return 3 + length; } // str 16
    if (length < 0x100000000) { return 5 + length; } // str 32
  }
  if (bops.is(value)) {
    length = value.length;
    if (length < 0x100) { return 2 + length; } // bin 8
    if (length < 0x10000) { return 3 + length; } // bin 16
    if (length < 0x100000000) { return 5 + length; } // bin 32
  }
  if (type === "number") {
    // Floating Point
    // double — any number that is not a 32-bit integer
    if (value << 0 !== value) return 9;
    // Integers
    if (value >=0) {
      // positive fixnum
      if (value < 0x80) return 1;
      // uint 8
      if (value < 0x100) return 2;
      // uint 16
      if (value < 0x10000) return 3;
      // uint 32
      if (value < 0x100000000) return 5;
      // uint 64
      if (value < 0x10000000000000000) return 9;
      throw new Error("Number too big 0x" + value.toString(16));
    }
    // negative fixnum
    if (value >= -0x20) return 1;
    // int 8
    if (value >= -0x80) return 2;
    // int 16
    if (value >= -0x8000) return 3;
    // int 32
    if (value >= -0x80000000) return 5;
    // int 64
    if (value >= -0x8000000000000000) return 9;
    throw new Error("Number too small -0x" + value.toString(16).substr(1));
  }
  // Boolean, null
  if (type === "boolean" || value === null) return 1;
  // undefined encodes as a 3-byte fixext (0xd4 0x00 0x00)
  if (type === 'undefined') return 3;
  // NOTE(review): this toJSON check makes the one inside the object branch
  // below unreachable — confirm which was intended.
  if (typeof value.toJSON === 'function') {
    return sizeof(value.toJSON());
  }
  // Container Types
  if (type === "object") {
    if ('function' === typeof value.toJSON) {
      value = value.toJSON();
    }
    size = 0;
    if (Array.isArray(value)) {
      length = value.length;
      for (var i = 0; i < length; i++) {
        size += sizeof(value[i]);
      }
    } else {
      var keys = encodeableKeys(value);
      length = keys.length;
      for (var i = 0; i < length; i++) {
        var key = keys[i];
        size += sizeof(key) + sizeof(value[key]);
      }
    }
    if (length < 0x10) { return 1 + size; } // fixarray / fixmap
    if (length < 0x10000) { return 3 + size; } // 16-bit header
    if (length < 0x100000000) { return 5 + size; } // 32-bit header
    throw new Error("Array or object too long 0x" + length.toString(16));
  }
  if (type === "function") { return 0; }
  throw new Error("Unknown type " + type);
}
// Encode `value` into `buffer` at `offset` in a MessagePack-style format and
// return the number of bytes written. The byte layout must stay in sync with
// sizeof(), which callers use to pre-allocate the buffer.
// Fix: the undefined branch wrote three bytes (0xd4 0x00 0x00 — matching the
// 3 bytes sizeof() reserves) but returned 1, so the next value overwrote the
// two trailing 0x00 bytes and the stream drifted out of sync with the
// pre-computed size. It now returns 3.
function encode(value, buffer, offset) {
  var type = typeof value;
  var length, size;

  // Strings: UTF-8 bytes behind a fixstr / str8 / str16 / str32 header.
  if (type === "string") {
    value = bops.from(value);
    length = value.length;
    // fixstr
    if (length < 0x20) {
      buffer[offset] = length | 0xa0;
      bops.copy(value, buffer, offset + 1);
      return 1 + length;
    }
    // str 8
    if (length < 0x100) {
      buffer[offset] = 0xd9;
      bops.writeUInt8(buffer, length, offset + 1);
      bops.copy(value, buffer, offset + 2);
      return 2 + length;
    }
    // str 16
    if (length < 0x10000) {
      buffer[offset] = 0xda;
      bops.writeUInt16BE(buffer, length, offset + 1);
      bops.copy(value, buffer, offset + 3);
      return 3 + length;
    }
    // str 32
    if (length < 0x100000000) {
      buffer[offset] = 0xdb;
      bops.writeUInt32BE(buffer, length, offset + 1);
      bops.copy(value, buffer, offset + 5);
      return 5 + length;
    }
  }

  // Binary buffers.
  // NOTE(review): the bin16/bin32 markers (0xd8/0xd9) diverge from the
  // MessagePack spec (0xc5/0xc6) and collide with fixext16/str8; kept as-is
  // because the matching decoder is not visible here — confirm before changing.
  if (bops.is(value)) {
    length = value.length;
    // bin 8
    if (length < 0x100) {
      buffer[offset] = 0xc4;
      bops.writeUInt8(buffer, length, offset + 1);
      bops.copy(value, buffer, offset + 2);
      return 2 + length;
    }
    // bin 16
    if (length < 0x10000) {
      buffer[offset] = 0xd8;
      bops.writeUInt16BE(buffer, length, offset + 1);
      bops.copy(value, buffer, offset + 3);
      return 3 + length;
    }
    // bin 32
    if (length < 0x100000000) {
      buffer[offset] = 0xd9;
      bops.writeUInt32BE(buffer, length, offset + 1);
      bops.copy(value, buffer, offset + 5);
      return 5 + length;
    }
  }

  if (type === "number") {
    // Floating point: anything that is not a 32-bit integer becomes a double.
    if ((value << 0) !== value) {
      buffer[offset] = 0xcb;
      bops.writeDoubleBE(buffer, value, offset + 1);
      return 9;
    }
    // Integers
    if (value >= 0) {
      // positive fixnum
      if (value < 0x80) {
        buffer[offset] = value;
        return 1;
      }
      // uint 8
      if (value < 0x100) {
        buffer[offset] = 0xcc;
        buffer[offset + 1] = value;
        return 2;
      }
      // uint 16
      if (value < 0x10000) {
        buffer[offset] = 0xcd;
        bops.writeUInt16BE(buffer, value, offset + 1);
        return 3;
      }
      // uint 32
      if (value < 0x100000000) {
        buffer[offset] = 0xce;
        bops.writeUInt32BE(buffer, value, offset + 1);
        return 5;
      }
      // uint 64
      if (value < 0x10000000000000000) {
        buffer[offset] = 0xcf;
        bops.writeUInt64BE(buffer, value, offset + 1);
        return 9;
      }
      throw new Error("Number too big 0x" + value.toString(16));
    }
    // negative fixnum
    if (value >= -0x20) {
      bops.writeInt8(buffer, value, offset);
      return 1;
    }
    // int 8
    if (value >= -0x80) {
      buffer[offset] = 0xd0;
      bops.writeInt8(buffer, value, offset + 1);
      return 2;
    }
    // int 16
    if (value >= -0x8000) {
      buffer[offset] = 0xd1;
      bops.writeInt16BE(buffer, value, offset + 1);
      return 3;
    }
    // int 32
    if (value >= -0x80000000) {
      buffer[offset] = 0xd2;
      bops.writeInt32BE(buffer, value, offset + 1);
      return 5;
    }
    // int 64
    if (value >= -0x8000000000000000) {
      buffer[offset] = 0xd3;
      bops.writeInt64BE(buffer, value, offset + 1);
      return 9;
    }
    throw new Error("Number too small -0x" + value.toString(16).substr(1));
  }

  // undefined: fixext1 with type 0x00 and value 0x00 — three bytes total.
  if (type === "undefined") {
    buffer[offset] = 0xd4;
    buffer[offset + 1] = 0x00; // fixext special type/value
    buffer[offset + 2] = 0x00;
    return 3;
  }

  // null
  if (value === null) {
    buffer[offset] = 0xc0;
    return 1;
  }

  // Boolean
  if (type === "boolean") {
    buffer[offset] = value ? 0xc3 : 0xc2;
    return 1;
  }

  // Custom toJSON function.
  if (typeof value.toJSON === 'function') {
    return encode(value.toJSON(), buffer, offset);
  }

  // Container Types
  if (type === "object") {
    size = 0;
    var isArray = Array.isArray(value);
    if (isArray) {
      length = value.length;
    } else {
      var keys = encodeableKeys(value);
      length = keys.length;
    }
    // fixarray / fixmap
    if (length < 0x10) {
      buffer[offset] = length | (isArray ? 0x90 : 0x80);
      size = 1;
    }
    // array 16 / map 16
    else if (length < 0x10000) {
      buffer[offset] = isArray ? 0xdc : 0xde;
      bops.writeUInt16BE(buffer, length, offset + 1);
      size = 3;
    }
    // array 32 / map 32
    else if (length < 0x100000000) {
      buffer[offset] = isArray ? 0xdd : 0xdf;
      bops.writeUInt32BE(buffer, length, offset + 1);
      size = 5;
    }
    if (isArray) {
      for (var i = 0; i < length; i++) {
        size += encode(value[i], buffer, offset + size);
      }
    } else {
      for (var i = 0; i < length; i++) {
        var key = keys[i];
        size += encode(key, buffer, offset + size);
        size += encode(value[key], buffer, offset + size);
      }
    }
    return size;
  }

  // Functions are silently skipped (no bytes written).
  if (type === "function") return undefined;
  throw new Error("Unknown type " + type);
}
// Write a document with revision-conflict detection. Flexible signatures:
// the bops.is() checks rotate (doc|buffer, [buffer], [opts], cb) into place.
// New columns are registered with the schema; an existing doc with a newer
// _rev causes a conflict error unless opts.overwrite is set.
// cb(err, updatedDoc) fires after the `cur` key write completes.
Database.prototype.put = function (rawDoc, buffer, opts, cb) {
  var self = this
  var doc = rawDoc
  var updated
  // Pre-encoded buffer passed first: shift args right, start from empty doc.
  if (bops.is(rawDoc)) {
    cb = opts
    opts = buffer
    buffer = rawDoc
    doc = {}
    rawDoc = undefined
  }
  // No buffer supplied: second argument is opts (or the callback).
  if (!bops.is(buffer)) {
    cb = opts
    opts = buffer
    buffer = undefined
  } else {
    doc = {}
  }
  if (!cb) {
    cb = opts
    opts = {}
  }
  // Register any columns not yet present in the stored schema metadata.
  var columns = Object.keys(doc)
  if (opts.columns) columns = columns.concat(opts.columns)
  var newColumns = this.meta.getNewColumns(columns)
  // getNewColumns signals rejection via an .error property on its result.
  if (newColumns.error) return cb(newColumns)
  if (newColumns.length === 0) return check()
  self.meta.addColumns(newColumns, function(err) {
    if (err) return cb(err)
    check()
  })
  // Enforce revision ordering against any existing row before storing.
  function check() {
    // TODO implement primary + hash options from writeStream here
    if (opts.overwrite || !doc._id) return store()
    debug('check', doc._id)
    self.get(doc._id, function(err, existing) {
      if (err) return store()
      if (!doc._rev || doc._rev[0] < existing._rev[0]) return cb(self.errors.conflict())
      store()
    })
  }
  // Persist the doc under its seq, row, and cur keys.
  function store() {
    // opts.overwrite stores the doc as-is without a new revision.
    if (!opts.overwrite) updated = docUtils.updateRevision(doc, buffer, self.meta.json.columns)
    else updated = doc
    var seq = self.seq = self.seq + 1
    var keys = docUtils.rowKeys(self.keys, self.sep, updated._id, updated._rev, seq, updated._deleted)
    opts.valueEncoding = 'binary'
    if (!buffer) buffer = jsonBuffStream.encode(updated, self.meta.json.columns)
    var seqVal = [seq, updated._id, updated._rev]
    if (updated._deleted) {
      seqVal.push(true) // 4th spot in seqVal array is a deleted boolean
    }
    // cur key stores "<packed-rev-number>-<rev-hash>" for fast latest lookup.
    var revParts = updated._rev.split('-')
    var curVal = docUtils.pack(+revParts[0]) + '-' + revParts[1]
    // Empty rows are stored as whitespace so the key still exists.
    if (buffer.length === 0) buffer = whiteSpace
    // todo handle errors
    self.mutex.put(keys.seq, JSON.stringify(seqVal), noop)
    self.mutex.put(keys.row, buffer, opts, noop)
    self.mutex.put(keys.cur, curVal, afterUpdate)
    function afterUpdate(err) {
      if (err) return cb(err)
      cb(null, updated)
    }
  }
}
// Build an MQTT PUBLISH packet from opts:
//   topic (required non-empty string), payload (string|buffer, default empty),
//   qos / dup / retain flags, messageId (required when qos > 0),
//   protocolVersion (VERSION13 uses 64-bit message ids).
// Returns the packet Buffer on success, or an Error (returned, not thrown)
// on invalid input.
module.exports.publish = function(opts) {
  var opts = opts || {}
    , dup = opts.dup ? protocol.DUP_MASK : 0
    , qos = opts.qos
    , retain = opts.retain ? protocol.RETAIN_MASK : 0
    , topic = opts.topic
    , payload = opts.payload || empty
    , id = opts.messageId
    , version = opts.protocolVersion;
  // Running total of the packet's remaining length.
  var length = 0;
  // Topic must be a non-empty string
  // NOTE(review): topic.length counts UTF-16 units, not bytes — multi-byte
  // topics may under-allocate; verify against write_string's encoding.
  if (!topic || 'string' !== typeof topic) {
    return new Error('Invalid topic');
  } else {
    length += topic.length + 2;
  }
  // get the payload length
  if (!bops.is(payload)) {
    length += Buffer.byteLength(payload);
  } else {
    length += payload.length;
  }
  // Message id must a number if qos > 0
  // if (qos && 'number' !== typeof id) {
  if (qos && !check_message_id(version, id)) {
    return new Error('Invalid message id')
  } else if (qos) {
    if (version == protocol.VERSION13) {
      length += 8; // 64-bit message id
    } else {
      length += 2;
    }
  }
  // 1 fixed-header byte + variable-length remaining-length field + body.
  var buffer = bops.create(1 + calc_length_length(length) + length)
    , pos = 0;
  // Header
  buffer[pos++] = protocol.codes['publish'] << protocol.CMD_SHIFT | dup | qos << protocol.QOS_SHIFT | retain;
  // Remaining length
  pos += write_length(buffer, pos, length);
  // Topic
  pos += write_string(buffer, pos, topic);
  // Message ID
  if (qos > 0) {
    if (version == protocol.VERSION13) {
      pos += write_message_id_len64(buffer, pos, id);
    } else {
      pos += write_number(buffer, pos, id);
    }
  }
  // Payload
  if (!bops.is(payload)) {
    write_string_no_pos(buffer, pos, payload);
  } else {
    write_buffer(buffer, pos, payload);
  }
  return buffer;
};
// LevelDOWN hook: report whether `obj` is a binary buffer. Delegates to bops
// so it covers both Node Buffers and browser typed arrays.
AzureDown.prototype._isBuffer = function (obj) {
  return bops.is(obj)
}