/**
 * Hash a post object and store it in the `posts:smoothshorts` sorted set
 * (score = pid, value = hash).
 * @param {Object} postData - post object; must at least include `pid`.
 * @param {Function} [cb] - optional; invoked as cb() on success or cb(err)
 *                          when the DB write fails.
 */
SmoothShorts.shortenPost = function(postData, cb) {
    /** posts:smoothshorts sortedSet **/
    // Seed is derived from the first segment of the 'NodeBB secret' (a UUID);
    // the '0x' prefix is unnecessary when an explicit radix of 16 is given.
    var key = nconf.get('secret');
    key = parseInt(key.substring(0, key.indexOf('-')), 16);
    // Hash the serialized post object. Buffer.from replaces the
    // deprecated `new Buffer(string)` constructor.
    var hash = xxh.hash(Buffer.from(JSON.stringify(postData)), key).toString(16);
    // don't take any chances of leaking the secret around;
    // not even just parts of it. ;)
    key = null;
    db.sortedSetAdd('posts:smoothshorts', postData.pid, hash, function(err) {
        if (err) {
            winston.error('[plugin:smoothshorts] Writing hash to DB failed.' +
                          '(pid=' + postData.pid + ')');
            winston.error(err);
            if (cb) {
                cb(err);
            }
            return;
        }
        winston.verbose('[plugin:smoothshorts] Hashed post ID ' + postData.pid);
        if (cb) {
            cb();
        }
    });
};
createBlock: function(blockInfo) { var block = new Buffer(blockInfo.data.length + LZ4_MAGIC.length + 7); var offset = 0; LZ4_MAGIC.copy(block, 0); offset = LZ4_MAGIC.length; block.writeUInt8(FLAGS.VERSION, offset); offset += 1; // default to the 4M block block.writeUInt8(BLOCK_SIZE.M4 << 4, offset); offset += 1; // create a check sum of the header var header = block.slice(LZ4_MAGIC.length, offset); var headerChecksum = (XXH.hash(header, 0) >> 8) & 0xFF; block.writeUInt8(headerChecksum, offset); offset += 1; block.writeInt32LE(blockInfo.compressedlength, offset); offset += 4; blockInfo.data.copy(block, offset); return block; }
// Build a composite key "col1(v1)col2(v2)..." from every configured column,
// then hash it with a fixed seed.
var keyFunction = function(ctx, args) {
    var key = "";
    for (var name in _this.config.columns) {
        key = key + name + "(" + args[name] + ")";
    }
    // Buffer.from replaces the deprecated `new Buffer(string)` constructor.
    return XXHash.hash(Buffer.from(key), 0x654C6162);
}
// Memoize AMD metadata detection, keyed by an xxhash of the raw file content,
// so repeated files are only parsed once.
var parseAMDMeta = function (fileContent) {
    var contentHash = xxhash.hash(fileContent, 0xCAFEBABE);
    var cached = amdMetaHash[contentHash];
    if (cached === undefined) {
        cached = detect.fromAst(esprima.parse(fileContent));
        amdMetaHash[contentHash] = cached;
    }
    return cached;
};
/**
 * Test Bloom-filter membership: hash `item` with each seed, then check that
 * every corresponding bit is set in Redis.
 * @param {Buffer|string} item - value to test (strings are converted).
 * @param {Function} callback - callback(err, boolean).
 */
RedisFilter.prototype.has = function(item, callback) {
    // Buffer.from replaces the deprecated `new Buffer(string)` constructor.
    if (typeof item === 'string') item = Buffer.from(item);
    var bitsToGet = [];
    for (var i = 0; i < this.hashes; i++) {
        var hash = Xxhash.hash(item, this.seeds[i]);
        bitsToGet.push(hash % this.bits);
    }
    this.getbits(bitsToGet, function(err, vals) {
        if (err) return callback(err);
        for (var j = 0; j < bitsToGet.length; j++) {
            // Use a distinct name: the previous code re-declared `var item`
            // here, shadowing the outer buffer argument.
            var bitKey = String(bitsToGet[j]);
            // Any unset bit means the item is definitely not present.
            if (!vals[bitKey]) return callback(null, false);
        }
        callback(null, true);
    });
};
/**
 * Add one item to the Bloom filter: set the bit for each seeded hash.
 * @param {Buffer|string} buf - value to add (strings are converted).
 * @param {Function} callback - passed through to setbits.
 */
RedisFilter.prototype._addOne = function (buf, callback) {
    // Buffer.from replaces the deprecated `new Buffer(string)` constructor.
    if (typeof buf === 'string') buf = Buffer.from(buf);
    var bitsToSet = [];
    for (var i = 0; i < this.hashes; i++) {
        var hash = Xxhash.hash(buf, this.seeds[i]);
        bitsToSet.push(hash % this.bits);
    }
    this.setbits(bitsToSet, callback);
};
readable.on('readable', function() {
    // Read the file in 4096-byte chunks, hashing each chunk as it arrives.
    while (filesize - count * 4096 > 4096) {
        fourK_data = readable.read(4096);
        if (null !== fourK_data) { // asynchronous read
            count++;
            hash = xxhash.hash(fourK_data, seed);
            hashlist.push(hash);
            hashstring += hash;
        } else {
            break;
        }
        if (filesize - count * 4096 <= 4096) {
            // Before leaving the loop, set the flag so the final (short)
            // tail chunk is read exactly once below.
            flag = 1;
        }
    }
    if (flag) {
        // 'readable' fires repeatedly; clear the flag so this tail-handling
        // block does not run again after the file has been fully consumed.
        flag = 0;
        fourK_data = readable.read();
        console.log('block count:' + ++count);
        console.log("last block size: " + fourK_data.length);
        hash = xxhash.hash(fourK_data, seed);
        hashlist.push(hash);
        hashstring += hash;
        // Final hash is taken over the concatenation of all chunk hashes.
        // Buffer.from replaces the deprecated `Buffer(string)` factory call.
        final_hash = xxhash.hash(Buffer.from(hashstring), seed);
        db.insert({
            'filename': path.basename(filepath),
            'hashlist': hashlist,
            'hash': final_hash
        }, function(err, newDoc) {
            console.log("\nnew record: " + JSON.stringify(newDoc));
        });
        console.timeEnd("hash");
    }
});
// Compare collision counts of murmurhash3 / xxhash / farmhash when hashing
// many random keys with a single fixed seed.
(function () {
    console.log('Test key collisions (same seed, different keys)');
    const seed = randomInteger();
    console.log('Using seed ' + seed);
    const keyLength = 100;
    console.log('Using key length of ' + keyLength);
    const iterations = 1000000;
    console.log('Using ' + iterations + ' iterations');

    // Every hash value observed so far, per algorithm.
    const hashes = { murmurhash3: {}, xxhash: {}, farmhash: {} };
    const collisions = { murmurhash3: 0, xxhash: 0, farmhash: 0 };

    // Count a collision whenever a hash value repeats for a fresh random key.
    const record = (algo, value) => {
        if (value in hashes[algo]) {
            collisions[algo]++;
        } else {
            hashes[algo][value] = true;
        }
    };

    for (let i = 0; i < iterations; i++) {
        const input = randomStringOfLength(keyLength);
        const inputBuffer = Buffer.from(input); // xxhash operates on Buffers
        record('murmurhash3', murmurhash3.murmur32Sync(input, seed));
        record('xxhash', xxhash.hash(inputBuffer, seed));
        record('farmhash', farmhash.hash32WithSeed(input, seed));
    }

    console.log('Collisions:');
    console.dir(collisions);
    console.log();
})();
// Compare collision counts of murmurhash3 / xxhash / farmhash when hashing
// one fixed key with every seed in [0, iterations).
(function () {
    console.log('Test seed collisions (same key, different seeds)');
    const input = randomStringOfLength(100);
    const inputBuffer = Buffer.from(input); // xxhash operates on Buffers
    console.log('Using input of length ' + input.length);
    const iterations = 1000000;
    console.log('Using ' + iterations + ' iterations');

    // Every hash value observed so far, per algorithm.
    const hashes = { murmurhash3: {}, xxhash: {}, farmhash: {} };
    const collisions = { murmurhash3: 0, xxhash: 0, farmhash: 0 };

    // Count a collision whenever a hash value repeats across seeds.
    const record = (algo, value) => {
        if (value in hashes[algo]) {
            collisions[algo]++;
        } else {
            hashes[algo][value] = true;
        }
    };

    for (let seed = 0; seed < iterations; seed++) {
        record('murmurhash3', murmurhash3.murmur32Sync(input, seed));
        record('xxhash', xxhash.hash(inputBuffer, seed));
        record('farmhash', farmhash.hash32WithSeed(input, seed));
    }

    console.log('Collisions:');
    console.dir(collisions);
    console.log();
})();
key : function(ctx, args) { return XXHash.hash(new Buffer(args.key), 0x654C6162); },
{ oper:'equal', column:'key', var: function(ctx, args) { return XXHash.hash(new Buffer(args.key), 0x654C6162); }}
// Sharding key: seeded xxhash of args.key.
shardingItems.push(function(ctx, args) {
    // Buffer.from replaces the deprecated `new Buffer(string)` constructor.
    return XXHash.hash(Buffer.from(args.key), 0x654C6162);
});
// Read the temp file and look its content hash up in filesAndHashes.
fs.readFile('./temp/' + file, (err, data) => {
    // Surface read errors instead of hashing `undefined` data
    // (the original ignored `err` and was missing the closing paren).
    if (err) {
        return callback(err);
    }
    limahash.isOn2(filesAndHashes, XXHash.hash(data, 0xCAFEBABE), callback);
});
// Function to save a file into the list:
// appends the file name and a hash of its content to `filesAndHashes`.
limahash.saveFile = (file, data, filesAndHashes) =>
    filesAndHashes.push({ name: file, hash: XXHash.hash(data, 0xCAFEBABE) });
// After 10 s, print the xxhash of the source file and the downloaded file
// (same seed) so the two can be compared.
setTimeout(function () {
    var files = [settings.source_file, settings.download_file];
    files.forEach(function (name) {
        console.log(xxhash.hash(fs.readFileSync(name), 0xAAAA));
    });
}, 10000);
// Content hash of an image's raw bytes, used as its identity.
function getHashFromImage(image) {
    // IMPORTANT! DO NOT EVER CHANGE MY SEED VALUE UNLESS YOU WANT TO INVALIDATE
    // EXISTING PROCESSED IMAGES!
    var HASH_SEED = 0xABCD1133;
    return XXHash.hash(image.buffer, HASH_SEED);
}
// :TODO: Implement the following
// * Pause (disabled | termHeight | keyPress )
// * Cancel (disabled | <keys> )
// * Resume from pause -> continous (disabled | <keys>)

//
// Render |art| to |client|'s terminal, discovering MCI codes via cursor
// position reports and caching the resulting MCI map by art hash.
// |options| may be omitted (a function in its place is treated as |cb|).
// Calls back with (err, mciMap, extraInfo).
//
function display(client, art, options, cb) {
    if(_.isFunction(options) && !cb) {
        cb = options;
        options = {};
    }

    if(!art || !art.length) {
        return cb(new Error('Empty art'));
    }

    options.mciReplaceChar = options.mciReplaceChar || ' ';
    options.disableMciCache = options.disableMciCache || false;

    // :TODO: this is going to be broken into two approaches controlled via options:
    // 1) Standard - use internal tracking of locations for MCI -- no CPR's/etc.
    // 2) CPR driven
    if(!_.isBoolean(options.iceColors)) {
        // try to detect from SAUCE
        if(_.has(options, 'sauce.ansiFlags') && (options.sauce.ansiFlags & (1 << 0))) {
            options.iceColors = true;
        }
    }

    const ansiParser = new aep.ANSIEscapeParser({
        mciReplaceChar : options.mciReplaceChar,
        termHeight     : client.term.termHeight,
        termWidth      : client.term.termWidth,
        trailingLF     : options.trailingLF,
    });

    let parseComplete = false;
    let cprListener;
    let mciMap;
    const mciCprQueue = [];
    let artHash;
    let mciMapFromCache;

    // Tear down listeners, persist the MCI map to the cache (when it was
    // freshly built), and invoke |cb| with the results.
    function completed() {
        if(cprListener) {
            client.removeListener('cursor position report', cprListener);
        }

        if(!options.disableMciCache && !mciMapFromCache) {
            // cache our MCI findings...
            client.mciCache[artHash] = mciMap;
            client.log.trace( { artHash : artHash.toString(16), mciMap : mciMap }, 'Added MCI map to cache');
        }

        ansiParser.removeAllListeners(); // :TODO: Necessary???

        const extraInfo = {
            height : ansiParser.row - 1,
        };

        return cb(null, mciMap, extraInfo);
    }

    if(!options.disableMciCache) {
        // Buffer.from() replaces the deprecated new Buffer() constructor.
        artHash = xxhash.hash(Buffer.from(art), 0xCAFEBABE);

        // see if we have a mciMap cached for this art
        if(client.mciCache) {
            mciMap = client.mciCache[artHash];
        }
    }

    if(mciMap) {
        mciMapFromCache = true;
        client.log.trace( { artHash : artHash.toString(16), mciMap : mciMap }, 'Loaded MCI map from cache');
    } else {
        // no cached MCI info
        mciMap = {};

        // Each CPR response resolves the position of the oldest queued MCI.
        cprListener = function(pos) {
            if(mciCprQueue.length > 0) {
                mciMap[mciCprQueue.shift()].position = pos;

                if(parseComplete && 0 === mciCprQueue.length) {
                    return completed();
                }
            }
        };

        client.on('cursor position report', cprListener);

        let generatedId = 100;

        ansiParser.on('mci', mciInfo => {
            // :TODO: ensure generatedId's do not conflict with any existing |id|
            const id = _.isNumber(mciInfo.id) ? mciInfo.id : generatedId;
            const mapKey = `${mciInfo.mci}${id}`;
            const mapEntry = mciMap[mapKey];

            if(mapEntry) {
                // Second occurrence of the same MCI carries focus styling.
                mapEntry.focusSGR = mciInfo.SGR;
                mapEntry.focusArgs = mciInfo.args;
            } else {
                mciMap[mapKey] = {
                    args : mciInfo.args,
                    SGR  : mciInfo.SGR,
                    code : mciInfo.mci,
                    id   : id,
                };

                if(!mciInfo.id) {
                    ++generatedId;
                }

                // Queue the entry and ask the terminal for the cursor position.
                mciCprQueue.push(mapKey);
                client.term.rawWrite(ansi.queryPos());
            }
        });
    }

    ansiParser.on('literal', literal => client.term.write(literal, false) );
    ansiParser.on('control', control => client.term.rawWrite(control) );

    ansiParser.on('complete', () => {
        parseComplete = true;

        if(0 === mciCprQueue.length) {
            return completed();
        }
    });

    let initSeq = '';
    if(options.font) {
        initSeq = ansi.setSyncTermFontWithAlias(options.font);
    } else if(options.sauce) {
        let fontName = getFontNameFromSAUCE(options.sauce);
        if(fontName) {
            fontName = ansi.getSyncTERMFontFromAlias(fontName);
        }

        //
        // Set SyncTERM font if we're switching only. Most terminals
        // that support this ESC sequence can only show *one* font
        // at a time. This applies to detection only (e.g. SAUCE).
        // If explicit, we'll set it no matter what (above)
        //
        if(fontName && client.term.currentSyncFont != fontName) {
            client.term.currentSyncFont = fontName;
            initSeq = ansi.setSyncTERMFont(fontName);
        }
    }

    if(options.iceColors) {
        initSeq += ansi.blinkToBrightIntensity();
    }

    if(initSeq) {
        client.term.rawWrite(initSeq);
    }

    ansiParser.reset(art);
    return ansiParser.parse();
}
// Benchmark case: seeded xxhash over the shared input buffer.
// (Fragment of a benchmark suite chain whose start lies above this view.)
}).add('xxhash+seed', function() {
    xxhash.hash(inputBuffer, seed);
}).on('cycle', function(event) {
    // Load base64-encoded prebuilt native addons bundled via raw-loader.
    // (The opening brace of this block lies above this view.)
    xxhashBase64 = require('raw-loader!./assets/xxhash.node.base64')
    xattrBase64 = require('raw-loader!./assets/xattr.node.base64')
}

const dir = 'bin'

if (xxhashBase64) {
    mkdirp.sync(dir)
    // Decode the embedded addon and write it out as a loadable .node binary.
    let decode = Buffer.from(xxhashBase64.toString(), 'base64')
    fs.writeFileSync(path.join(dir, 'xxhash.node'), decode)
    // test xxhash
    const XXHASH = require('xxhash')
    console.log('test xxhash: ' + XXHASH.hash(Buffer.from('hello'), 1234))
}

if (xattrBase64) {
    mkdirp.sync(dir)
    // Decode the embedded addon and write it out as a loadable .node binary.
    let decode = Buffer.from(xattrBase64.toString(), 'base64')
    fs.writeFileSync(path.join(dir, 'xattr.node'), decode)
    // test xattr: round-trip an extended attribute on the output directory
    const XATTR = require('fs-xattr')
    XATTR.setSync(dir, 'user.foo', 'bar')
    console.log('test xattr: ' + XATTR.getSync(dir, 'user.foo'))
    XATTR.removeSync(dir, 'user.foo')
}
// Hash arbitrary source text with the module-level seed.
function createHash(src) {
    // Buffer.from replaces the deprecated `new Buffer(string)` constructor.
    return XXHash.hash(Buffer.from(src), seed);
}
// Benchmark case: seeded xxhash over the shared input buffer.
// (Fragment of a benchmark suite chain that starts above this view.)
.add('xxhash+seed', function () {
    xxhash.hash(inputBuffer, seed);
})