// Renders the HTTP message body: decompresses gzip-encoded content, then
// chooses a presentation (image, editor, raw, iframe, ...) based on the
// user-selected render type held in component state.
render() {
  let element
  if (this.props.more_body) {
    // More body chunks are still streaming in; render a placeholder.
    element = <div/>
  } else {
    let body = this.props.body
    if (this.props.headers['content-encoding'] == 'gzip') {
      // Body arrived as a binary string: re-encode each char code into a
      // byte array, inflate, then rebuild a string from the output bytes.
      const compressedBody = this.props.body.split('').map(function(x){return x.charCodeAt(0)})
      const compressedBodyBin = new Uint8Array(compressedBody)
      const bodyBin = pako.inflate(compressedBodyBin)
      body = String.fromCharCode.apply(null, new Uint8Array(bodyBin))
    }
    const content_type = this.props.headers['content-type']
    switch (this.state.render_type) {
      case "auto":
        // Images get an inline data-URI preview; everything else an editor.
        if (content_type && content_type.startsWith("image/")) {
          const data = 'data:'+content_type+';base64,'+btoa(body)
          element = <img src={data}/>
        } else {
          element = <AceEditor width="100%" mode="html" theme="monokai" name={this._reactInternalInstance._rootNodeID} value={body} />
        }
        break
      case "raw":
        element = <span className='body'>{body}</span>
        break
      case "editor":
        element = <AceEditor width="100%" mode="html" theme="monokai" name={this._reactInternalInstance._rootNodeID} value={body} />
        break
      case "iframe":
        element = <iframe srcDoc={body}></iframe>
        break
      case "shadowdom":
        // Placeholder div; presumably populated via this.refs.shadow
        // elsewhere in the component lifecycle — not visible here.
        element = <div ref="shadow"/>
        break
      case "data":
        // NOTE(review): ';davo.io,' is not a standard data-URI segment
        // (usually ';base64,' or just ',') — confirm this is intentional.
        const content_type_without_opts = content_type.split(";")[0]
        element = <a target="_blank" rel="noopener noreferrer" href={'data:' + content_type_without_opts + ';davo.io,' + escape(body)}>Open body in new tab.</a>
        break
      case "canvas":
        element = <canvas className="body" ref={(ref) => this.rasterizingCanvas = ref}></canvas>
        break
      case "switcharoo":
        // http://lcamtuf.coredump.cx/switch/
        break
    }
  }
  return (
    <div className="body">
      <div className="btn-group-sm">
        <RadioGroup className="render_type" name={this._reactInternalInstance._rootNodeID} value={this.state.render_type} items={this.props.render_types} onChange={this.handleRenderType} renderRadio={this.renderRadio} />
      </div>
      {element}
    </div>
  )
}
// When the read completes, inflate the compressed payload and publish the
// decoded UTF-8 text to the store; an empty buffer clears the logs instead.
reader.addEventListener('loadend', function() {
  const raw = reader.result;
  if (raw.byteLength === 0) {
    store.commit('logs', '');
    return;
  }
  const text = new TextDecoder('utf-8').decode(pako.inflate(raw));
  store.commit('logs', text);
});
/** * Parses the raw data from a websocket event, inflating it if necessary * @param {*} data Event data * @returns {Object} */ parseEventData(data) { if (erlpack) { if (data instanceof ArrayBuffer) data = convertArrayBuffer(data); return erlpack.unpack(data); } else { if (data instanceof ArrayBuffer) data = pako.inflate(data, { to: 'string' }); else if (data instanceof Buffer) data = zlib.inflateSync(data).toString(); return JSON.parse(data); } }
// Once the file is read, gunzip it unless it is already a bare .nii file,
// then hand the bytes to the NIfTI header parser. Gunzip failures are
// reported through the callback via handleGunzipError.
fileReader.onloadend = function() {
  const bytes = new Uint8Array(fileReader.result)
  let payload
  try {
    payload = file.name.endsWith('.nii') ? bytes : pako.inflate(bytes)
  } catch (err) {
    callback(handleGunzipError(bytes, file))
    return
  }
  callback(parseNIfTIHeader(payload, file))
}
// Each incoming frame is zlib-compressed JSON. Answer server pings so the
// connection stays alive, route tick messages to the handler, and log
// anything unrecognized.
ws.on('message', (data) => {
  const text = pako.inflate(data, { to: 'string' });
  const msg = JSON.parse(text);
  if (msg.ping) {
    ws.send(JSON.stringify({ pong: msg.ping }));
  } else if (msg.tick) {
    handle(msg);
  } else {
    console.log(text);
  }
});
constructor(json_data, map) { super(json_data, map); if (this.type !== 'tilelayer') { throw `can't construct TileLayer from ${this.type} layer`; } let parseGID = function(gid) { let flip_h = false; // Javascript uses 32-bit signed bitwise ops, but // values are written to json unsigned in Tiled. if (gid > 2147483648) { flip_h = true; gid -= 2147483648; } let flip_v = gid & 1073741824 ? true : false; let flip_d = gid & 536870912 ? true : false; gid &= ~1610612736; return [gid, flip_h, flip_v, flip_d]; }; let { width, height } = this, data = json_data.data; if(json_data.encoding === 'base64' && json_data.compression === 'zlib') { data = new Uint32Array(pako.inflate(base64.toByteArray(data)).buffer); } this.data = []; for (let i = 0; i < width; i++) { this.data[i] = []; for (let j = 0; j < height; j++) { this.data[i][j] = parseGID(data[i + j * width]); } } // Parallax can be set in tile layer properties in Tiled. if (this.properties != null) { if (this.properties.parallax != null) { this.parallax = +this.properties.parallax; } } else { this.parallax = 1; } }
// After the read finishes: bare .nii files go straight to the parser,
// anything else is gunzipped first; gunzip errors are routed to the
// callback through handleGunzipError.
fileReader.onloadend = function (e) {
  const raw = new Uint8Array(fileReader.result);
  if (file.name.endsWith('.nii')) {
    callback(parseNIfTIHeader(raw, file));
    return;
  }
  let inflated;
  try {
    inflated = pako.inflate(raw);
  } catch (err) {
    callback(handleGunzipError(raw, file));
    return;
  }
  callback(parseNIfTIHeader(inflated, file));
};
/** * Pre-process data to be parsed (find data type and de-compress) * @param {*} data */ _preprocess(data) { const parsedUrl = CoreUtils.parseUrl(data.url); // update data data.filename = parsedUrl.filename; data.extension = parsedUrl.extension; data.pathname = parsedUrl.pathname; data.query = parsedUrl.query; // unzip if extension is '.gz' if (data.extension === 'gz') { data.gzcompressed = true; data.extension = data.filename.split('.gz').shift().split('.').pop(); let decompressedData = PAKO.inflate(data.buffer); data.buffer = decompressedData.buffer; } else { data.gzcompressed = false; } }
// Gateway message handler: inflates compressed frames, tracks the sequence
// number, follows redirects, and dispatches gateway events. On READY/RESUME
// the queued sends are flushed and heartbeating is (re)started.
this.socket.on("message", e => {
  if (this.compressed && e instanceof Buffer) {
    e = pako.inflate(e, {to: "string"});
  }
  const msg = JSON.parse(e);
  const { op, s: seq, t: type, d: data } = msg;
  if (seq && this.sessionId) this.seq = seq;
  if (op === OPCODE_REDIRECT) {
    this.connect(data.url);
  }
  if (op === OPCODE_DISPATCH) {
    if (["READY", "RESUME"].indexOf(type) >= 0) {
      if (type === "READY") {
        this.sessionId = data.session_id;
        this.userId = data.user.id;
      }
      // Flush anything queued while the session was being established.
      let payload;
      while (payload = this.sendQueue.shift()) {
        this.socket.send(payload);
      }
      if (data.heartbeat_interval > 0) {
        this.socket.setHeartbeat(OPCODE_HEARTBEAT, data.heartbeat_interval);
      }
    }
    this.Dispatcher.emit(Events.GATEWAY_DISPATCH, {socket: this, type: type, data: data});
  }
});
// Handles XHR completion: scans response headers for tagged, indexed,
// Base64+deflate-encoded script chunks, reassembles them in order, and
// executes the inflated script (Tracy debug-bar payload delivery).
onReadyStateChange(e) {
  let request = e.currentTarget;
  if (request.readyState === 4 && request.status === 200) {
    if (!window.Tracy) {
      return;
    }
    try {
      let headers = request.getAllResponseHeaders();
      let data = [];
      let tag = this.tag;
      let a;
      let b;
      let c;
      let d;
      // Each tagged header looks like "<tag><index>: <chunk>"; collect the
      // chunks by numeric index so they can be joined in order below.
      while ((a = headers.indexOf(this.tag)) !== -1) {
        headers = headers.substr(a + this.tag.length);
        b = headers.indexOf(':');
        c = parseInt(headers.substr(0, b));
        d = b;
        // NOTE(review): `a` (the chunk's end-of-line) is only refreshed
        // while spaces follow the colon — if no space is present the stale
        // tag offset is used below; confirm the header format always emits
        // exactly one space after the colon.
        while (headers.charAt(++d) === ' ') {
          a = headers.indexOf('\n');
        }
        data[c] = headers.substring(d, a);
        headers = headers.substr(a);
      }
      if (!data.length) {
        return;
      }
      // Reassemble, Base64-decode, inflate, then run the debug script.
      // NOTE(review): eval of header-delivered code is a deliberate part of
      // this debug tooling, but is only safe on trusted responses.
      let code = pako.inflate(Base64.decode(data.join("")), { to: 'string' });
      eval(code);
      headers = null;
      data = null;
      code = null;
    } catch (e) {
    }
  }
}
// Loads an opening-book database (compiled-TypeScript async body): prefer
// the in-memory `files` cache, then localforage, then a network fetch. The
// inflated bytes are opened as an in-memory sql.js SQLite database.
return __awaiter(this, void 0, void 0, function* () {
    let file = files[book.file];
    if (!file) {
        file = yield localforage_1.localforage.getItem(book.file);
        if (file) {
            if (file.version !== book.file_version) {
                // NOTE(review): this refresh is not awaited, so the stale
                // `file.data` below is still used for this call; confirm the
                // "serve old, update for next time" behavior is intentional.
                fetchBinary(book.url, "Updating book " + book.name + "...").then(data => {
                    file.data = data;
                    files[book.file] = file;
                    return localforage_1.localforage.setItem(book.file, file);
                });
            }
        }
        else {
            // Not cached anywhere: download and persist for next time.
            file = { version: book.file_version, data: yield fetchBinary(book.url, "Downloading book " + book.name + "...") };
            yield localforage_1.localforage.setItem(book.file, file);
        }
    }
    return new sql_js_1.Database(pako_1.inflate(file.data));
});
return response.arrayBuffer().then(data => { return untar(pako.inflate(data)).then(files => { return new ProjectArchive((files: Array<TarEntry>)); }); });
// Looks up and decompresses the stored region blob for (layer, x, y). The
// 5-char key packs the layer index and the high/low bytes of x and y.
World.prototype.getRegionData = function (layer, x, y, callback) {
  var key = String.fromCharCode(layer, x >> 8, x & 255, y >> 8, y & 255);
  var buffer = this.db.get(key);
  var inflated = pako.inflate(new Uint8Array(buffer));
  // NOTE(review): `callback` is accepted but never invoked and the result is
  // returned synchronously instead — confirm callers rely on the return
  // value, otherwise the parameter is dead (or the invocation is missing).
  return inflated.buffer;
};
/**
 * Decompress a gzip buffer
 * @param {Buffer}
 * @return {Buffer}
 */
function unzip(buf) {
  const raw = bufferToUint8(buf);
  const inflated = pako.inflate(raw);
  return uint8ToBuffer(inflated);
}
// Inflate a compressed msgpack payload and decode it back into an object.
function inflate(obj) {
  const unpacked = pako.inflate(obj);
  return msgpack.decode(unpacked);
}
// Load the gzipped zip-code database once at require time and export the
// parsed JSON object.
var fs = require('fs');
var pako = require('pako');

var compressed = fs.readFileSync(__dirname + '/lib/zipcode-ja.json.gz');
module.exports = JSON.parse(pako.inflate(compressed, {to: 'string'}));
// Inflate the fetched bytes and untar the underlying ArrayBuffer.
// NOTE(review): this fragment is truncated — the trailing `.then(files => {`
// opens a handler whose body continues outside this view.
return response.arrayBuffer().then(data => { return untar(pako.inflate(data).buffer); }).then(files => {
/*
 * Streaming PNG decoder.
 *
 * Parses `dataBuffer` as a complete PNG stream via a byte-level state
 * machine (`inputData`): validates the signature, reads IHDR (dimensions,
 * bit depth, color type), collects PLTE/tRNS, queues IDAT chunks, and
 * accumulates trailing data after IEND. The queued IDAT buffers are then
 * concatenated, inflated with pako, and unfiltered scanline-by-scanline
 * (`unpackPixels`, filters 0-4 incl. Paeth) into an RGBA-ish pixel array
 * whose channel count (`idChannels`) depends on color type, palette
 * grayscale-ness, and tRNS presence.
 *
 * @param {Buffer} dataBuffer  raw PNG bytes
 * @returns {ImageData|null}   new ImageData(width, height, channels,
 *   pixels, trailer), or null on any structural/decode error
 *
 * NOTE(review): chunk CRCs are ignored and inflate return values are not
 * checked (see FIXMEs inline); tRNS is only supported on paletted images.
 */
function parseData(dataBuffer) { var state = 0, off = 0, buf = new Buffer(13), b = -1, p = 0, pngPaletteEntries = 0, pngAlphaEntries = 0, chunkLength, pngWidth, pngHeight, pngBitDepth, pngDepthMult, pngColorType, pngPixels, pngSamplesPerPixel, pngBytesPerPixel, pngBytesPerScanline, pngSamples, currentScanline, priorScanline, scanlineFilter, pngTrailer, pngPalette, pngAlpha, idChannels; var inflateQueue = [] function inputData(data) { var len = data.length, i = 0, tmp, j; while(i !== len) switch(state) { case 0: /* PNG header */ if(data.readUInt8(i++) !== HEADER[off++]) return false if(off === HEADER.length) { state = 1 off = 0 } break case 1: /* PNG chunk length and type */ if(len - i < 8 - off) { data.copy(buf, off, i) off += len - i i = len } else { data.copy(buf, off, i, i + 8 - off) i += 8 - off off = 0 chunkLength = buf.readUInt32BE(0) switch(buf.toString("ascii", 4, 8)) { case "IHDR": state = 2 break case "PLTE": /* The PNG spec states that PLTE is only required for type 3. * It may appear in other types, but is only useful if the * display does not support true color. Since we're just a data * storage format, we don't have to worry about it. */ if(pngColorType !== 3) state = 7 else { if(chunkLength % 3 !== 0) return false pngPaletteEntries = chunkLength / 3 pngPalette = new Buffer(chunkLength) state = 3 } break case "tRNS": if(pngColorType !== 3) return false /* We only support tRNS on paletted images right now. Those * images may either have 1 or 3 channels, but in either case * we add one for transparency. */ idChannels ++ pngAlphaEntries = chunkLength pngAlpha = new Buffer(chunkLength) state = 4 break case "IDAT": /* Allocate the PNG if we havn't yet. (We wait to do it until * here since tRNS may change idChannels, so we can't be sure of * the size needed until we hit IDAT. With all that, might as * well wait until we're actually going to start filling the * buffer in case of errors... 
*/ if(!pngPixels) pngPixels = new Uint8Array(pngWidth * pngHeight * idChannels) state = 5 break case "IEND": state = 6 break default: state = 7 break } } break case 2: /* IHDR */ if(chunkLength !== 13) return false else if(len - i < chunkLength - off) { data.copy(buf, off, i) off += len - i i = len } else { data.copy(buf, off, i, i + chunkLength - off) if(buf.readUInt8(10) !== 0) return false if(buf.readUInt8(11) !== 0) return false if(buf.readUInt8(12) !== 0) return false i += chunkLength - off state = 8 off = 0 pngWidth = buf.readUInt32BE(0) pngHeight = buf.readUInt32BE(4) pngBitDepth = buf.readUInt8(8) pngDepthMult = 255 / ((1 << pngBitDepth) - 1) pngColorType = buf.readUInt8(9) switch(pngColorType) { case 0: pngSamplesPerPixel = 1 pngBytesPerPixel = Math.ceil(pngBitDepth * 0.125) idChannels = 1 break case 2: pngSamplesPerPixel = 3 pngBytesPerPixel = Math.ceil(pngBitDepth * 0.375) idChannels = 3 break case 3: pngSamplesPerPixel = 1 pngBytesPerPixel = 1 idChannels = 3 break case 4: pngSamplesPerPixel = 2 pngBytesPerPixel = Math.ceil(pngBitDepth * 0.250) idChannels = 2 break case 6: pngSamplesPerPixel = 4 pngBytesPerPixel = Math.ceil(pngBitDepth * 0.5) idChannels = 4 break default: return false } pngBytesPerScanline = Math.ceil( pngWidth * pngBitDepth * pngSamplesPerPixel / 8 ) pngSamples = new Buffer(pngSamplesPerPixel) currentScanline = new Buffer(pngBytesPerScanline) priorScanline = new Buffer(pngBytesPerScanline) currentScanline.fill(0) } break case 3: /* PLTE */ if(len - i < chunkLength - off) { data.copy(pngPalette, off, i) off += len - i i = len } else { data.copy(pngPalette, off, i, i + chunkLength - off) i += chunkLength - off state = 8 off = 0 /* If each entry in the color palette is grayscale, set the channel * count to 1. 
*/ idChannels = 1; for(j = pngPaletteEntries; j--; ) if(pngPalette[j * 3 + 0] !== pngPalette[j * 3 + 1] || pngPalette[j * 3 + 0] !== pngPalette[j * 3 + 2]) { idChannels = 3; break; } } break case 4: /* tRNS */ if(len - i < chunkLength - off) { data.copy(pngAlpha, off, i) off += len - i i = len } else { data.copy(pngAlpha, off, i, i + chunkLength - off) i += chunkLength - off state = 8 off = 0 } break case 5: /* IDAT */ /* If the amount available is less than the amount remaining, then * feed as much as we can to the inflator. */ if(len - i < chunkLength - off) { /* FIXME: Do I need to be smart and check the return value? */ inflateQueue.push(data.slice(i)) off += len - i i = len } /* Otherwise, write the last bit of the data to the inflator, and * finish processing the chunk. */ else { /* FIXME: Do I need to be smart and check the return value? */ inflateQueue.push(data.slice(i, i + chunkLength - off)) i += chunkLength - off state = 8 off = 0 } break case 6: /* IEND */ if(chunkLength !== 0) return false else if(len - i < 4 - off) { off += len - i i = len } else { pngTrailer = new Buffer(0) i += 4 - off state = 9 off = 0 } break case 7: /* unrecognized chunk */ if(len - i < chunkLength - off) { off += len - i i = len } else { i += chunkLength - off state = 8 off = 0 } break case 8: /* chunk crc */ /* FIXME: CRC is blatantly ignored */ if(len - i < 4 - off) { off += len - i i = len } else { i += 4 - off state = 1 off = 0 } break case 9: /* trailing data */ /* FIXME: It is inefficient to create a trailer buffer of length zero * and keep reallocating it every time we want to add more data. 
*/ tmp = new Buffer(off + len - i) pngTrailer.copy(tmp) data.copy(tmp, off, i, len) pngTrailer = tmp off += len - i i = len break } return true } //Try parsing header data if(!inputData(dataBuffer)) { return null } if(state !== 9) { return null } //Concatenate all inflate buffers var inflateBuffer = Buffer.concat(inflateQueue) var inflateData = pako.inflate(new Uint8Array(inflateBuffer)) function unpackPixels(data) { var len = data.length, i, tmp, x, j, k for(i = 0; i !== len; ++i) { if(b === -1) { scanlineFilter = data[i] tmp = currentScanline currentScanline = priorScanline priorScanline = tmp } else switch(scanlineFilter) { case 0: currentScanline[b] = data[i] break case 1: currentScanline[b] = b < pngBytesPerPixel ? data[i] : (data[i] + currentScanline[b - pngBytesPerPixel]) & 255 break case 2: currentScanline[b] = (data[i] + priorScanline[b]) & 255 break case 3: currentScanline[b] = (data[i] + (( b < pngBytesPerPixel ? priorScanline[b] : currentScanline[b - pngBytesPerPixel] + priorScanline[b] ) >>> 1)) & 255 break case 4: currentScanline[b] = (data[i] + ( b < pngBytesPerPixel ? priorScanline[b] : paeth( currentScanline[b - pngBytesPerPixel], priorScanline[b], priorScanline[b - pngBytesPerPixel] ) )) & 255 break default: return null } if(++b === pngBytesPerScanline) { /* One scanline too many? */ if(p === pngPixels.length) return null /* We have now read a complete scanline, so unfilter it and write it * into the pixel array. */ for(j = 0, x = 0; x !== pngWidth; ++x) { /* Read all of the samples into the sample buffer. 
*/ for(k = 0; k !== pngSamplesPerPixel; ++j, ++k) switch(pngBitDepth) { case 1: pngSamples[k] = (currentScanline[(j >>> 3)] >> (7 - (j & 7))) & 1 break case 2: pngSamples[k] = (currentScanline[(j >>> 2)] >> ((3 - (j & 3)) << 1)) & 3 break case 4: pngSamples[k] = (currentScanline[(j >>> 1)] >> ((1 - (j & 1)) << 2)) & 15 break case 8: pngSamples[k] = currentScanline[j] break default: return null } /* Write the pixel based off of the samples so collected. */ switch(pngColorType) { case 0: pngPixels[p++] = pngSamples[0] * pngDepthMult; break; case 2: pngPixels[p++] = pngSamples[0] * pngDepthMult; pngPixels[p++] = pngSamples[1] * pngDepthMult; pngPixels[p++] = pngSamples[2] * pngDepthMult; break; case 3: if(pngSamples[0] >= pngPaletteEntries) return null switch(idChannels) { case 1: pngPixels[p++] = pngPalette[pngSamples[0] * 3]; break; case 2: pngPixels[p++] = pngPalette[pngSamples[0] * 3]; pngPixels[p++] = pngSamples[0] < pngAlphaEntries ? pngAlpha[pngSamples[0]] : 255; break; case 3: pngPixels[p++] = pngPalette[pngSamples[0] * 3 + 0]; pngPixels[p++] = pngPalette[pngSamples[0] * 3 + 1]; pngPixels[p++] = pngPalette[pngSamples[0] * 3 + 2]; break; case 4: pngPixels[p++] = pngPalette[pngSamples[0] * 3 + 0]; pngPixels[p++] = pngPalette[pngSamples[0] * 3 + 1]; pngPixels[p++] = pngPalette[pngSamples[0] * 3 + 2]; pngPixels[p++] = pngSamples[0] < pngAlphaEntries ? pngAlpha[pngSamples[0]] : 255; break; } break; case 4: pngPixels[p++] = pngSamples[0] * pngDepthMult; pngPixels[p++] = pngSamples[1] * pngDepthMult; break; case 6: pngPixels[p++] = pngSamples[0] * pngDepthMult; pngPixels[p++] = pngSamples[1] * pngDepthMult; pngPixels[p++] = pngSamples[2] * pngDepthMult; pngPixels[p++] = pngSamples[3] * pngDepthMult; break; } } b = -1; } } return true } if(!unpackPixels(inflateData)) { return null } if(p !== pngPixels.length) { return null } return new ImageData(pngWidth, pngHeight, idChannels, pngPixels, pngTrailer) }
"zlib": function(str) { return pako.inflate(str, {to: 'string'}); }