// Reads `expected.length` bytes from `fd` twice — once through the async
// fs.read() API and once through fs.readSync() — and asserts that both code
// paths fill the supplied buffers with `expected` and report the expected
// byte count.
function test(bufferAsync, bufferSync, expected) {
  const expectedLen = expected.length;

  const onRead = common.mustCall((err, bytesRead) => {
    assert.ifError(err);
    assert.strictEqual(bytesRead, expectedLen);
    assert.deepStrictEqual(bufferAsync, Buffer.from(expected));
  });
  fs.read(fd, bufferAsync, 0, expectedLen, 0, onRead);

  const syncBytesRead = fs.readSync(fd, bufferSync, 0, expectedLen, 0);
  assert.deepStrictEqual(bufferSync, Buffer.from(expected));
  assert.strictEqual(syncBytesRead, expectedLen);
}
// Rewrites a 44-byte RIFF/WAV-style header into `newFile`: each 4-byte word
// is copied from the source header (read via the module-level songData.fd),
// except the overall file size at offset 4 and the data-chunk size at
// offset 40, which are replaced with the supplied values.
// NOTE(review): `oldFile` is unused — the source header is read from
// songData.fd instead; confirm that is intended.
function writeHeader(oldFile, newFile, newDataSize, newFileSize) {
  var buffer = Buffer.alloc(4); // was deprecated `new Buffer(4)`
  // BUG FIX: the original loop condition was `i < 40`, which made the
  // `i === 40` branch unreachable (the new data size was never written) and
  // emitted only 40 of the 44 header bytes. `i <= 40` covers word offsets
  // 0..40 inclusive (11 words = 44 bytes).
  for (var i = 0; i <= 40; i += 4) {
    if (i === 4) {
      // Write new overall file size
      fs.writeSync(newFile, getBuffer(newFileSize), 0, 4);
    } else if (i === 40) {
      // Write new data size
      fs.writeSync(newFile, getBuffer(newDataSize), 0, 4);
    } else {
      // Copy this header word unchanged from the source header
      fs.readSync(songData.fd, buffer, 0, 4, i);
      fs.writeSync(newFile, buffer, 0, 4);
    }
  }
}
// Synchronously copies srcFile to destFile in BUF_LENGTH-sized chunks,
// creating the destination with the source file's mode. The transfer goes
// through the shared module-level buffer `_buff`.
var copyFileSync = function(srcFile, destFile) {
  var srcFd = fs.openSync(srcFile, 'r')
  var srcStat = fs.fstatSync(srcFd)
  var destFd = fs.openSync(destFile, 'w', srcStat.mode)

  var offset = 0
  // A read of 0 bytes signals EOF and ends the loop.
  for (var read = 1; read > 0; offset += read) {
    read = fs.readSync(srcFd, _buff, 0, BUF_LENGTH, offset)
    fs.writeSync(destFd, _buff, 0, read)
  }
  fs.closeSync(srcFd)
  fs.closeSync(destFd)
}
// Tail-follows the file behind `fd`: reads the next chunk at `position`,
// writes it to stdout, and either continues immediately (full chunk read —
// more data is likely pending) or polls again after `delay` ms (short read —
// we are at the current end of the file).
const loop = () => {
  console.log('..');
  // Buffer.alloc replaces the deprecated, uninitialized `new Buffer()`.
  const buf = Buffer.alloc(chunk_size);
  // The third argument is the write offset into `buf`; `position` is the
  // file offset to read from.
  const bytesRead = fs.readSync(fd, buf, 0, chunk_size, position);
  position += bytesRead;
  // Emit only the bytes actually read.
  process.stdout.write(buf.slice(0, bytesRead));
  if (bytesRead < chunk_size) {
    // Reached the current end of the file: poll again after `delay`.
    setTimeout(loop, delay);
  } else {
    // A full chunk was read: keep reading immediately.
    loop();
  }
};
// Synchronously reads up to `length` bytes from the file at `path`.
// `offset` is the write offset into the destination buffer; `position` is
// the file offset to start reading from. Returns a Buffer trimmed to the
// number of bytes actually read (short reads happen near EOF).
FileManager.byteRead = function(path, offset, length, position) {
  var fd = fs.openSync(path, 'r');
  var buffer = Buffer.alloc(length); // was deprecated `new Buffer(length)`
  var newBuf;
  try {
    var bytesRead = fs.readSync(fd, buffer, offset, length, position);
    if (bytesRead < length) {
      // Short read: return only the bytes that were actually read.
      newBuf = Buffer.alloc(bytesRead);
      buffer.copy(newBuf);
    }
  } finally {
    // BUG FIX: the descriptor previously leaked if readSync threw.
    fs.closeSync(fd);
  }
  return newBuf || buffer;
};
// Synchronous chunked file copy from `src` to `dest`.
// BUG FIXES vs. the original:
//  - readSync/writeSync referenced an undefined `fd` and `buffer` instead of
//    the opened descriptors (`fdsrc`/`fddest`) and the local `buff`;
//  - data was written back to the (undefined) source variable instead of the
//    destination descriptor;
//  - the read position advanced by BUFFER_SIZE even on short reads, which
//    would skip/garble data on the final partial chunk.
function copy(src, dest) {
  var readSoFar = 0, fdsrc, fddest, read;
  var buff = Buffer.alloc(BUFFER_SIZE); // was deprecated `new Buffer()`
  fdsrc = fs.openSync(src, 'r');
  fddest = fs.openSync(dest, 'w');
  do {
    // fd, buffer, offset, length, position
    read = fs.readSync(fdsrc, buff, 0, BUFFER_SIZE, readSoFar);
    // Write only the bytes actually read, to the destination descriptor.
    fs.writeSync(fddest, buff, 0, read);
    // Advance by the actual byte count, not the nominal buffer size.
    readSoFar += read;
  } while (read > 0);
  fs.closeSync(fdsrc);
  fs.closeSync(fddest);
}
module.exports = function(srcFile, destFile){ BUF_LENGTH = 64*1024; buff = new Buffer(BUF_LENGTH); fdr = fs.openSync(srcFile, 'r'); fdw = fs.openSync(destFile, 'w'); bytesRead = 1; pos = 0; while (bytesRead > 0) { bytesRead = fs.readSync(fdr, buff, 0, BUF_LENGTH, pos) fs.writeSync(fdw,buff,0,bytesRead) pos += bytesRead } fs.closeSync(fdr); fs.closeSync(fdw); }
// Synchronous chunked copy of `src` to `dest` using a BUFFER_SIZE-sized
// transfer buffer. The loop ends when a read returns less than a full
// buffer, which indicates EOF.
function copy(src, dest) {
  var buff = Buffer.alloc(BUFFER_SIZE); // was deprecated `new Buffer()`
  // readSoFar: current read position in the source file;
  // fdSrc / fdDest: source and destination file descriptors.
  var readSoFar, fdSrc, fdDest, read;
  fdSrc = fs.openSync(src, 'r');
  fdDest = fs.openSync(dest, 'w');
  readSoFar = 0;
  do {
    // Read the next chunk; `read` is the number of bytes actually read.
    read = fs.readSync(fdSrc, buff, 0, BUFFER_SIZE, readSoFar);
    fs.writeSync(fdDest, buff, 0, read);
    readSoFar += read;
  } while (read == BUFFER_SIZE);
  fs.closeSync(fdDest);
  fs.closeSync(fdSrc);
}
// Heuristically detects an MPEG transport stream: MPEG-TS packets are 188
// bytes long and each starts with the 0x47 sync byte, so a file whose bytes
// 0 and 188 are both 0x47 is treated as MPEG and skipped by the linter.
function isMpegFile(file) {
  // Buffer.alloc is zero-filled, replacing the deprecated `new Buffer(256)`
  // followed by an explicit fill(0).
  var buffer = Buffer.alloc(256);
  var fd = fs.openSync(file, 'r');
  try {
    fs.readSync(fd, buffer, 0, 256, null);
    if (buffer.readInt8(0) === 0x47 && buffer.readInt8(188) === 0x47) {
      logger_1.Logger.debug("tslint: " + file + ": ignoring MPEG transport stream");
      return true;
    }
  } finally {
    // Always release the descriptor, even if the read throws.
    fs.closeSync(fd);
  }
  return false;
}
// Synchronous chunked file copy (64 KiB at a time) from srcFile to destFile.
// Replaces the deprecated `new Buffer()` constructor with Buffer.alloc.
copy.sync = function(srcFile, destFile) {
  var BUF_LENGTH = 64 * 1024;
  var buff = Buffer.alloc(BUF_LENGTH);
  var fdr = fs.openSync(srcFile, "r");
  var fdw = fs.openSync(destFile, "w");
  var bytesRead = 1;
  var pos = 0;
  // A read of 0 bytes signals EOF.
  while (bytesRead > 0) {
    bytesRead = fs.readSync(fdr, buff, 0, BUF_LENGTH, pos);
    fs.writeSync(fdw, buff, 0, bytesRead);
    pos += bytesRead;
  }
  fs.closeSync(fdr);
  return fs.closeSync(fdw);
};
// Synchronous chunked copy (64 KiB blocks) of srcFile to destFile.
// FIXES: BUF_LENGTH was declared twice in the original var list, and the
// deprecated `new Buffer()` constructor is replaced with Buffer.alloc.
function copyFile(srcFile, destFile) {
  var BUF_LENGTH = 64 * 1024;
  var buff = Buffer.alloc(BUF_LENGTH);
  var fdr = FS.openSync(srcFile, 'r');
  var fdw = FS.openSync(destFile, 'w');
  var bytesRead = 1;
  var pos = 0;
  // A read of 0 bytes signals EOF.
  while (bytesRead > 0) {
    bytesRead = FS.readSync(fdr, buff, 0, BUF_LENGTH, pos);
    FS.writeSync(fdw, buff, 0, bytesRead);
    pos += bytesRead;
  }
  FS.closeSync(fdr);
  return FS.closeSync(fdw);
}
readString : function (size) { if (size === 0) return ""; var result = "", i, code; this.checkOffset(size); // for(i = 0; i < size; i++) // { // code = this.byteAt(this.index + i); // result += String.fromCharCode(code); // } var buf = new Buffer(size); fs.readSync(this.stream, buf, 0, size, this.index); result = buf.toString("binary"); this.index += size; return result; },
// Attaches an edge-triggered GPIO interrupt handler via epoll on the pin's
// sysfs value file. Returns (and passes to `callback`) whether the interrupt
// was attached; also reports `configured` on the failure paths.
f.attachInterrupt = function(pin, handler, mode, callback) {
  if(debug) winston.debug('attachInterrupt(' + [pin, handler, mode] + ');');
  pin = my.getpin(pin);
  var resp = {'pin':pin, 'attached': false};

  // Guard: the Epoll native module must be available.
  if(!epoll.exists) {
    resp.err = 'attachInterrupt: requires Epoll module';
    if(debug) winston.debug(resp.err);
    if(callback) callback(resp);
    return(resp.attached);
  }
  // Guard: the pin must have been configured as GPIO first.
  if(!gpio[pin.gpio]) {
    resp.attached = false;
    resp.configured = false;
    if(callback) callback(resp);
    return(resp.attached);
  }
  // Guard: only one interrupt handler per pin.
  if(gpio[pin.gpio].intProc) {
    resp.attached = false;
    resp.configured = true;
    if(callback) callback(resp);
    return(resp.attached);
  }

  var gpioFile = '/sys/class/gpio/gpio' + pin.gpio + '/value';
  var valuefd = fs.openSync(gpioFile, 'r');
  var value = Buffer.alloc(1); // was deprecated `new Buffer(1)`
  // Select which edge(s) trigger the interrupt ('rising', 'falling', 'both').
  fs.writeFileSync('/sys/class/gpio/gpio' + pin.gpio + '/edge', mode);
  handler = (typeof handler === "string") ? my.myeval('(' + handler + ')') : handler;

  var intHandler = function(err, fd, events) {
    // BUG FIX: the original assigned `m.err = err` before `var m = {}` had
    // executed, so any epoll error crashed with a TypeError instead of being
    // reported to the handler.
    var m = {};
    if(err) { m.err = err; }
    fs.readSync(valuefd, value, 0, 1, 0);
    m.pin = pin;
    // NOTE(review): Number(Buffer) evaluates to NaN, so this is likely always
    // NaN; parseInt(value.toString(), 2) may have been intended. Left
    // unchanged pending confirmation against callers.
    m.value = parseInt(Number(value), 2);
    if(typeof handler =='function') m.output = handler(m);
    else m.output = {handler:handler};
    if(m.output && (typeof callback == 'function')) callback(m);
  };
  gpio[pin.gpio].intProc = new epoll.Epoll(intHandler);
  // Initial read clears any pending interrupt state before arming epoll.
  fs.readSync(valuefd, value, 0, 1, 0);
  gpio[pin.gpio].intProc.add(valuefd, epoll.Epoll.EPOLLPRI);
  gpio[pin.gpio].intProc.valuefd = valuefd;
  resp.attached = true;
  if(callback) callback(resp);
  return(resp.attached);
};
// Handles an incoming packet on the dfu-nrf51 channel. Each received packet
// acknowledges the previous frame and triggers sending the next `size`-byte
// chunk of the firmware image from the module-level open file `fd`,
// maintaining a running CRC32 in `checksum`.
channel.receiving = function(err, packet, cbChan) {
  if(packet && fd) {
    var json = {at:offset};
    // NOTE(review): `new Buffer(1024)` is deprecated and uninitialized.
    var body = new Buffer(1024);
    var readLength = fs.readSync(fd,body,0,size,offset);
    // NOTE(review): Buffer.length is read-only, so this assignment is a
    // silent no-op — on a short (final) read the stale tail of `body` is
    // still checksummed and sent. Presumably `body = body.slice(0,
    // readLength)` was intended; confirm against the receiver's protocol
    // before changing.
    if (readLength < 1024) {
      body.length = readLength;
    }
    // On the first frame include the image sizes and type.
    if (offset == 0) {
      json.start = true;
      if(imageType & nrf51ImageTypeSoftdevice) { json.imageSizeSoftdevice = imageSizeSoftdevice; }
      if(imageType & nrf51ImageTypeBootloader) { json.imageSizeBootloader = imageSizeBootloader; }
      if(imageType & nrf51ImageTypeApplication) { json.imageSizeApplication = imageSizeApplication; }
      json.imageType = imageType;
    }
    offset += readLength;
    // Fold this chunk into the running CRC32.
    checksum = crc.crc32(body,checksum);
    // A short read means end of image: attach the final checksum, mark the
    // last frame, and close the file.
    if(readLength != size) {
      json.done = checksum;
      json.end = true;
      fs.closeSync(fd);
      fd = undefined
    }
    channel.send({json:json,body:body});
  }else{
    // No packet or no open file: resolve the module-level completion callback.
    res()
  }
  cbChan();
}
// Buffered file copy, synchronous
// (Using readFileSync() + writeFileSync() could easily cause a memory overflow
// with large files)
function copyFileSync(srcFile, destFile, options) {
  if (!fs.existsSync(srcFile)) {
    common.error('copyFileSync: no such file or directory: ' + srcFile);
  }
  // Symlinks are re-created at the destination rather than dereferenced,
  // unless options.followsymlink is set.
  if (fs.lstatSync(srcFile).isSymbolicLink() && !options.followsymlink) {
    try {
      fs.lstatSync(destFile);
      common.unlinkSync(destFile); // re-link it
    } catch (e) {
      // it doesn't exist, so no work needs to be done
    }
    var symlinkFull = fs.readlinkSync(srcFile);
    // Windows requires 'junction' links; elsewhere the type argument is moot.
    fs.symlinkSync(symlinkFull, destFile, os.platform() === 'win32' ? 'junction' : null);
  } else {
    var BUF_LENGTH = 64 * 1024;
    var buf = Buffer.alloc(BUF_LENGTH); // FIX: was deprecated `new Buffer()`
    var bytesRead = BUF_LENGTH;
    var pos = 0;
    var fdr = null;
    var fdw = null;
    try {
      fdr = fs.openSync(srcFile, 'r');
    } catch (e) {
      common.error('copyFileSync: could not read src file (' + srcFile + ')');
    }
    try {
      fdw = fs.openSync(destFile, 'w');
    } catch (e) {
      common.error('copyFileSync: could not write to dest file (code=' + e.code + '):' + destFile);
    }
    // A short read signals EOF and terminates the loop.
    while (bytesRead === BUF_LENGTH) {
      bytesRead = fs.readSync(fdr, buf, 0, BUF_LENGTH, pos);
      fs.writeSync(fdw, buf, 0, bytesRead);
      pos += bytesRead;
    }
    fs.closeSync(fdr);
    fs.closeSync(fdw);
    // Preserve the source file's permission bits on the copy.
    fs.chmodSync(destFile, fs.statSync(srcFile).mode);
  }
}
readEndOfCentral : function() {
  // Locates the End Of Central Directory record by scanning backwards from
  // the end of the zip file in (1024+4)-byte windows, then positions the
  // reader on it and parses it. (The commented-out zip64 handling was never
  // implemented; this path assumes a non-zip64 archive.)
  this.zip64 = false;
  // find the offset
  // NOTE(review): `new Buffer()` is deprecated and uninitialized.
  var buf = new Buffer(1024 + 4);
  // Start one window before EOF; the 4-byte overlap ensures a signature that
  // straddles a window boundary can still be found.
  var foffset = fs.fstatSync(this.reader.stream).size - 1024 - 4;
  // NOTE(review): for archives smaller than 1028 bytes `foffset` starts
  // negative, and fs.readSync would throw before the `foffset = 0` clamp
  // below ever runs — confirm small archives are handled upstream.
  while(true) {
    var readBytes = fs.readSync(this.reader.stream, buf, 0, 1024 + 4, foffset);
    var offset = buf.toString("binary").lastIndexOf(JSZip.signature.CENTRAL_DIRECTORY_END);
    if ( offset !== -1 ) {
      // Found: convert the in-window index to an absolute file offset.
      offset += foffset;
      break;
    }
    if (foffset === 0) {
      break;
    }
    // Step back one window (keeping the 4-byte overlap), clamped to 0.
    foffset -= 1024;
    if (foffset <0 ) {
      foffset = 0;
    }
  }
  if (offset === -1) {
    throw new Error("Corrupted zip : can't find end of central directory");
  }
  this.reader.setIndex(offset);
  this.checkSignature(JSZip.signature.CENTRAL_DIRECTORY_END);
  this.readBlockEndOfCentral();
},
// Polls the simulator log `file` every 30 ms, forwarding newly-appended
// complete lines to the appropriate logger level. When the file disappears
// (ENOENT) the simulator is assumed to have exited: print a footer and exit.
// Uses module-level state: `position` (file offset already consumed), `buf`
// (shared 16-byte read buffer), `buffer` (pending partial line text),
// `lastLogger` (current log level), `readChangesTimer`.
(function readChanges () {
  try {
    var stats = fs.statSync(file), fd, bytesRead, lines, m,line, i, len;
    // Only read when the file has grown past the last consumed offset.
    if (position < stats.size) {
      fd = fs.openSync(file, 'r');
      do {
        // Pull the new tail of the file in 16-byte slices.
        // NOTE(review): a multi-byte UTF-8 character split across two
        // 16-byte reads will be decoded incorrectly — presumably the log
        // content is ASCII; confirm.
        bytesRead = fs.readSync(fd, buf, 0, 16, position);
        position += bytesRead;
        buffer += buf.toString('utf-8', 0, bytesRead);
      } while (bytesRead === 16);
      fs.closeSync(fd);
      lines = buffer.split('\n');
      buffer = lines.pop(); // keep the last line because it could be incomplete
      for (i = 0, len = lines.length; i < len; i++) {
        line = lines[i];
        if (line) {
          // A "[LEVEL]" prefix switches the active logger level; subsequent
          // unprefixed lines inherit it.
          m = line.match(logLevelRE);
          if (m) {
            lastLogger = m[2].toLowerCase();
            line = m[4].trim();
          }
          if (levels.indexOf(lastLogger) == -1) {
            // Unknown level: echo with a cyan tag instead of a logger method.
            logger.log(('[' + lastLogger.toUpperCase() + '] ').cyan + line);
          } else {
            logger[lastLogger](line);
          }
        }
      }
    }
    // Re-arm the poll.
    readChangesTimer = setTimeout(readChanges, 30);
  } catch (ex) {
    if (ex.code == 'ENOENT') {
      // Log file removed: the simulator has stopped.
      clearTimeout(readChangesTimer);
      if (simStarted) {
        var endLogTxt = __('End simulator log');
        logger.log(('-- ' + endLogTxt + ' ' + (new Array(75 - endLogTxt.length)).join('-')).grey);
      }
      logger.log();
      process.exit(0);
    }
    // Any other error is unexpected — propagate it.
    throw ex;
  }
}());
// Resolves this.source into a Buffer and caches it on this.data:
//  - Buffer: used as-is;
//  - number: treated as a file descriptor, whose full size (per fstat) is
//    read from the descriptor's current position;
//  - string: treated as a filename and read synchronously.
// Throws if the source is none of the above.
Reader.prototype.getContents = function() {
  var source = this.source;
  var len = null;
  var buf = null;
  if (Buffer.isBuffer(source)) {
    return this.data = source;
  } else if ('number' === typeof source) {
    len = fs.fstatSync(source).size;
    buf = Buffer.alloc(len); // FIX: was deprecated factory call Buffer(len)
    fs.readSync(source, buf, 0, len);
    return this.data = buf;
  } else if ('string' === typeof source) {
    return this.data = fs.readFileSync(source);
  }
  // FIX: throw a real Error (with a stack trace) instead of a bare string.
  throw new Error("unrecognized source. must be filename, file descriptor or buffer");
}
copyFileSync = function(srcFile, destFile) { //via http://procbits.com/2011/11/15/synchronous-file-copy-in-node-js/ var BUF_LENGTH, buff, bytesRead, fdr, fdw, pos; BUF_LENGTH = 64 * 1024; buff = new Buffer(BUF_LENGTH); fdr = fs.openSync(srcFile, 'r'); fdw = fs.openSync(destFile, 'w'); bytesRead = 1; pos = 0; while (bytesRead > 0) { bytesRead = fs.readSync(fdr, buff, 0, BUF_LENGTH, pos); fs.writeSync(fdw, buff, 0, bytesRead); pos += bytesRead; } fs.closeSync(fdr); return fs.closeSync(fdw); },
/**
 * Generator based line reader: yields one Buffer per line (without the line
 * terminator), handling LF, CRLF and CR endings, including terminators that
 * straddle chunk boundaries. Line fragments are accumulated across chunks in
 * `lineBuffer` via the module-level `_concat` helper.
 *
 * @param {Number} [fd] The file descriptor
 * @param {Number} [filesize] The size of the file in bytes
 * @param {Number} [bufferSize] The size of the buffer in bytes (default 64 KiB)
 * @param {Number} [position] The position where to start reading the file in bytes (default 0)
 * @return {Object} The generator object
 */
function* readlines(fd, filesize, bufferSize, position) {
  if (typeof bufferSize === 'undefined') bufferSize = 64 * 1024;
  if (typeof position === 'undefined') position = 0;
  let lineBuffer;
  while (position < filesize) {
    // Shrink the final chunk so we never read past the known file size.
    let remaining = filesize - position;
    if (remaining < bufferSize) bufferSize = remaining;
    // FIX: Buffer.alloc replaces the deprecated, uninitialized `new Buffer()`.
    let readChunk = Buffer.alloc(bufferSize);
    let bytesRead = fs.readSync(fd, readChunk, 0, bufferSize, position);
    let curpos = 0;
    let startpos = 0;
    let lastbyte = null;
    let curbyte;
    while (curpos < bytesRead) {
      curbyte = readChunk[curpos];
      // skip LF if last chunk ended in CR
      if (curbyte === LF && lastbyte !== CR || curbyte === CR && curpos < bytesRead - 1) {
        // End of line: emit any carried-over fragment plus this chunk's part.
        yield _concat(lineBuffer, readChunk.slice(startpos, curpos));
        lineBuffer = undefined;
        startpos = curpos + 1;
        // CRLF: consume the LF as part of the same terminator.
        if (curbyte === CR && readChunk[curpos + 1] === LF) {
          startpos++;
          curpos++;
        }
      } else if (curbyte === CR && curpos >= bytesRead - 1) {
        // CR at the very end of the chunk: remember it so a leading LF in the
        // next chunk is not treated as a fresh line ending.
        lastbyte = curbyte;
      }
      curpos++;
    }
    position += bytesRead;
    // Carry an unterminated tail fragment into the next chunk.
    if (startpos < bytesRead) {
      lineBuffer = _concat(lineBuffer, readChunk.slice(startpos, bytesRead));
    }
  }
  // dump what ever is left in the buffer
  if (Buffer.isBuffer(lineBuffer)) yield lineBuffer;
};
// Reads the four calibration resistor values (R8..R11) as little-endian
// 32-bit integers from offset 244 of the cape EEPROM, logs them, and
// publishes them on the module-level R8..R11 variables.
function initResistors() {
  var buf = Buffer.alloc(128); // FIX: was deprecated `new Buffer()`
  // NOTE(review): eepromFile is assigned without a declaration (implicit
  // global) — kept as-is in case other code reads it; confirm.
  eepromFile = '/sys/bus/i2c/drivers/at24/1-0054/eeprom';
  var eeprom = fs.openSync( eepromFile, 'r');
  try {
    fs.readSync(eeprom, buf, 0, 128, 244);
  } finally {
    // FIX: the descriptor previously leaked if readSync threw.
    fs.closeSync(eeprom);
  }
  console.log('R8: '+buf.readUInt32LE(0));
  console.log('R9: '+buf.readUInt32LE(4));
  console.log('R10: '+buf.readUInt32LE(8));
  console.log('R11: '+buf.readUInt32LE(12));
  R8 = buf.readUInt32LE(0);
  R9 = buf.readUInt32LE(4);
  R10 = buf.readUInt32LE(8);
  R11 = buf.readUInt32LE(12);
};
// Synchronous chunked copy (64 KiB blocks) of srcFile to destFile.
// FIX: Buffer.alloc replaces the deprecated `new Buffer()` constructor.
function copyFileSync(srcFile, destFile) {
  console.log('[DEBUG] copyFileSync from ' + srcFile + " to " + destFile);
  var BUF_LENGTH = 64 * 1024;
  var _buff = Buffer.alloc(BUF_LENGTH);
  var fdr = fs.openSync(srcFile, 'r');
  var fdw = fs.openSync(destFile, 'w');
  var bytesRead = 1;
  var pos = 0;
  // A read of 0 bytes signals EOF.
  while(bytesRead > 0) {
    bytesRead = fs.readSync(fdr, _buff, 0, BUF_LENGTH, pos);
    fs.writeSync(fdw, _buff, 0, bytesRead);
    pos += bytesRead;
  }
  fs.closeSync(fdr);
  return fs.closeSync(fdw);
}
exports.copyFile = function(srcPath,dstPath) { // Create any directories in the destination $tw.utils.createDirectory(path.dirname(dstPath)); // Copy the file var srcFile = fs.openSync(srcPath,"r"), dstFile = fs.openSync(dstPath,"w"), bytesRead = 1, pos = 0; while (bytesRead > 0) { bytesRead = fs.readSync(srcFile,fileBuffer,0,FILE_BUFFER_LENGTH,pos); fs.writeSync(dstFile,fileBuffer,0,bytesRead); pos += bytesRead; } fs.closeSync(srcFile); fs.closeSync(dstFile); return null; };
// Loads a header database from `filename`: the file must be an exact
// multiple of 80-byte records, and each full record is handed to
// this.addBuf().
HeaderDB.prototype.readFile = function(filename) {
  var fd = fs.openSync(filename, 'r');
  var stats = fs.fstatSync(fd);
  // FIX: strict `!==` instead of loose `!=`.
  if (stats.size % 80 !== 0) {
    // FIX: close the descriptor before throwing (it previously leaked here).
    fs.closeSync(fd);
    throw new Error("Corrupted header db");
  }
  while (1) {
    // A fresh buffer per record: addBuf() may retain the reference.
    var buf = Buffer.alloc(80); // FIX: was deprecated `new Buffer()`
    // position null => read sequentially from the current file position.
    var bread = fs.readSync(fd, buf, 0, 80, null);
    if (bread < 80) break;
    this.addBuf(buf);
  }
  fs.closeSync(fd);
};
// Synchronously reads the fixed-width cell (tableName, colName, id) from the
// column's backing file and decodes it to the column's declared type.
var _readSync = function(tableName, colName, id) {
  var key = tableName+'|'+colName;
  _openSync(tableName, colName); // ensure a file handle exists for this column
  var col = _tables[tableName].columns[colName],
      buffer = new Buffer(col.length), // NOTE(review): deprecated Buffer constructor
      sector = _getSector(tableName, colName, id); // byte range of this cell
  if (_handles[key]) {
    var fd = _handles[key].fd;
    // Read exactly one fixed-width cell starting at its sector offset.
    _fs.readSync(fd, buffer, 0, buffer.length, sector.start);
    return _decode(buffer.toString('utf8'), col.type);
  } else {
    // NOTE(review): retries by recursing with identical arguments; if
    // _openSync ever fails to register _handles[key] this recurses without
    // bound. Presumably _openSync is synchronous and always populates the
    // handle on success — confirm.
    return _readSync(tableName, colName, id);
  }
};
// Reads the first `chars` bytes (default 500) of the file at `path` and
// returns the complete lines within them, dropping a trailing partial line
// so the result is safe to feed to template engines without errors.
Utils.readFileHead = function(path, chars) {
  // FIX: when `chars` was omitted, the original passed `undefined` as the
  // read length to fs.readSync while sizing the buffer at 500.
  if (chars == null) chars = 500;
  var buffer = Buffer.alloc(chars); // FIX: was deprecated `new Buffer()`
  var file = fs.openSync(path, "r");
  var len;
  try {
    // Read in file
    len = fs.readSync(file, buffer, 0, chars, 0);
  } finally {
    // FIX: the descriptor was previously never closed (leak per call).
    fs.closeSync(file);
  }
  // Convert to lines, and discard any fragments
  var str = buffer.slice(0, len).toString().split("\n");
  return str.slice(0, str.length - 1).join("\n");
};
// Synchronous chunked binary copy of srcFile to destFile, 64 KiB at a time.
// FIX: Buffer.alloc replaces the deprecated `new Buffer()` constructor.
function copyBinaryFile(srcFile, destFile) {
  var BUF_LENGTH = 64*1024;
  var buf = Buffer.alloc(BUF_LENGTH);
  var bytesRead = BUF_LENGTH;
  var pos = 0;
  var fdr = fs.openSync(srcFile, 'r');
  var fdw = fs.openSync(destFile, 'w');
  // A short read signals EOF and terminates the loop.
  while (bytesRead === BUF_LENGTH) {
    bytesRead = fs.readSync(fdr, buf, 0, BUF_LENGTH, pos);
    fs.writeSync(fdw, buf, 0, bytesRead);
    pos += bytesRead;
  }
  fs.closeSync(fdr);
  fs.closeSync(fdw);
}
fread : function(buf, len) {
  // Buffered read: fills `buf` with up to `len` bytes (default: buf.length)
  // from this file, serving from the internal read buffer and refilling it
  // from disk as needed. Returns the number of bytes actually delivered
  // (less than `len` only at end of file).
  if (len === undefined)
    len = buf.length;

  /* got will be our mark in the buf argument. When got reaches len,
     we're done. (Unless we hit EOF first.) */
  var got = 0;

  while (true) {
    if (this.bufuse == filemode_Read) {
      if (this.bufmark < this.buflen) {
        // Serve as much as possible from the unread part of the internal
        // buffer (buffer[bufmark .. buflen)).
        var want = len - got;
        if (want > this.buflen - this.bufmark)
          want = this.buflen - this.bufmark;
        if (want > 0) {
          this.buffer.copy(buf, got, this.bufmark, this.bufmark+want);
          this.bufmark += want;
          got += want;
        }
      }
      if (got >= len)
        return got;
      /* We need more, but we've consumed the entire buffer. Fall
         through to the next step where we will fflush and keep
         reading. */
    }
    // Flush any pending buffered state (e.g. a write buffer) before
    // switching the buffer into read mode.
    if (this.bufuse)
      this.fflush();
    /* ### if len-got >= BUFFER_SIZE, we could read directly and
       ignore our buffer. */
    this.bufuse = filemode_Read;
    this.bufmark = 0;
    // Refill the internal buffer from the current file mark.
    // NOTE(review): this.mark is not advanced here — presumably fflush()
    // or the seek machinery maintains it; confirm.
    this.buflen = fs.readSync(this.fd, this.buffer, 0, BUFFER_SIZE, this.mark);
    if (this.buflen == 0) {
      /* End of file. Mark the buffer unused, since it's empty. */
      this.bufuse = 0;
      return got;
    }
    /* mark stays at the buffer start position */
  }
},
// Test fixture: reads the first `bufferSize` bytes of the fixture image,
// detects its type, computes dimensions from the buffer (except for TIFF,
// which needs the whole file), and asynchronously computes dimensions from
// the file path before signalling completion.
beforeEach(function (done) {
  var buffer = Buffer.alloc(bufferSize);
  var filepath = path.resolve(file);
  var descriptor = fs.openSync(filepath, 'r');
  try {
    fs.readSync(descriptor, buffer, 0, bufferSize, 0);
  } finally {
    // FIX: the descriptor was previously never closed, leaking one fd per test.
    fs.closeSync(descriptor);
  }
  type = detector(buffer);
  // tiff cannot support buffers, unless the buffer contains the entire file
  if (type !== 'tiff') {
    bufferDimensions = imageSize(buffer);
  }
  imageSize(file, function (err, _dim) {
    asyncDimensions = _dim;
    done();
  });
});
// Uploads the media file in base64 chunks via the chunked-upload APPEND
// command, one request per bufferLength-sized segment.
fs.open(params.media, 'r', function (err, fd) {
  // FIX: the open error was previously ignored, which surfaced later as a
  // confusing readSync failure on an undefined fd.
  if (err) throw err;
  var bytesRead, data;
  while (offset < stats.size) {
    // position null => sequential read from the descriptor's current position.
    bytesRead = fs.readSync(fd, theBuffer, 0, bufferLength, null);
    // Guard against an unexpected early EOF (e.g. file truncated mid-upload)
    // so the loop cannot spin forever on zero-byte reads.
    if (bytesRead === 0) break;
    // Trim the final, partial chunk so stale buffer bytes are not uploaded.
    data = bytesRead < bufferLength ? theBuffer.slice(0, bytesRead) : theBuffer;
    options.formData = {
      command: "APPEND",
      media_id: media_id,
      segment_index: segment_index,
      media_data: data.toString('base64')
    };
    request.post(options, finalizeMedia(media_id));
    // FIX: advance by the bytes actually read (the original added
    // bufferLength unconditionally, so `offset` overshot on the last chunk).
    offset += bytesRead;
    segment_index++
  }
  // FIX: the descriptor was previously never closed.
  fs.closeSync(fd);
});