@@ -86,6 +86,7 @@ const {
   promisify,
 } = require('internal/util');
 const { EventEmitterMixin } = require('internal/event_target');
+const { StringDecoder } = require('string_decoder');
 const { watch } = require('internal/fs/watchers');
 const { isIterable } = require('internal/streams/utils');
 const assert = require('internal/assert');
@@ -416,63 +417,75 @@ async function writeFileHandle(filehandle, data, signal, encoding) {

 async function readFileHandle(filehandle, options) {
   const signal = options?.signal;
+  const encoding = options?.encoding;
+  const decoder = encoding && new StringDecoder(encoding);

   checkAborted(signal);

   const statFields = await binding.fstat(filehandle.fd, false, kUsePromises);

   checkAborted(signal);

-  let size;
+  let size = 0;
+  let length = 0;
   if ((statFields[1 /* mode */] & S_IFMT) === S_IFREG) {
     size = statFields[8 /* size */];
+    length = encoding ? MathMin(size, kReadFileBufferLength) : size;
   } else {
-    size = 0;
+    length = kReadFileUnknownBufferLength;
   }

   if (size > kIoMaxLength)
     throw new ERR_FS_FILE_TOO_LARGE(size);

-  let endOfFile = false;
   let totalRead = 0;
-  const noSize = size === 0;
-  const buffers = [];
-  const fullBuffer = noSize ? undefined : Buffer.allocUnsafeSlow(size);
-  do {
+  let buffer = Buffer.allocUnsafeSlow(length);
+  let result = '';
+  let isBufferFull = true;
+  let offset = 0;
+  let buffers;
+
+  while (true) {
     checkAborted(signal);
-    let buffer;
-    let offset;
-    let length;
-    if (noSize) {
-      buffer = Buffer.allocUnsafeSlow(kReadFileUnknownBufferLength);
-      offset = 0;
-      length = kReadFileUnknownBufferLength;
-    } else {
-      buffer = fullBuffer;
-      offset = totalRead;
+    if (size === 0) {
       length = MathMin(size - totalRead, kReadFileBufferLength);
     }

     const bytesRead = (await binding.read(filehandle.fd, buffer, offset,
-                                          length, -1, kUsePromises)) || 0;
+                                          length, -1, kUsePromises)) ?? 0;
     totalRead += bytesRead;
-    endOfFile = bytesRead === 0 || totalRead === size;
-    if (noSize && bytesRead > 0) {
-      const isBufferFull = bytesRead === kReadFileUnknownBufferLength;
-      const chunkBuffer = isBufferFull ? buffer : buffer.slice(0, bytesRead);
-      ArrayPrototypePush(buffers, chunkBuffer);
+
+    if (bytesRead === 0 || totalRead === size) {
+      const singleRead = bytesRead === totalRead;
+      if (!encoding) {
+        if (size === 0 && !singleRead) {
+          return Buffer.concat(buffers, totalRead);
+        }
+        return buffer;
+      }
+
+      if (singleRead) {
+        return buffer.toString(encoding);
+      }
+      result += decoder.end(buffer.slice(0, bytesRead));
+      return result;
     }
-  } while (!endOfFile);

-  let result;
-  if (size > 0) {
-    result = totalRead === size ? fullBuffer : fullBuffer.slice(0, totalRead);
-  } else {
-    result = buffers.length === 1 ? buffers[0] : Buffer.concat(buffers,
-                                                               totalRead);
+    if (size === 0) {
+      isBufferFull = bytesRead === kReadFileUnknownBufferLength;
+      // Unknown file size requires chunks.
+      if (!encoding) {
+        buffers ??= [];
+        ArrayPrototypePush(buffers, buffer);
+        buffer = Buffer.allocUnsafeSlow(kReadFileUnknownBufferLength);
+      }
+    }
+    if (encoding) {
+      result += decoder.write(isBufferFull ? buffer : buffer.slice(0, bytesRead));
+    } else if (size !== 0) {
+      offset += bytesRead;
+    }
   }
-
-  return options.encoding ? result.toString(options.encoding) : result;
 }

 // All of the functions are defined as async in order to ensure that errors
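
For reference, the new read path boils down to a chunked read-and-decode loop: fixed-size reads are fed through a StringDecoder so multi-byte characters split across chunk boundaries decode correctly, and an encoded result never requires one full-file toString() call. Below is a minimal userland sketch of that pattern built only on public APIs (fs/promises and string_decoder); the function name, chunk size, and example path are illustrative and not part of this patch.

'use strict';
const { open } = require('fs/promises');
const { StringDecoder } = require('string_decoder');

// Read a file in fixed-size chunks and decode incrementally.
async function readFileAsString(path, encoding = 'utf8', chunkSize = 64 * 1024) {
  const decoder = new StringDecoder(encoding);
  const buffer = Buffer.alloc(chunkSize);
  const handle = await open(path, 'r');
  let result = '';
  try {
    while (true) {
      // filehandle.read() resolves with { bytesRead, buffer }.
      const { bytesRead } = await handle.read(buffer, 0, chunkSize, null);
      if (bytesRead === 0) break; // EOF
      // decoder.write() buffers trailing bytes of an incomplete character.
      result += decoder.write(buffer.subarray(0, bytesRead));
    }
  } finally {
    await handle.close();
  }
  // Flush anything the decoder is still holding back.
  return result + decoder.end();
}

readFileAsString('./example.txt').then(console.log);

Inside readFileHandle() the patch follows the same idea, but reuses a single scratch buffer for the encoding path and only falls back to collecting chunk buffers when the size is unknown and no encoding was requested.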