diff --git a/src/mol-io/reader/common/text/tokenizer.ts b/src/mol-io/reader/common/text/tokenizer.ts
index 251cced322c6d3b2c2c189173b96da68c7d5661d..81173c6c37a45df99db0c5e449f817954897c60f 100644
--- a/src/mol-io/reader/common/text/tokenizer.ts
+++ b/src/mol-io/reader/common/text/tokenizer.ts
@@ -134,7 +134,6 @@ namespace Tokenizer {
 
     /** Advance the state by the given number of lines and return line starts/ends as tokens. */
     export async function readLinesAsync(state: Tokenizer, count: number, ctx: RuntimeContext, initialLineCount = 100000): Promise<Tokens> {
-        const { length } = state;
         const lineTokens = TokenBuilder.create(state.data, count * 2);
 
         let linesAlreadyRead = 0;
@@ -143,7 +142,7 @@ namespace Tokenizer {
             readLinesChunk(state, linesToRead, lineTokens);
             linesAlreadyRead += linesToRead;
             return linesToRead;
-        }, (ctx, state) => ctx.update({ message: 'Parsing...', current: state.position, max: length }));
+        }, (ctx, state) => ctx.update({ message: 'Parsing...', current: state.position, max: state.length }));
 
         return lineTokens;
     }
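
For context (an inference from the patch itself, not an authoritative statement of intent): the change removes the `const { length } = state;` destructuring, which captured the tokenizer's length once when `readLinesAsync` started, and instead reads `state.length` inside the progress callback, so each `ctx.update` sees the value at the time it runs. A minimal TypeScript sketch of the difference, using hypothetical names rather than the molstar API:

```ts
// Minimal sketch with hypothetical names (not the molstar API):
// destructuring copies the value at that moment, so later changes to
// the object are not observed by the closure.
interface State { position: number, length: number }

function makeProgressReporter(state: State) {
    const { length } = state;      // snapshot taken here, never updated
    return () => ({
        snapshot: length,          // stays at the value captured above
        live: state.length         // reads the current value on every call
    });
}

const state: State = { position: 0, length: 100 };
const report = makeProgressReporter(state);
state.length = 250;                // e.g. the underlying data grows
console.log(report());             // { snapshot: 100, live: 250 }
```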