Commit b09a0c69 authored by Alexander Rose

use fast number parsers

parent 62511192
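The commit replaces Number(Tokenizer.getTokenString(...)), which allocates a substring for every token before converting it, with parsers that read the value directly from the backing string between the token's start and end offsets. The implementation of the imported number-parser module is not part of this diff; the sketch below only illustrates the idea, and parseIntSpan is a made-up name for it.

// Minimal sketch of a substring-free integer parser in the spirit of the
// number-parser module imported below (the real implementation is not shown
// in this diff; parseIntSpan is a hypothetical name used for illustration).
// It reads char codes directly in [start, end) instead of allocating a
// substring for Number()/parseInt().
function parseIntSpan(str: string, start: number, end: number): number {
    let ret = 0, neg = 1, i = start;
    if (str.charCodeAt(i) === 45 /* '-' */) { neg = -1; i++; }
    for (; i < end; i++) {
        const c = str.charCodeAt(i) - 48; // 48 === '0'.charCodeAt(0)
        if (c < 0 || c > 9) break;        // stop at the first non-digit
        ret = ret * 10 + c;
    }
    return neg * ret;
}

// Example: parse the token "42" out of a larger line without slicing it out.
const line = '3 42 7';
const value = parseIntSpan(line, 2, 4); // 42

Skipping the intermediate string matters here because the parser runs once per property per vertex of the PLY body, which dominates parse time for large meshes.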
 /**
- * Copyright (c) 2017 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ * Copyright (c) 2019 mol* contributors, licensed under MIT, See LICENSE file for more info.
  *
  * @author Schäfer, Marco <marco.schaefer@uni-tuebingen.de>
  * @author Alexander Rose <alexander.rose@weirdbyte.de>
  */

 import { Tokens, TokenBuilder, Tokenizer } from '../../common/text/tokenizer'
 import * as Data from './data-model'
 import{ ReaderResult } from '../../result'
 import {Task, RuntimeContext, chunkedSubtask } from 'mol-task'
+import { parseInt as fastParseInt, parseFloat as fastParseFloat } from '../../common/text/number-parser'

 const enum PlyTokenType {
     Value = 0,
@@ -18,7 +19,6 @@ const enum PlyTokenType {
     element = 4
 }

-
 interface State {
     data: string;
     tokenizer: Tokenizer,
@@ -54,7 +54,6 @@ interface State {
 }

-
 function State(data: string, runtimeCtx: RuntimeContext, opts: PlyOptions): State {
     const tokenizer = Tokenizer(data)
     return {
         data,
@@ -134,7 +133,6 @@ function eatLine (state: Tokenizer) {
 }

-
 function skipLine(state: Tokenizer) {
     while (state.position < state.length) {
         const c = state.data.charCodeAt(state.position);
@@ -143,14 +141,13 @@ function skipLine(state: Tokenizer) {
     }
 }

-function getColumns(state: State, NumberofColumns: number){
+function getColumns(state: State, numberOfColumns: number) {
     eatLine(state.tokenizer);
-    let tmp = (Tokenizer.getTokenString(state.tokenizer))
-    let split = tmp.split(" ", NumberofColumns);
+    let tmp = Tokenizer.getTokenString(state.tokenizer)
+    let split = tmp.split(' ', numberOfColumns);
     return split;
 }

-
 /**
  * Move to the next token.
  * Returns true when the current char is a newline, i.e. indicating a full record.
@@ -189,18 +186,18 @@ function moveNextInternal(state: State) {
         state.tokenType = PlyTokenType.Value;
         let return_value = eatValue(tokenizer);

-        if(state.endHeader === 1)
-        {
+        if (state.endHeader === 1) {
             if (state.currentVertex < state.vertexCount) {
+                // TODO the numbers are parsed twice
                 state.properties[state.currentVertex * state.propertyCount + state.currentProperty] = Number(Tokenizer.getTokenString(state.tokenizer));
                 if (state.currentProperty < 3) {
-                    state.vertices[state.currentVertex * 3 + state.currentProperty] = Number(Tokenizer.getTokenString(state.tokenizer));
+                    state.vertices[state.currentVertex * 3 + state.currentProperty] = fastParseFloat(state.tokenizer.data, state.tokenizer.tokenStart, state.tokenizer.tokenEnd);
                 }
                 if (state.currentProperty >= 3 && state.currentProperty < 6) {
-                    state.colors[state.currentVertex * 3 + state.currentProperty-3] = Number(Tokenizer.getTokenString(state.tokenizer));
+                    state.colors[state.currentVertex * 3 + state.currentProperty - 3] = fastParseInt(state.tokenizer.data, state.tokenizer.tokenStart, state.tokenizer.tokenEnd);
                 }
                 if (state.currentProperty >= 6 && state.currentProperty < 9) {
-                    state.normals[state.currentVertex * 3 + state.currentProperty-6] = Number(Tokenizer.getTokenString(state.tokenizer));
+                    state.normals[state.currentVertex * 3 + state.currentProperty - 6] = fastParseFloat(state.tokenizer.data, state.tokenizer.tokenStart, state.tokenizer.tokenEnd);
                 }
                 state.currentProperty++;
                 if (state.currentProperty === state.propertyCount) {
@@ -210,7 +207,7 @@ function moveNextInternal(state: State) {
                 return return_value;
             }
             if (state.currentFace < state.faceCount && state.currentVertex === state.vertexCount) {
-                state.faces[state.currentFace * 4 + state.currentFaceElement] = Number(Tokenizer.getTokenString(state.tokenizer));
+                state.faces[state.currentFace * 4 + state.currentFaceElement] = fastParseInt(state.tokenizer.data, state.tokenizer.tokenStart, state.tokenizer.tokenEnd);
                 state.currentFaceElement++;
                 if (state.currentProperty === 4) {
                     state.currentFaceElement = 0;
@@ -234,8 +231,6 @@ function moveNext(state: State) {
     return newRecord
 }

-
-
 function readRecordsChunk(chunkSize: number, state: State) {
     if (state.tokenType === PlyTokenType.End) return 0
@@ -258,13 +253,10 @@ function readRecordsChunks(state: State) {
 function addHeadEntry (state: State) {
     const head = Tokenizer.getTokenString(state.tokenizer)
-    console.log(head)
     state.initialHead.push(head)
     state.tokens.push(TokenBuilder.create(head, state.data.length / 80))
 }

-
-
 function init(state: State) { // only for first two lines to get the format and the coding! (marco)
     let newRecord = moveNext(state)
     while (!newRecord) { // newRecord is only true when a newline occurs (marco)
@@ -308,15 +300,6 @@ async function parseInternal(data: string, ctx: RuntimeContext, opts: PlyOptions
     ctx.update({ message: 'Parsing...', current: 0, max: data.length });
     const PLYdata = await handleRecords(state)
     const result = Data.PlyFile(PLYdata)
-    console.log(result);
-
-    // let Data_for_Shape = plyToShape.collectData_for_Shape(table, datas);
-    // console.log(plyToShape.getShape(state.runtimeCtx, table));
-    // let shape = plyToShape.init_ren(PLYdata);
-    // console.log("shape"+shape);
-    // const script = document.createElement('script');
-    // script.src = "../../build/src/mol-model/shape/formarts/ply/plyData_to_shape.js";
-    // document.body.appendChild(script);
     return ReaderResult.success(result);
 }
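The TODO added in moveNextInternal notes that vertex tokens are still converted twice: once with Number(Tokenizer.getTokenString(...)) for state.properties and again with the fast parsers for the vertices/colors/normals arrays. A possible follow-up, not part of this commit, is to parse the token span once and reuse the value; the sketch below assumes the State field names visible in the diff, typed arrays for the outputs, and folds only the x/y/z branch for brevity.

// Hypothetical "parse once, assign twice" version of the vertex block above
// (not part of this commit). parseFloatSpan stands in for the imported
// fastParseFloat; the field names and array types are assumptions for the sketch.
function assignVertexProperty(
    state: {
        tokenizer: { data: string, tokenStart: number, tokenEnd: number },
        properties: Float32Array, vertices: Float32Array,
        propertyCount: number, currentVertex: number, currentProperty: number
    },
    parseFloatSpan: (s: string, start: number, end: number) => number
) {
    const { data, tokenStart, tokenEnd } = state.tokenizer;
    const value = parseFloatSpan(data, tokenStart, tokenEnd); // parsed exactly once
    state.properties[state.currentVertex * state.propertyCount + state.currentProperty] = value;
    if (state.currentProperty < 3) {
        // x, y, z reuse the already parsed value instead of a second conversion;
        // the color and normal branches could be folded in the same way.
        state.vertices[state.currentVertex * 3 + state.currentProperty] = value;
    }
}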