Commit b09a0c69 authored by Alexander Rose

use fast number parsers

parent 62511192
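This commit replaces Number(Tokenizer.getTokenString(...)), which allocates a fresh substring for every numeric token, with the parseInt/parseFloat helpers from the number-parser module, which read digits directly out of the backing string between a token's tokenStart and tokenEnd offsets. As a rough illustration of the idea only (a minimal sketch, not the parser actually imported in the diff), a range-based integer parser can look like this:

// Minimal sketch of a range-based integer parser (illustration only, not the
// implementation imported below): it walks char codes between [start, end)
// instead of slicing a substring first.
function sketchParseInt(str: string, start: number, end: number): number {
    let ret = 0, neg = 1, i = start;
    if (str.charCodeAt(i) === 45 /* '-' */) { neg = -1; i++; }
    else if (str.charCodeAt(i) === 43 /* '+' */) { i++; }
    for (; i < end; i++) {
        const d = str.charCodeAt(i) - 48; // '0'
        if (d < 0 || d > 9) break; // stop at the first non-digit
        ret = ret * 10 + d;
    }
    return neg * ret;
}

// Usage: parse the token at [6, 9) without creating '425' as an intermediate string.
const example = 'x y z 425 1.0';
console.log(sketchParseInt(example, 6, 9)); // 425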
/**
- * Copyright (c) 2017 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ * Copyright (c) 2019 mol* contributors, licensed under MIT, See LICENSE file for more info.
*
* @author Schäfer, Marco <marco.schaefer@uni-tuebingen.de>
* @author Alexander Rose <alexander.rose@weirdbyte.de>
*/
import { Tokens, TokenBuilder, Tokenizer } from '../../common/text/tokenizer'
import * as Data from './data-model'
import { ReaderResult } from '../../result'
import { Task, RuntimeContext, chunkedSubtask } from 'mol-task'
+import { parseInt as fastParseInt, parseFloat as fastParseFloat } from '../../common/text/number-parser'
const enum PlyTokenType {
Value = 0,
@@ -18,7 +19,6 @@ const enum PlyTokenType {
element = 4
}
interface State {
data: string;
tokenizer: Tokenizer,
@@ -54,7 +54,6 @@ interface State {
}
function State(data: string, runtimeCtx: RuntimeContext, opts: PlyOptions): State {
const tokenizer = Tokenizer(data)
return {
data,
@@ -134,7 +133,6 @@ function eatLine (state: Tokenizer) {
}
function skipLine(state: Tokenizer) {
while (state.position < state.length) {
const c = state.data.charCodeAt(state.position);
@@ -143,14 +141,13 @@ function skipLine(state: Tokenizer) {
}
}
-function getColumns(state: State, NumberofColumns: number){
+function getColumns(state: State, numberOfColumns: number) {
eatLine(state.tokenizer);
-let tmp = (Tokenizer.getTokenString(state.tokenizer))
-let split = tmp.split(" ", NumberofColumns);
+let tmp = Tokenizer.getTokenString(state.tokenizer)
+let split = tmp.split(' ', numberOfColumns);
return split;
}
/**
* Move to the next token.
* Returns true when the current char is a newline, i.e. indicating a full record.
@@ -189,18 +186,18 @@ function moveNextInternal(state: State) {
state.tokenType = PlyTokenType.Value;
let return_value = eatValue(tokenizer);
-if(state.endHeader === 1)
-{
+if (state.endHeader === 1) {
if (state.currentVertex < state.vertexCount) {
// TODO the numbers are parsed twice
state.properties[state.currentVertex * state.propertyCount + state.currentProperty] = Number(Tokenizer.getTokenString(state.tokenizer));
if (state.currentProperty < 3) {
-state.vertices[state.currentVertex * 3 + state.currentProperty] = Number(Tokenizer.getTokenString(state.tokenizer));
+state.vertices[state.currentVertex * 3 + state.currentProperty] = fastParseFloat(state.tokenizer.data, state.tokenizer.tokenStart, state.tokenizer.tokenEnd);
}
if (state.currentProperty >= 3 && state.currentProperty < 6) {
-state.colors[state.currentVertex * 3 + state.currentProperty-3] = Number(Tokenizer.getTokenString(state.tokenizer));
+state.colors[state.currentVertex * 3 + state.currentProperty - 3] = fastParseInt(state.tokenizer.data, state.tokenizer.tokenStart, state.tokenizer.tokenEnd);
}
if (state.currentProperty >= 6 && state.currentProperty < 9) {
-state.normals[state.currentVertex * 3 + state.currentProperty-6] = Number(Tokenizer.getTokenString(state.tokenizer));
+state.normals[state.currentVertex * 3 + state.currentProperty - 6] = fastParseFloat(state.tokenizer.data, state.tokenizer.tokenStart, state.tokenizer.tokenEnd);
}
state.currentProperty++;
if (state.currentProperty === state.propertyCount) {
@@ -210,7 +207,7 @@ function moveNextInternal(state: State) {
return return_value;
}
if (state.currentFace < state.faceCount && state.currentVertex === state.vertexCount) {
-state.faces[state.currentFace * 4 + state.currentFaceElement] = Number(Tokenizer.getTokenString(state.tokenizer));
+state.faces[state.currentFace * 4 + state.currentFaceElement] = fastParseInt(state.tokenizer.data, state.tokenizer.tokenStart, state.tokenizer.tokenEnd);
state.currentFaceElement++;
if (state.currentProperty === 4) {
state.currentFaceElement = 0;
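In the hunk above, each vertex property previously went through Tokenizer.getTokenString, which materialises the token as a new string before Number converts it; the new calls hand the backing data string plus the token's tokenStart/tokenEnd offsets straight to the range-based parsers, so the per-value hot path no longer allocates. A throwaway micro-benchmark along these lines (hypothetical, not part of the commit; the import path matches this file and would need adjusting elsewhere) makes the difference visible:

// Hypothetical micro-benchmark: substring + Number() vs. range-based parsing
// over the same pre-tokenized offsets.
import { parseFloat as fastParseFloat } from '../../common/text/number-parser'

const parts: string[] = [];
for (let i = 0; i < 1000000; i++) parts.push((Math.random() * 100).toFixed(3));
const text = parts.join(' ');

// token offsets, analogous to tokenStart/tokenEnd in the tokenizer state
const tokens: [number, number][] = [];
let p = 0;
for (const s of parts) { tokens.push([p, p + s.length]); p += s.length + 1; }

console.time('Number(substring)');
let a = 0;
for (const [s, e] of tokens) a += Number(text.substring(s, e));
console.timeEnd('Number(substring)');

console.time('fastParseFloat(range)');
let b = 0;
for (const [s, e] of tokens) b += fastParseFloat(text, s, e);
console.timeEnd('fastParseFloat(range)');

console.log('sums', a, b); // keep the results live so the loops are not elided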
@@ -234,8 +231,6 @@ function moveNext(state: State) {
return newRecord
}
function readRecordsChunk(chunkSize: number, state: State) {
if (state.tokenType === PlyTokenType.End) return 0
@@ -258,13 +253,10 @@ function readRecordsChunks(state: State) {
function addHeadEntry (state: State) {
const head = Tokenizer.getTokenString(state.tokenizer)
-console.log(head)
state.initialHead.push(head)
state.tokens.push(TokenBuilder.create(head, state.data.length / 80))
}
function init(state: State) { // only for first two lines to get the format and the coding! (marco)
let newRecord = moveNext(state)
while (!newRecord) { // newRecord is only true when a newline occurs (marco)
@@ -308,15 +300,6 @@ async function parseInternal(data: string, ctx: RuntimeContext, opts: PlyOptions
ctx.update({ message: 'Parsing...', current: 0, max: data.length });
const PLYdata = await handleRecords(state)
const result = Data.PlyFile(PLYdata)
-console.log(result);
-// let Data_for_Shape = plyToShape.collectData_for_Shape(table, datas);
-// console.log(plyToShape.getShape(state.runtimeCtx, table));
-// let shape = plyToShape.init_ren(PLYdata);
-// console.log("shape"+shape);
-// const script = document.createElement('script');
-// script.src = "../../build/src/mol-model/shape/formarts/ply/plyData_to_shape.js";
-// document.body.appendChild(script);
return ReaderResult.success(result);
}
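The exported entry point is outside the hunks shown here; judging from the Task, RuntimeContext and ReaderResult imports at the top of the file, parseInternal is presumably wrapped roughly as sketched below (illustrative only; parsePly is a placeholder name and the real wrapper may differ):

// Sketch of the surrounding entry point (not shown in this diff): wrapping the
// internal parser in a Task lets it report progress through its RuntimeContext.
export function parsePly(data: string, opts: PlyOptions) {
    return Task.create<ReaderResult<Data.PlyFile>>('Parse PLY', async ctx => {
        return await parseInternal(data, ctx, opts);
    });
}

// Illustrative usage: running the task yields the ReaderResult produced above.
// const result = await parsePly(plyText, options).run();
// if (!result.isError) console.log(result.result);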