Commit 515aaaeb authored by MarcoSchaeferT's avatar MarcoSchaeferT

Added file selection for PLY files and created an initial PLY parser (working on...)

parent b06fbec7
Showing with 554 additions and 8 deletions
File suppressed by a .gitattributes entry, the file's encoding is unsupported, or the file size exceeds the limit.
......@@ -35,6 +35,7 @@ function print(data: Volume) {
console.log(data.volume.cell);
console.log(data.volume.dataStats);
console.log(data.volume.fractionalBox);
console.log("\n\n Hello 12156421231 \n\n");
}
async function doMesh(data: Volume, filename: string) {
......
/**
* Copyright (c) 2017 mol* contributors, licensed under MIT, See LICENSE file for more info.
*
* Adapted from https://github.com/rcsb/mmtf-javascript
* @author Alexander Rose <alexander.rose@weirdbyte.de>
* @author David Sehnal <david.sehnal@gmail.com>
*/
// NOT IN USE ELSEWHERE!
export function asciiWrite(data: Uint8Array, offset: number, str: string) {
for (let i = 0, l = str.length; i < l; i++) {
let codePoint = str.charCodeAt(i);
// One byte of UTF-8
if (codePoint < 0x80) {
data[offset++] = codePoint >>> 0 & 0x7f | 0x00;
continue;
}
// Two bytes of UTF-8
if (codePoint < 0x800) {
data[offset++] = codePoint >>> 6 & 0x1f | 0xc0;
data[offset++] = codePoint >>> 0 & 0x3f | 0x80;
continue;
}
// Three bytes of UTF-8.
if (codePoint < 0x10000) {
data[offset++] = codePoint >>> 12 & 0x0f | 0xe0;
data[offset++] = codePoint >>> 6 & 0x3f | 0x80;
data[offset++] = codePoint >>> 0 & 0x3f | 0x80;
continue;
}
// Four bytes of UTF-8
if (codePoint < 0x110000) {
data[offset++] = codePoint >>> 18 & 0x07 | 0xf0;
data[offset++] = codePoint >>> 12 & 0x3f | 0x80;
data[offset++] = codePoint >>> 6 & 0x3f | 0x80;
data[offset++] = codePoint >>> 0 & 0x3f | 0x80;
continue;
}
throw new Error('bad codepoint ' + codePoint);
}
}
const __chars = function () {
let data: string[] = [];
for (let i = 0; i < 1024; i++) data[i] = String.fromCharCode(i);
return data;
}();
function throwError(err: string) {
throw new Error(err);
}
export function asciiRead(data: number, offset: number, length: number) {
let chars = __chars;
let str: string | undefined = void 0;
let byte = data;
// One byte character
if ((byte & 0x80) !== 0x00) throwError('Invalid byte ' + byte.toString(16));
str = chars[byte];
return str;
}
export function asciiByteCount(str: string) {
let count = 0;
for (let i = 0, l = str.length; i < l; i++) {
let codePoint = str.charCodeAt(i);
if (codePoint < 0x80) {
count += 1;
continue;
}
if (codePoint < 0x800) {
count += 2;
continue;
}
if (codePoint < 0x10000) {
count += 3;
continue;
}
if (codePoint < 0x110000) {
count += 4;
continue;
}
throwError('bad codepoint ' + codePoint);
}
return count;
}
\ No newline at end of file
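A minimal usage sketch for the ASCII helpers above (hypothetical; the import path and header string are assumptions, not part of this commit): compute the byte count first, then encode into a pre-allocated buffer.
import { asciiWrite, asciiByteCount } from 'mol-io/common/ascii'; // import path is an assumption
const header = 'ply format ascii 1.0';
const buffer = new Uint8Array(asciiByteCount(header)); // exact byte length of the encoded string
asciiWrite(buffer, 0, header); // fills the buffer with the UTF-8 bytes of `header`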
......@@ -60,6 +60,7 @@ interface TokenizerState {
* Eat everything until a whitespace/newline occurs.
*/
function eatValue(state: TokenizerState) {
console.log("hello");
while (state.position < state.length) {
switch (state.data.charCodeAt(state.position)) {
case 9: // \t
......
......@@ -85,6 +85,7 @@ export namespace Tokenizer {
/** Sets the current token start to current position and moves to the next line. */
export function markLine(state: Tokenizer) {
state.tokenStart = state.position;
console.log("hello");
eatLine(state);
}
......
/**
* Copyright (c) 2017 mol* contributors, licensed under MIT, See LICENSE file for more info.
*
* @author Alexander Rose <alexander.rose@weirdbyte.de>
*/
import { CifField as CsvColumn } from '../../cif/data-model'
export { CsvColumn }
export interface PlyFile {
readonly name?: string,
readonly PLY_File: ply_form
}
export function CsvFile(PLY_File: ply_form, name?: string): PlyFile {
return { name, PLY_File };
}
export interface ply_form {
readonly rowCount: number,
readonly vertexCount: number,
readonly faceCount: number,
readonly propertyCount: number,
readonly initialHead: ReadonlyArray<string>,
getColumn(name: string): CsvColumn | undefined
}
export function CsvTable(rowCount: number, vertexCount: number, faceCount: number, propertyCount: number, initialHead: string[], columns: CsvColumns): ply_form {
return { rowCount, vertexCount, faceCount, propertyCount, initialHead: [...initialHead], getColumn(name) { return columns[name]; } };
}
export type CsvColumns = { [name: string]: CsvColumn }
// export namespace CsvTable {
// export function empty(name: string): Table {
// return { rowCount: 0, name, fieldNames: [], getColumn(name: string) { return void 0; } };
// };
// }
\ No newline at end of file
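A hedged sketch of how this data model might be consumed once a PLY file has been parsed. The column name 'x' and the CifField-style row accessors (str/int/float) are assumptions based on the CsvColumn alias above, not guaranteed by this commit.
declare const file: PlyFile; // assumed to be an already parsed PlyFile
const table = file.PLY_File;
console.log(table.rowCount, table.vertexCount, table.faceCount);
const x = table.getColumn('x'); // CsvColumn (CifField) | undefined
if (x) console.log(x.float(0)); // read the first row of the column as a float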
/**
* Copyright (c) 2017 mol* contributors, licensed under MIT, See LICENSE file for more info.
*
* @author Alexander Rose <alexander.rose@weirdbyte.de>
*/
import Field from '../../cif/text/field'
export default Field
\ No newline at end of file
/**
* Copyright (c) 2017 mol* contributors, licensed under MIT, See LICENSE file for more info.
*
* @author Alexander Rose <alexander.rose@weirdbyte.de>
*/
// import { Column } from 'mol-data/db'
import { Tokens, TokenBuilder, Tokenizer } from '../../common/text/tokenizer'
import * as Data from './data-model'
import Field from './field'
import Result from '../../result'
import { Task, RuntimeContext, chunkedSubtask, } from 'mol-task'
const enum PlyTokenType {
Value = 0,
Comment = 1,
End = 2,
property = 3
}
interface State {
data: string;
tokenizer: Tokenizer,
tokenType: PlyTokenType;
runtimeCtx: RuntimeContext,
tokens: Tokens[],
fieldCount: number,
recordCount: number,
columnCount: number,
initialHead: string[],
propertyNames: string[],
commentCharCode: number,
propertyCharCode: number
}
function State(data: string, runtimeCtx: RuntimeContext, opts: PlyOptions): State {
const tokenizer = Tokenizer(data)
return {
data,
tokenizer,
tokenType: PlyTokenType.End,
runtimeCtx,
tokens: [],
fieldCount: 0,
recordCount: 0,
columnCount: 0,
initialHead: [],
propertyNames: [],
commentCharCode: opts.comment.charCodeAt(0),
propertyCharCode: opts.property.charCodeAt(0)
};
}
/**
* Eat everything until a delimiter (whitespace) or newline occurs.
* Ignores whitespace at the end of the value, i.e. trim right.
* Returns true when a newline occurs after the value.
*/
function eatValue(state: Tokenizer) {
while (state.position < state.length) {
const c = state.data.charCodeAt(state.position);
++state.position
switch (c) {
case 10: // \n
case 13: // \r
return true;
case 32: // ' ' the delimiter in PLY is a space (char code 32)
return;
case 9: // \t
break;
default:
++state.tokenEnd;
break;
}
}
}
function skipWhitespace(state: Tokenizer) {
let prev = -1;
while (state.position < state.length) {
const c = state.data.charCodeAt(state.position);
switch (c) {
case 9: // '\t'
//case 32: // ' '
prev = c;
++state.position;
break;
case 10: // \n
// handle \r\n
if (prev !== 13) {
++state.lineNumber;
}
prev = c;
++state.position;
break;
case 13: // \r
prev = c;
++state.position;
++state.lineNumber;
break;
default:
return;
}
}
}
function skipLine(state: Tokenizer) {
while (state.position < state.length) {
const c = state.data.charCodeAt(state.position);
if (c === 10 || c === 13) return // \n or \r
++state.position
}
}
/**
* Move to the next token.
* Returns true when the current char is a newline, i.e. indicating a full record.
*/
function moveNextInternal(state: State) {
const tokenizer = state.tokenizer
//skipWhitespace(tokenizer);
if (tokenizer.position >= tokenizer.length) {
state.tokenType = PlyTokenType.End;
return true;
}
tokenizer.tokenStart = tokenizer.position;
tokenizer.tokenEnd = tokenizer.position;
const c = state.data.charCodeAt(tokenizer.position);
switch (c) {
case state.commentCharCode:
state.tokenType = PlyTokenType.Comment;
skipLine(tokenizer);
break;
case state.propertyCharCode:
state.tokenType = PlyTokenType.property;
// return eatProperty(tokenizer);
// note: falls through to the Value handling below while eatProperty is not implemented
default:
state.tokenType = PlyTokenType.Value;
return eatValue(tokenizer);
}
}
/**
* Moves to the next non-comment token/line.
* Returns true when the current char is a newline, i.e. indicating a full record.
*/
function moveNext(state: State) {
let newRecord = moveNextInternal(state);
while (state.tokenType === PlyTokenType.Comment) { // skip comment lines (marco)
newRecord = moveNextInternal(state);
}
return newRecord
}
function readRecordsChunk(chunkSize: number, state: State) {
if (state.tokenType === PlyTokenType.End) return 0
let newRecord = moveNext(state);
if (newRecord) ++state.recordCount
const { tokens, tokenizer } = state;
let counter = 0;
while (state.tokenType === PlyTokenType.Value && counter < chunkSize) {
TokenBuilder.add(tokens[state.fieldCount % state.columnCount], tokenizer.tokenStart, tokenizer.tokenEnd);
++state.fieldCount
newRecord = moveNext(state);
if (newRecord) ++state.recordCount
++counter;
}
return counter;
}
function readRecordsChunks(state: State) {
return chunkedSubtask(state.runtimeCtx, 100000, state, readRecordsChunk,
(ctx, state) => ctx.update({ message: 'Parsing...', current: state.tokenizer.position, max: state.data.length }));
}
function addColumn(state: State) {
state.initialHead.push(Tokenizer.getTokenString(state.tokenizer))
state.tokens.push(TokenBuilder.create(state.tokenizer, state.data.length / 80))
}
function init(state: State) { // reads the first two header lines ('ply', 'format ascii 1.0') to get the initial columns (marco)
let newRecord = moveNext(state)
while (!newRecord) { // newRecord is only true when a newline occurs (marco)
addColumn(state)
newRecord = moveNext(state);
}
addColumn(state)
newRecord = moveNext(state);
while (!newRecord) {
addColumn(state)
newRecord = moveNext(state);
}
addColumn(state)
if (state.initialHead[0] !== 'ply') {
console.log('ERROR: this is not a .ply file!')
throw new Error('this is not a .ply file!');
}
if (state.initialHead[2] !== 'ascii') {
console.log('ERROR: only ASCII encoding is supported!');
throw new Error('only ASCII encoding is supported!');
}
state.columnCount = state.initialHead.length
return 1;
}
async function handleRecords(state: State): Promise<Data.ply_form> {
if (!init(state)) {
console.log('ERROR: parsing file (PLY) failed!')
throw new Error('parsing file (PLY) failed!');
}
await readRecordsChunks(state)
const columns: Data.CsvColumns = Object.create(null);
for (let i = 0; i < state.columnCount; ++i) {
columns[state.initialHead[i]] = Field(state.tokens[i], state.recordCount);
}
return Data.CsvTable(state.recordCount, 0, 0, 0, state.initialHead, columns)
}
async function parseInternal(data: string, ctx: RuntimeContext, opts: PlyOptions): Promise<Result<Data.PlyFile>> {
const state = State(data, ctx, opts);
ctx.update({ message: 'Parsing...', current: 0, max: data.length });
const table = await handleRecords(state)
const result = Data.CsvFile(table)
console.log(result);
return Result.success(result);
}
interface PlyOptions {
comment: string;
property: string;
}
export function parse(data: string, opts?: Partial<PlyOptions>) {
const completeOpts = Object.assign({}, { comment: 'c', property: 'p' }, opts)
return Task.create<Result<Data.PlyFile>>('Parse PLY', async ctx => {
return await parseInternal(data, ctx, completeOpts);
});
}
export default parse;
\ No newline at end of file
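A short usage sketch for the parser above (a sketch only; `plyString` is assumed to hold the file contents, and the Result/Task handling mirrors how ParsePLY consumes the parser further down in this commit):
declare const plyString: string;
async function example() {
    const result = await parse(plyString).run();
    if (result.isError) {
        console.error(result.message); // parsing failed, e.g. non-ASCII PLY
    } else {
        const table = result.result.PLY_File;
        console.log(table.rowCount, table.initialHead); // number of records and header tokens
    }
}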
/**
* Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
*
* @author David Sehnal <david.sehnal@gmail.com>
*
* Adapted from LiteMol
*/
import { Task, RuntimeContext } from 'mol-task';
export function readFromFile(file: File) {
return <Task<number | string>>readFromFileInternal(file);
}
async function processFile(ctx: RuntimeContext, e: any) {
const data = (e.target as FileReader).result;
return data as string;
}
function readData(ctx: RuntimeContext, action: string, data: XMLHttpRequest | FileReader): Promise<any> {
return new Promise<any>((resolve, reject) => {
data.onerror = (e: any) => {
const error = (<FileReader>e.target).error;
reject(error ? error : 'Failed.');
};
data.onabort = () => reject(Task.Aborted(''));
data.onprogress = (e: ProgressEvent) => {
if (e.lengthComputable) {
ctx.update({ message: action, isIndeterminate: false, current: e.loaded, max: e.total });
} else {
ctx.update({ message: `${action} ${(e.loaded / 1024 / 1024).toFixed(2)} MB`, isIndeterminate: true });
}
}
data.onload = (e: any) => resolve(e);
});
}
function readFromFileInternal(file: File): Task<string | number> {
let reader: FileReader | undefined = void 0;
return Task.create('Read File', async ctx => {
try {
reader = new FileReader();
reader.readAsBinaryString(file);
ctx.update({ message: 'Opening file...', canAbort: true });
const e = await readData(ctx, 'Reading...', reader);
const result = processFile(ctx, e);
return result;
} finally {
reader = void 0;
}
}, () => {
if (reader) reader.abort();
});
}
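A hedged usage sketch for readFromFile, wired to a file input element (the element id and the change handler are assumptions for illustration):
// hypothetical <input type="file" id="ply-input"> somewhere in the page
const input = document.getElementById('ply-input') as HTMLInputElement;
input.addEventListener('change', async () => {
    const file = input.files && input.files[0];
    if (!file) return;
    const content = await readFromFile(file).run(); // Task resolves to string (or number)
    if (typeof content === 'string') console.log(content.slice(0, 120)); // preview the first 120 characters
});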
/**
* Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
*
* @author David Sehnal <david.sehnal@gmail.com>
*/
import { PluginStateTransform } from '../../../../mol-plugin/state/objects';
import { PluginStateObject as SO } from '../../../../mol-plugin/state/objects';
import { Task } from 'mol-task';
import PLY from 'mol-io/reader/ply/parse_data/ply_parser'
import { ParamDefinition as PD } from 'mol-util/param-definition';
import { Transformer } from 'mol-state';
import { readFromFile } from './data-source';
export { ReadFile_ascii }
type ReadFile_ascii = typeof ReadFile_ascii
const ReadFile_ascii = PluginStateTransform.BuiltIn({
name: 'ReadFile_ascii',
display: { name: 'ReadFile_ascii', description: 'Read string data from the specified file' },
from: SO.Root,
to: [SO.Data.String],
params: {
file: PD.File(),
label: PD.makeOptional(PD.Text('')),
isBinary: PD.makeOptional(PD.Boolean(false, { description: 'If true, open file as binary (string otherwise)' }))
}
})({
apply({ params: p }) {
return Task.create('Open File', async ctx => {
const data = await readFromFile(p.file).runInContext(ctx);
return new SO.Data.String(data as string, { label: p.label ? p.label : p.file.name });
});
},
update({ oldParams, newParams, b }) {
if (oldParams.label !== newParams.label) {
(b.label as string) = newParams.label || oldParams.file.name;
return Transformer.UpdateResult.Updated;
}
return Transformer.UpdateResult.Unchanged;
},
isSerializable: () => ({ isSerializable: false, reason: 'Cannot serialize user loaded files.' })
});
export { ParsePLY }
type ParsePLY = typeof ParsePLY
const ParsePLY = PluginStateTransform.BuiltIn({
name: 'parse-ply',
display: { name: 'Parse PLY', description: 'Parse PLY from String' },
from: [SO.Data.String],
to: SO.Format.Ply
})({
apply({ a }) {
return Task.create('Parse PLY', async ctx => {
const parsed = await (PLY(a.data).runInContext(ctx));
if (parsed.isError) throw new Error(parsed.message);
return new SO.Format.Ply(parsed.result);
});
}
});
\ No newline at end of file
......@@ -11,7 +11,7 @@ import * as React from 'react';
import * as ReactDOM from 'react-dom';
import { PluginCommands } from './command';
import { PluginSpec } from './spec';
import { DownloadStructure, CreateComplexRepresentation, OpenStructure } from './state/actions/basic';
import { DownloadStructure, CreateComplexRepresentation, OpenStructure, PLYtest } from './state/actions/basic';
import { StateTransforms } from './state/transforms';
import { PluginBehaviors } from './behavior';
......@@ -24,6 +24,7 @@ const DefaultSpec: PluginSpec = {
actions: [
PluginSpec.Action(DownloadStructure),
PluginSpec.Action(OpenStructure),
PluginSpec.Action(PLYtest),
PluginSpec.Action(CreateComplexRepresentation),
PluginSpec.Action(StateTransforms.Data.Download),
PluginSpec.Action(StateTransforms.Data.ParseCif),
......
......@@ -84,13 +84,31 @@ export const OpenStructure = StateAction.build({
const data = b.toRoot().apply(StateTransforms.Data.ReadFile, { file: params.file, isBinary: /\.bcif$/i.test(params.file.name) });
return state.update(createStructureTree(ctx, data, false));
});
import * as data_functions from '../../../mol-io/reader/ply/read_data/data'
export const PLYtest = StateAction.build({
display: { name: 'PLY Test', description: 'Load and parse a PLY file' },
from: PluginStateObject.Root,
params: { file: PD.File({ accept: '.ply' }) }
})(({ params, state }, ctx: PluginContext) => {
const b = state.build();
const data = b.toRoot().apply(data_functions.ReadFile_ascii, { file: params.file, isBinary: false });
return state.update(getPLYdata(ctx, data));
});
function getPLYdata(ctx: PluginContext, b: StateTreeBuilder.To<PluginStateObject.Data.String>): StateTree {
let root = b
.apply(data_functions.ParsePLY);
console.log(data_functions.ParsePLY);
return root.getTree();
}
function createStructureTree(ctx: PluginContext, b: StateTreeBuilder.To<PluginStateObject.Data.Binary | PluginStateObject.Data.String>, supportProps: boolean): StateTree {
let root = b
.apply(StateTransforms.Data.ParseCif)
.apply(StateTransforms.Model.TrajectoryFromMmCif)
.apply(StateTransforms.Model.ModelFromTrajectory, { modelIndex: 0 });
if (supportProps) {
root = root.apply(StateTransforms.Model.CustomModelProperties);
}
......
......@@ -5,6 +5,7 @@
*/
import { CifFile } from 'mol-io/reader/cif';
import { PlyFile } from 'mol-io/reader/ply/parse_data/data-model';
import { Model as _Model, Structure as _Structure } from 'mol-model/structure';
import { VolumeData } from 'mol-model/volume';
import { PluginBehavior } from 'mol-plugin/behavior/behavior';
......@@ -56,6 +57,7 @@ export namespace PluginStateObject {
export namespace Format {
export class Json extends Create<any>({ name: 'JSON Data', typeClass: 'Data' }) { }
export class Cif extends Create<CifFile>({ name: 'CIF File', typeClass: 'Data' }) { }
export class Ply extends Create<PlyFile>({ name: 'PLY File', typeClass: 'Data' }) { }
}
export namespace Molecule {
......
......@@ -34,5 +34,6 @@
document.body.appendChild(script)
}
</script>
<script type="text/javascript" src="./render-shape.js"></script>
</body>
</html>
\ No newline at end of file
......@@ -16,6 +16,8 @@ import { Mesh } from 'mol-geo/geometry/mesh/mesh';
import { labelFirst } from 'mol-theme/label';
import { RuntimeContext, Progress } from 'mol-task';
const parent = document.getElementById('app')!
parent.style.width = '100%'
parent.style.height = '100%'
......@@ -56,7 +58,7 @@ async function getSphereMesh(ctx: RuntimeContext, centers: number[], mesh?: Mesh
const builderState = MeshBuilder.createState(centers.length * 128, centers.length * 128 / 2, mesh)
const t = Mat4.identity()
const v = Vec3.zero()
const sphere = Sphere(2)
const sphere = Sphere(4)
builderState.currentGroup = 0
for (let i = 0, il = centers.length / 3; i < il; ++i) {
// for production, calls to update should be guarded by `if (ctx.shouldUpdate)`
......@@ -69,8 +71,8 @@ async function getSphereMesh(ctx: RuntimeContext, centers: number[], mesh?: Mesh
}
const myData = {
centers: [0, 0, 0, 0, 3, 0],
colors: [ColorNames.tomato, ColorNames.springgreen],
centers: [0, 0, 0, 0, 3, 0, 1, 0, 4],
colors: [ColorNames.tomato, ColorNames.springgreen, ColorNames.springgreen],
labels: ['Sphere 0, Instance A', 'Sphere 1, Instance A', 'Sphere 0, Instance B', 'Sphere 1, Instance B'],
transforms: [Mat4.identity(), Mat4.fromTranslation(Mat4.zero(), Vec3.create(3, 0, 0))]
}
......@@ -96,7 +98,7 @@ async function getShape(ctx: RuntimeContext, data: MyData, props: {}, shape?: Sh
// Init ShapeRepresentation container
const repr = ShapeRepresentation(getShape, Mesh.Utils)
async function init() {
export async function init() {
// Create shape from myData and add to canvas3d
await repr.createOrUpdate({}, myData).run((p: Progress) => console.log(Progress.format(p)))
console.log(repr)
......@@ -110,4 +112,4 @@ async function init() {
await repr.createOrUpdate({}, myData).run()
}, 1000)
}
init()
\ No newline at end of file
export default init();
\ No newline at end of file