Skip to content
Snippets Groups Projects
Commit 84bf6ed7 authored by Alexander Rose's avatar Alexander Rose
Browse files

Merge branch 'master' of https://github.com/molstar/molstar-proto

parents fd0e62da 59c7a8df
No related branches found
No related tags found
No related merge requests found
.*
node_modules
npm-debug.log
*.sublime-project
\ No newline at end of file
......@@ -6,4 +6,5 @@ npm-debug.log
*.sublime-workspace
web/render-test/index.js
\ No newline at end of file
web/render-test/index.js
.idea
# This is to build a container that demos the Molstar Canvas prototype
# Source material: https://nodejs.org/en/docs/guides/nodejs-docker-webapp/
# Source material: https://derickbailey.com/2017/05/31/how-a-650mb-node-js-image-for-docker-uses-less-space-than-a-50mb-image/
# Source material: https://hub.docker.com/_/node/

# Use the slimmed NodeJS base image, yielding a space savings of ~600MB (~66% of total)
FROM node:alpine

# Create app directory
WORKDIR /usr/src/app

# Install app dependencies first so the npm-install layer is cached independently
# of source changes.
# A wildcard is used to ensure both package.json AND package-lock.json AND tslint.json AND tsconfig.json are copied
# where available (npm@5+)
COPY *.json ./

# Install all dependencies
RUN npm install

# Copy the sources and build inside the container
COPY . .

# Build application and bundle results
RUN npm run build
# NOTE(review): the original had `COPY build/ build/` after each build step,
# which would overwrite the freshly built in-container output with the host's
# (possibly stale or absent) build directory; those redundant lines were removed.

# Build Canvas application and bundle results
RUN npm run build-canvas

# Open ports for HTTP
EXPOSE 8080/tcp

# Setup standalone simple webserver to run the demo
RUN npm install http-server -g

# Start NodeJS at container stand up
CMD [ "http-server", "build/canvas/", "-p", "8080" ]

# Developer helpers (what is inside this container?)
RUN node -v
RUN ls -alh
......@@ -78,6 +78,19 @@ From the root of the project:
and navigate to `build/viewer`
**Run via Docker**
Build the docker image
docker build -t molstar-proto .
Run the image
docker run -p 8080:8080 molstar-proto
### Code generation
**CIF schemas**
......
......@@ -30,6 +30,17 @@ namespace SymmetryOperator {
return { name, matrix, inverse: Mat4.invert(Mat4.zero(), matrix), isIdentity: false, hkl: _hkl };
}
/**
 * Tests whether the given 3x3 matrix plus offset form a proper
 * rotation-and-translation transform (within RotationEpsilon).
 */
export function checkIfRotationAndTranslation(rot: Mat3, offset: Vec3) {
    // Embed the 3x3 rotation into the upper-left corner of a 4x4 identity matrix.
    const m = Mat4.identity();
    for (let row = 0; row < 3; row++) {
        for (let col = 0; col < 3; col++) {
            Mat4.setValue(m, row, col, Mat3.getValue(rot, row, col));
        }
    }
    // Attach the offset as the translation component, then validate the
    // combined transform against the rotation/translation tolerance.
    Mat4.setTranslation(m, offset);
    return Mat4.isRotationAndTranslation(m, RotationEpsilon);
}
export function ofRotationAndOffset(name: string, rot: Mat3, offset: Vec3) {
const t = Mat4.identity();
for (let i = 0; i < 3; i++) {
......
......@@ -66,7 +66,8 @@ function getNcsOperators(format: mmCIF_Format) {
for (let i = 0; i < struct_ncs_oper._rowCount; i++) {
const m = Tensor.toMat3(matrixSpace, matrix.value(i));
const v = Tensor.toVec3(vectorSpace, vector.value(i));
opers[i] = SymmetryOperator.ofRotationAndOffset(`ncs_${id.value(i)}`, m, v);
if (!SymmetryOperator.checkIfRotationAndTranslation(m, v)) continue;
opers[opers.length] = SymmetryOperator.ofRotationAndOffset(`ncs_${id.value(i)}`, m, v);
}
return opers;
}
......
......@@ -4,28 +4,14 @@
* @author David Sehnal <david.sehnal@gmail.com>
*/
import * as argparse from 'argparse'
import { preprocessFile } from './preprocess/preprocess';
import * as cluster from 'cluster'
import { runChild } from './preprocess/parallel';
const cmdParser = new argparse.ArgumentParser({
addHelp: true,
description: 'Preprocess CIF files to include custom properties and convert them to BinaryCIF format.'
});
cmdParser.addArgument(['--input', '-i'], { help: 'Input filename', required: true });
cmdParser.addArgument(['--outCIF', '-oc'], { help: 'Output CIF filename', required: false });
cmdParser.addArgument(['--outBCIF', '-ob'], { help: 'Output BinaryCIF filename', required: false });
// TODO: "bulk" mode
interface CmdArgs {
input: string,
outCIF?: string,
outBCIF?: string
// Cluster entry point: the master process loads the coordinator module,
// while each forked worker runs the per-chunk job loop from ./preprocess/parallel.
if (cluster.isMaster) {
require('./preprocess/master');
} else {
runChild();
}
const cmdArgs = cmdParser.parseArgs() as CmdArgs;
if (cmdArgs.input) preprocessFile(cmdArgs.input, cmdArgs.outCIF, cmdArgs.outBCIF);
// example:
// node build\node_modules\servers\model\preprocess -i e:\test\Quick\1cbs_updated.cif -oc e:\test\mol-star\model\1cbs.cif -ob e:\test\mol-star\model\1cbs.bcif
\ No newline at end of file
......@@ -7,7 +7,7 @@
import { CifCategory, CifField, CifFrame, getCifFieldType } from 'mol-io/reader/cif';
import { CifWriter } from 'mol-io/writer/cif';
import { Task } from 'mol-task';
import { showProgress } from './util';
// import { showProgress } from './util';
function getCategoryInstanceProvider(cat: CifCategory, fields: CifWriter.Field[]): CifWriter.Category {
return {
......@@ -47,5 +47,5 @@ export function classifyCif(frame: CifFrame) {
ret.push(getCategoryInstanceProvider(cat, fields));
}
return ret;
}).run(showProgress, 250);
}).run();
}
\ No newline at end of file
/**
* Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
*
* @author David Sehnal <david.sehnal@gmail.com>
*/
import * as fs from 'fs'
import * as path from 'path'
import * as argparse from 'argparse'
import { preprocessFile } from './preprocess';
import { ParallelPreprocessConfig, runMaster } from './parallel';
// Command-line definition for the preprocessing tool. Three mutually exclusive
// modes are supported (see the dispatch further below): single file (--input),
// bulk JSON job list (--bulk), and whole-folder conversion (--folderIn).
const cmdParser = new argparse.ArgumentParser({
addHelp: true,
description: 'Preprocess CIF files to include custom properties and convert them to BinaryCIF format.'
});
cmdParser.addArgument(['--input', '-i'], { help: 'Input filename', required: false });
cmdParser.addArgument(['--outCIF', '-oc'], { help: 'Output CIF filename', required: false });
cmdParser.addArgument(['--outBCIF', '-ob'], { help: 'Output BinaryCIF filename', required: false });
cmdParser.addArgument(['--bulk', '-b'], { help: 'Bulk JSON ({ numProcesses?: number, entries: { source: string, cif?: string, bcif?: string }[] })', required: false });
cmdParser.addArgument(['--folderIn', '-f'], { help: 'Convert folder', required: false });
cmdParser.addArgument(['--folderOutCIF', '-foc'], { help: 'Convert folder text output', required: false });
cmdParser.addArgument(['--folderOutBCIF', '-fob'], { help: 'Convert folder binary output', required: false });
cmdParser.addArgument(['--folderNumProcesses', '-fp'], { help: 'Convert folder num processes', required: false });
// Shape of the parsed command-line options; all fields optional because the
// three modes use disjoint subsets of them.
interface CmdArgs {
bulk?: string, // path to a bulk JSON job description
input?: string, // single input CIF filename
outCIF?: string, // output CIF filename (single-file mode)
outBCIF?: string, // output BinaryCIF filename (single-file mode)
folderIn?: string, // input folder (folder mode)
folderOutCIF?: string, // text CIF output folder
folderOutBCIF?: string, // BinaryCIF output folder
folderNumProcesses?: string // worker count as a string; parsed with unary +
}
const cmdArgs = cmdParser.parseArgs() as CmdArgs;
// Mode dispatch: --input wins over --bulk, which wins over --folderIn.
// If none of the three is supplied, the tool exits without doing anything.
if (cmdArgs.input) preprocessFile(cmdArgs.input, cmdArgs.outCIF, cmdArgs.outBCIF);
else if (cmdArgs.bulk) runBulk(cmdArgs.bulk);
else if (cmdArgs.folderIn) runFolder(cmdArgs);
// Load the bulk-job description (JSON file) and hand it to the cluster master.
function runBulk(input: string) {
    const raw = fs.readFileSync(input, 'utf8');
    runMaster(JSON.parse(raw) as ParallelPreprocessConfig);
}
// Convert every *.cif file found in args.folderIn, distributing the work over
// args.folderNumProcesses worker processes (default 1).
function runFolder(args: CmdArgs) {
    const files = fs.readdirSync(args.folderIn!);
    const config: ParallelPreprocessConfig = { numProcesses: +args.folderNumProcesses! || 1, entries: [] };
    const cifTest = /\.cif$/;
    for (const f of files) {
        if (!cifTest.test(f)) continue;
        config.entries.push({
            source: path.join(args.folderIn!, f),
            // BUG FIX: the original read the module-level `cmdArgs` here rather than
            // the `args` parameter. They coincide for the current caller, but the
            // function must honor its own argument to be usable on its own.
            cif: args.folderOutCIF ? path.join(args.folderOutCIF!, f) : void 0,
            bcif: args.folderOutBCIF ? path.join(args.folderOutBCIF!, path.parse(f).name + '.bcif') : void 0,
        });
    }
    runMaster(config);
}
// example:
// node build\node_modules\servers\model\preprocess -i e:\test\Quick\1cbs_updated.cif -oc e:\test\mol-star\model\1cbs.cif -ob e:\test\mol-star\model\1cbs.bcif
\ No newline at end of file
/**
* Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
*
* @author David Sehnal <david.sehnal@gmail.com>
*/
import * as path from 'path'
import * as cluster from 'cluster'
import { now } from 'mol-task';
import { PerformanceMonitor } from 'mol-util/performance-monitor';
import { preprocessFile } from './preprocess';
// One file-conversion job: read `source` and optionally emit a text CIF
// (`cif`) and/or a BinaryCIF (`bcif`) output file.
export interface PreprocessEntry {
source: string,
cif?: string,
bcif?: string
}
// Bulk configuration: how many worker processes to fork and which entries
// to distribute among them.
export interface ParallelPreprocessConfig {
numProcesses?: number,
entries: PreprocessEntry[]
}
/**
 * Master side of the parallel preprocessor: splits the entry list into one
 * chunk per process, forks a worker per chunk, relays progress/error messages
 * to the console, and hands each worker its chunk.
 */
export function runMaster(config: ParallelPreprocessConfig) {
    // One chunk of entries per requested process.
    const chunks = partitionArray(config.entries, config.numProcesses || 1);
    // const numForks = Math.min(parts.length, config.numProcesses);
    const startedAt = now();
    let done = 0;
    const handleMessage = (msg: any) => {
        if (msg.type === 'tick') {
            // A worker finished one entry: report overall progress and average pace.
            done++;
            const elapsed = now() - startedAt;
            console.log(`[${done}/${config.entries.length}] in ${PerformanceMonitor.format(elapsed)} (avg ${PerformanceMonitor.format(elapsed / done)}).`);
        } else if (msg.type === 'error') {
            console.error(`${msg.id}: ${msg.error}`)
        }
    }
    // Fork one worker per chunk and subscribe to its progress messages.
    for (const _ of chunks) {
        cluster.fork().on('message', handleMessage);
    }
    // Hand each forked worker its chunk of entries.
    let next = 0;
    for (const id in cluster.workers) {
        cluster.workers[id]!.send(chunks[next++]);
    }
}
/**
 * Worker side of the parallel preprocessor: waits for the master to send a
 * chunk of entries, processes each one, reports errors and progress ticks
 * back via IPC, then exits.
 */
export function runChild() {
    process.on('message', async (entries: PreprocessEntry[]) => {
        for (const job of entries) {
            try {
                await preprocessFile(job.source, job.cif, job.bcif);
            } catch (e) {
                // Report the failure to the master, keyed by the source file's base name.
                process.send!({ type: 'error', id: path.parse(job.source).name, error: '' + e });
            }
            // Always tick — even after an error — so the master's counter stays accurate.
            process.send!({ type: 'tick' });
        }
        // All assigned work is done; terminate this worker process.
        process.exit();
    });
}
// Splits xs into consecutive buckets of ceil(xs.length / count) elements;
// the final bucket may be shorter. Returns [] for an empty input.
function partitionArray<T>(xs: T[], count: number): T[][] {
    const size = Math.ceil(xs.length / count);
    const buckets: T[][] = [];
    for (let start = 0; start < xs.length; start += size) {
        buckets.push(xs.slice(start, start + size));
    }
    return buckets;
}
......@@ -6,58 +6,62 @@
import { readStructure } from '../server/structure-wrapper';
import { classifyCif } from './converter';
import { ConsoleLogger } from 'mol-util/console-logger';
// import { ConsoleLogger } from 'mol-util/console-logger';
import { Structure } from 'mol-model/structure';
import { CifWriter } from 'mol-io/writer/cif';
import Writer from 'mol-io/writer/writer';
import { wrapFileToWriter } from '../server/api-local';
import { Task } from 'mol-task';
import { showProgress, clearLine } from './util';
import { Task/*, now*/ } from 'mol-task';
import { /*showProgress, clearLine */ } from './util';
import { encode_mmCIF_categories, CifExportContext } from 'mol-model/structure/export/mmcif';
// TODO: error handling, bulk mode
// TODO: error handling
// let linearId = 0;
/**
 * Reads a CIF structure from `filename`, classifies its categories, and
 * re-encodes it as text CIF (`outputCif`) and/or BinaryCIF (`outputBcif`).
 *
 * NOTE(review): this chunk appears to be a merged diff view — both the
 * pre-change (active) and post-change (commented-out) logging lines are
 * present; only one variant should survive in the real file. Verify against
 * the repository before relying on the exact statements here.
 */
export async function preprocessFile(filename: string, outputCif?: string, outputBcif?: string) {
ConsoleLogger.log('ModelServer', `Reading $(unknown)...`);
// linearId++;
//const started = now();
//ConsoleLogger.log(`${linearId}`, `Reading '$(unknown)'...`);
// Parse the input file into a structure + CIF frame pair.
const input = await readStructure('entry', '_local_', filename);
ConsoleLogger.log('ModelServer', `Classifying CIF categories...`);
//ConsoleLogger.log(`${linearId}`, `Classifying CIF categories...`);
// Determine per-category encodings/providers for the writer.
const categories = await classifyCif(input.cifFrame);
clearLine();
//clearLine();
// Export context built from the structure's first model.
const exportCtx = CifExportContext.create(input.structure, input.structure.models[0]);
if (outputCif) {
ConsoleLogger.log('ModelServer', `Encoding CIF...`);
//ConsoleLogger.log(`${linearId}`, `Encoding CIF...`);
const writer = wrapFileToWriter(outputCif);
const encoder = CifWriter.createEncoder({ binary: false });
await encode(input.structure, input.cifFrame.header, categories, encoder, exportCtx, writer);
clearLine();
// clearLine();
writer.end();
}
if (outputBcif) {
ConsoleLogger.log('ModelServer', `Encoding BinaryCIF...`);
// ConsoleLogger.log(`${linearId}`, `Encoding BinaryCIF...`);
const writer = wrapFileToWriter(outputBcif);
const encoder = CifWriter.createEncoder({ binary: true, binaryAutoClassifyEncoding: true });
await encode(input.structure, input.cifFrame.header, categories, encoder, exportCtx, writer);
clearLine();
//clearLine();
writer.end();
}
ConsoleLogger.log('ModelServer', `Done.`);
// ConsoleLogger.log(`${linearId}`, `Finished '$(unknown)' in ${Math.round(now() - started)}ms`);
}
/**
 * Writes the given categories plus the standard mmCIF export categories for
 * `structure` into `writer` via the supplied encoder, wrapped in a Task.
 *
 * NOTE(review): the two consecutive `}).run(...)` lines below are conflicting
 * old/new diff lines (the scraped view kept both); only one of them can exist
 * in the actual file — as shown this would not compile. Confirm against the
 * repository.
 */
function encode(structure: Structure, header: string, categories: CifWriter.Category[], encoder: CifWriter.Encoder, exportCtx: CifExportContext, writer: Writer) {
return Task.create('Encode', async ctx => {
// Categories written explicitly here are skipped by the generic mmCIF export below.
const skipCategoryNames = new Set<string>(categories.map(c => c.name));
encoder.startDataBlock(header);
let current = 0;
// let current = 0;
for (const cat of categories){
encoder.writeCategory(cat);
current++;
if (ctx.shouldUpdate) await ctx.update({ message: 'Encoding...', current, max: categories.length });
// current++;
// if (ctx.shouldUpdate) await ctx.update({ message: 'Encoding...', current, max: categories.length });
}
// Emit the remaining standard mmCIF categories, then flush the encoder.
encode_mmCIF_categories(encoder, structure, { skipCategoryNames, exportCtx });
encoder.encode();
encoder.writeTo(writer);
}).run(showProgress, 250);
}).run();
}
\ No newline at end of file
......@@ -6,14 +6,14 @@
*/
import { Model } from 'mol-model/structure';
import { PDBe_structureQualityReport } from './properties/pdbe';
import { RCSB_assemblySymmetry } from './properties/rcsb';
//import { PDBe_structureQualityReport } from './properties/pdbe';
//import { RCSB_assemblySymmetry } from './properties/rcsb';
/**
 * Starts attaching custom properties to the model and returns the pending
 * promises so callers can await them in parallel.
 *
 * NOTE(review): the active calls and the commented-out duplicates below are
 * old/new diff lines shown together; judging by the commented-out imports
 * above, the change's intent was to disable both property providers. Verify
 * against the repository before editing.
 */
export function attachModelProperties(model: Model): Promise<any>[] {
// return a list of promises that start attaching the props in parallel
// (if there are downloads etc.)
return [
PDBe_structureQualityReport(model),
RCSB_assemblySymmetry(model)
//PDBe_structureQualityReport(model),
//RCSB_assemblySymmetry(model)
];
}
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment