diff --git a/src/servers/volume/common/binary-schema.ts b/src/servers/volume/common/binary-schema.ts
index 8ce1b47007f50177248f128924d75f617a8daf22..3bd0b10cc276d19f6fee996b42e398d0bb473913 100644
--- a/src/servers/volume/common/binary-schema.ts
+++ b/src/servers/volume/common/binary-schema.ts
@@ -7,6 +7,7 @@
  */
 
 import * as UTF8 from 'mol-io/common/utf8'
+import { SimpleBuffer } from 'mol-io/common/simple-buffer';
 
 export type Bool = { kind: 'bool' }
 export type Int = { kind: 'int' }
@@ -105,7 +106,7 @@ export function encode(element: Element, src: any): Buffer {
     return write(element, src);
 }
 
-function decodeElement(e: Element, buffer: Buffer, offset: number, target: { value: any }) {
+function decodeElement(e: Element, buffer: SimpleBuffer, offset: number, target: { value: any }) {
     switch (e.kind) {
         case 'bool': target.value = !!buffer.readInt8(offset); offset += 1; break;
         case 'int': target.value = buffer.readInt32LE(offset); offset += 4; break;
@@ -147,7 +148,7 @@ function decodeElement(e: Element, buffer: Buffer, offset: number, target: { val
     return offset;
 }
 
-export function decode<T>(element: Element, buffer: Buffer, offset?: number) {
+export function decode<T>(element: Element, buffer: SimpleBuffer, offset?: number) {
     const target = { value: void 0 as any };
     decodeElement(element, buffer, offset! | 0, target);
     return target.value as T;
diff --git a/src/servers/volume/common/data-format.ts b/src/servers/volume/common/data-format.ts
index f1fdeda83277dd145831667fdad3dd4f476bd7af..0492ab16ce2bda7263a578e26d5db4a1bdd1cef3 100644
--- a/src/servers/volume/common/data-format.ts
+++ b/src/servers/volume/common/data-format.ts
@@ -6,8 +6,8 @@
  * @author David Sehnal <david.sehnal@gmail.com>
  */
 
-import * as File from './file'
 import * as Schema from './binary-schema'
+import { FileHandle } from 'mol-io/common/file-handle';
 
 export type ValueType = 'float32' | 'int8' | 'int16'
 
@@ -121,12 +121,12 @@ export function encodeHeader(header: Header) {
     return Schema.encode(headerSchema, header);
 }
 
-export async function readHeader(file: number): Promise<{ header: Header, dataOffset: number }> {
-    let { buffer } = await File.readBuffer(file, 0, 4 * 4096);
+export async function readHeader(file: FileHandle): Promise<{ header: Header, dataOffset: number }> {
+    let { buffer } = await file.readBuffer(0, 4 * 4096);
     const headerSize = buffer.readInt32LE(0);
 
     if (headerSize > buffer.byteLength - 4) {
-        buffer = (await File.readBuffer(file, 0, headerSize + 4)).buffer;
+        buffer = (await file.readBuffer(0, headerSize + 4)).buffer;
     }
 
     const header = Schema.decode<Header>(headerSchema, buffer, 4);
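// ---------------------------------------------------------------------------
// Editor's example (not part of the patch): how the FileHandle-based
// readHeader above is meant to be called, mirroring the pattern used in
// servers/volume/server/api.ts later in this patch. `printHeader` is a
// hypothetical helper name.
// ---------------------------------------------------------------------------
import * as File from './file'
import * as DataFormat from './data-format'
import { FileHandle } from 'mol-io/common/file-handle';

async function printHeader(filename: string) {
    // open a plain fs descriptor, then wrap it in the FileHandle abstraction
    const file = FileHandle.fromDescriptor(await File.openRead(filename));
    try {
        const { header, dataOffset } = await DataFormat.readHeader(file);
        console.log(header, dataOffset);
    } finally {
        // close() now lives on the handle instead of File.close(fd)
        file.close();
    }
}
// ---------------------------------------------------------------------------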
diff --git a/src/servers/volume/common/file.ts b/src/servers/volume/common/file.ts
index 6b83f00cef7e225de8dde078e257698fc17eb162..8067b9eaf87419a1f0919a99c243c0c6e8f546dd 100644
--- a/src/servers/volume/common/file.ts
+++ b/src/servers/volume/common/file.ts
@@ -9,6 +9,8 @@
 import * as fs from 'fs'
 import * as path from 'path'
 import * as DataFormat from './data-format'
+import { FileHandle } from 'mol-io/common/file-handle';
+import { SimpleBuffer } from 'mol-io/common/simple-buffer';
 
 export const IsNativeEndianLittle = new Uint16Array(new Uint8Array([0x12, 0x34]).buffer)[0] === 0x3412;
 
@@ -29,43 +31,6 @@ export async function openRead(filename: string) {
     });
 }
 
-export function readBuffer(file: number, position: number, sizeOrBuffer: Buffer | number, size?: number, byteOffset?: number): Promise<{ bytesRead: number, buffer: Buffer }> {
-    return new Promise((res, rej) => {
-        if (typeof sizeOrBuffer === 'number') {
-            let buff = new Buffer(new ArrayBuffer(sizeOrBuffer));
-            fs.read(file, buff, 0, sizeOrBuffer, position, (err, bytesRead, buffer) => {
-                if (err) {
-                    rej(err);
-                    return;
-                }
-                res({ bytesRead, buffer });
-            });
-        } else {
-            if (size === void 0) {
-                rej('readBuffer: Specify size.');
-                return;
-            }
-
-            fs.read(file, sizeOrBuffer, byteOffset ? +byteOffset : 0, size, position, (err, bytesRead, buffer) => {
-                if (err) {
-                    rej(err);
-                    return;
-                }
-                res({ bytesRead, buffer });
-            });
-        }
-    })
-}
-
-export function writeBuffer(file: number, position: number, buffer: Buffer, size?: number): Promise<number> {
-    return new Promise<number>((res, rej) => {
-        fs.write(file, buffer, 0, size !== void 0 ? size : buffer.length, position, (err, written) => {
-            if (err) rej(err);
-            else res(written);
-        })
-    })
-}
-
 function makeDir(path: string, root?: string): boolean {
     let dirs = path.split(/\/|\\/g),
         dir = dirs.shift();
@@ -95,19 +60,10 @@ export function createFile(filename: string) {
     });
 }
 
-const __emptyFunc = function () { };
-export function close(file: number | undefined) {
-    try {
-        if (file !== void 0) fs.close(file, __emptyFunc);
-    } catch (e) {
-
-    }
-}
-
-const smallBuffer = new Buffer(8);
-export async function writeInt(file: number, value: number, position: number) {
+const smallBuffer = SimpleBuffer.fromBuffer(new Buffer(8));
+export async function writeInt(file: FileHandle, value: number, position: number) {
     smallBuffer.writeInt32LE(value, 0);
-    await writeBuffer(file, position, smallBuffer, 4);
+    await file.writeBuffer(position, smallBuffer, 4);
 }
 
 export interface TypedArrayBufferContext {
@@ -144,7 +100,7 @@ export function createTypedArrayBufferContext(size: number, type: DataFormat.Val
     };
 }
 
-function flipByteOrder(source: Buffer, target: Uint8Array, byteCount: number, elementByteSize: number, offset: number) {
+function flipByteOrder(source: SimpleBuffer, target: Uint8Array, byteCount: number, elementByteSize: number, offset: number) {
     for (let i = 0, n = byteCount; i < n; i += elementByteSize) {
         for (let j = 0; j < elementByteSize; j++) {
             target[offset + i + elementByteSize - j - 1] = source[offset + i + j];
@@ -152,19 +108,19 @@ function flipByteOrder(source: Buffer, target: Uint8Array, byteCount: number, el
     }
 }
 
-export async function readTypedArray(ctx: TypedArrayBufferContext, file: number, position: number, count: number, valueOffset: number, littleEndian?: boolean) {
+export async function readTypedArray(ctx: TypedArrayBufferContext, file: FileHandle, position: number, count: number, valueOffset: number, littleEndian?: boolean) {
     let byteCount = ctx.elementByteSize * count;
     let byteOffset = ctx.elementByteSize * valueOffset;
 
-    await readBuffer(file, position, ctx.readBuffer, byteCount, byteOffset);
+    await file.readBuffer(position, ctx.readBuffer, byteCount, byteOffset);
     if (ctx.elementByteSize > 1 && ((littleEndian !== void 0 && littleEndian !== IsNativeEndianLittle) || !IsNativeEndianLittle)) {
-        // fix the endian 
+        // fix the endian
         flipByteOrder(ctx.readBuffer, ctx.valuesBuffer, byteCount, ctx.elementByteSize, byteOffset);
     }
    return ctx.values;
 }
 
-export function ensureLittleEndian(source: Buffer, target: Buffer, byteCount: number, elementByteSize: number, offset: number) {
+export function ensureLittleEndian(source: SimpleBuffer, target: SimpleBuffer, byteCount: number, elementByteSize: number, offset: number) {
     if (IsNativeEndianLittle) return;
     if (!byteCount || elementByteSize <= 1) return;
     flipByteOrder(source, target, byteCount, elementByteSize, offset);
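// ---------------------------------------------------------------------------
// Editor's note (not part of the patch): the removed readBuffer/writeBuffer/
// close helpers are replaced by methods on FileHandle. A rough sketch of the
// surface this file now relies on, inferred from the call sites in this patch;
// the authoritative definition is in mol-io/common/file-handle and may differ.
// ---------------------------------------------------------------------------
import { SimpleBuffer } from 'mol-io/common/simple-buffer';

interface FileHandleSketch {
    /** Read `size` bytes at `position`, either into a fresh buffer (number argument) or into the given buffer. */
    readBuffer(position: number, sizeOrBuffer: SimpleBuffer | number, size?: number, byteOffset?: number): Promise<{ bytesRead: number, buffer: SimpleBuffer }>
    /** Write `buffer` (optionally only its first `size` bytes) at `position`. */
    writeBuffer(position: number, buffer: SimpleBuffer, size?: number): Promise<number>
    /** Synchronous positional write, used below for file pre-allocation. */
    writeBufferSync(position: number, buffer: SimpleBuffer, size?: number): number
    /** Release the underlying file descriptor. */
    close(): void
}
// ---------------------------------------------------------------------------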
diff --git a/src/servers/volume/pack/ccp4.ts b/src/servers/volume/pack/ccp4.ts
index 46ac835bc1d1f6dd33d749d2a0458e8071e41657..f41d2d8a23ca5d7381119ab8db155e3115d90fb3 100644
--- a/src/servers/volume/pack/ccp4.ts
+++ b/src/servers/volume/pack/ccp4.ts
@@ -1,13 +1,16 @@
 /**
- * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ * Copyright (c) 2018-2019 mol* contributors, licensed under MIT, See LICENSE file for more info.
  *
  * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
  *
  * @author David Sehnal <david.sehnal@gmail.com>
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
  */
 
 import * as File from '../common/file'
 import * as DataFormat from '../common/data-format'
+import { FileHandle } from 'mol-io/common/file-handle';
+import { readCcp4Header } from 'mol-io/reader/ccp4/parser';
 
 export const enum Mode { Int8 = 0, Int16 = 1, Float32 = 2 }
 
@@ -40,7 +43,7 @@ export interface SliceBuffer {
 
 export interface Data {
     header: Header,
-    file: number,
+    file: FileHandle,
     slices: SliceBuffer
 }
 
@@ -84,47 +87,24 @@ export function compareHeaders(a: Header, b: Header) {
     return true;
 }
 
-function getArray(r: (offset: number) => number, offset: number, count: number) {
-    const ret: number[] = [];
-    for (let i = 0; i < count; i++) {
-        ret[i] = r(offset + i);
-    }
-    return ret;
-}
-
-async function readHeader(name: string, file: number) {
-    const headerSize = 1024;
-    const { buffer: data } = await File.readBuffer(file, 0, headerSize);
-
-    let littleEndian = true;
-
-    let mode = data.readInt32LE(3 * 4);
-    if (mode < 0 || mode > 2) {
-        littleEndian = false;
-        mode = data.readInt32BE(3 * 4, true);
-        if (mode < 0 || mode > 2) {
-            throw Error('Only CCP4 modes 0, 1, and 2 are supported.');
-        }
-    }
-
-    const readInt = littleEndian ? (o: number) => data.readInt32LE(o * 4) : (o: number) => data.readInt32BE(o * 4);
-    const readFloat = littleEndian ? (o: number) => data.readFloatLE(o * 4) : (o: number) => data.readFloatBE(o * 4);
+async function readHeader(name: string, file: FileHandle) {
+    const { header: ccp4Header, littleEndian } = await readCcp4Header(file)
 
-    const origin2k = getArray(readFloat, 49, 3);
-    const nxyzStart = getArray(readInt, 4, 3);
+    const origin2k = [ccp4Header.originX, ccp4Header.originY, ccp4Header.originZ];
+    const nxyzStart = [ccp4Header.NCSTART, ccp4Header.NRSTART, ccp4Header.NSSTART];
     const header: Header = {
         name,
-        mode,
-        grid: getArray(readInt, 7, 3),
-        axisOrder: getArray(readInt, 16, 3).map(i => i - 1),
-        extent: getArray(readInt, 0, 3),
+        mode: ccp4Header.MODE,
+        grid: [ccp4Header.NX, ccp4Header.NY, ccp4Header.NZ],
+        axisOrder: [ccp4Header.MAPC, ccp4Header.MAPR, ccp4Header.MAPS].map(i => i - 1),
+        extent: [ccp4Header.NC, ccp4Header.NR, ccp4Header.NS],
         origin: origin2k[0] === 0.0 && origin2k[1] === 0.0 && origin2k[2] === 0.0 ? nxyzStart : origin2k,
-        spacegroupNumber: readInt(22),
-        cellSize: getArray(readFloat, 10, 3),
-        cellAngles: getArray(readFloat, 13, 3),
+        spacegroupNumber: ccp4Header.ISPG,
+        cellSize: [ccp4Header.xLength, ccp4Header.yLength, ccp4Header.zLength],
+        cellAngles: [ccp4Header.alpha, ccp4Header.beta, ccp4Header.gamma],
         // mean: readFloat(21),
         littleEndian,
-        dataOffset: headerSize + readInt(23) /* symBytes */
+        dataOffset: 256 * 4 + ccp4Header.NSYMBT /* symBytes */
     };
     // "normalize" the grid axis order
     header.grid = [header.grid[header.axisOrder[0]], header.grid[header.axisOrder[1]], header.grid[header.axisOrder[2]]];
@@ -153,7 +133,8 @@ export async function readSlices(data: Data) {
 }
 
 export async function open(name: string, filename: string): Promise<Data> {
-    const file = await File.openRead(filename);
+    const descriptor = await File.openRead(filename);
+    const file = FileHandle.fromDescriptor(descriptor)
     const header = await readHeader(name, file);
     return {
         header,
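// ---------------------------------------------------------------------------
// Editor's sketch (not part of the patch): the arithmetic behind the new
// dataOffset line. A CCP4/MRC header is 256 four-byte words (1024 bytes), and
// word 24 (NSYMBT, the old readInt(23)) gives the number of symmetry-record
// bytes between the header and the voxel data. `ccp4DataOffset` is a
// hypothetical helper, shown only to make the constant explicit.
// ---------------------------------------------------------------------------
function ccp4DataOffset(header: { NSYMBT: number }) {
    const headerBytes = 256 * 4;        // fixed-size CCP4/MRC header
    return headerBytes + header.NSYMBT; // voxel data starts after the symmetry records
}
// equivalent to the old `headerSize + readInt(23)` with headerSize = 1024
// ---------------------------------------------------------------------------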
diff --git a/src/servers/volume/pack/data-model.ts b/src/servers/volume/pack/data-model.ts
index 268fdeb5cd9dbb43f7d194f2f4a0d08e069085b8..ef6696b41c460a3026aa82a725a941b4edf82380 100644
--- a/src/servers/volume/pack/data-model.ts
+++ b/src/servers/volume/pack/data-model.ts
@@ -7,6 +7,8 @@
  */
 
 import * as CCP4 from './ccp4'
 import * as DataFormat from '../common/data-format'
+import { FileHandle } from 'mol-io/common/file-handle';
+import { SimpleBuffer } from 'mol-io/common/simple-buffer';
 
 const FORMAT_VERSION = '1.0.0';
@@ -24,7 +26,7 @@ export interface ValuesInfo {
 
 export interface BlockBuffer {
     values: DataFormat.ValueArray[],
-    buffers: Buffer[],
+    buffers: SimpleBuffer[],
     slicesWritten: number
 }
 
@@ -68,7 +70,7 @@ export interface Kernel {
 
 export interface Context {
     /** Output file handle */
-    file: number,
+    file: FileHandle,
     /** Periodic are x-ray density files that cover the entire grid and have [0,0,0] origin */
     isPeriodic: boolean,
 
@@ -77,9 +79,9 @@ export interface Context {
     valueType: DataFormat.ValueType,
     blockSize: number,
     /** Able to store channels.length * blockSize^3 values. */
-    cubeBuffer: Buffer,
+    cubeBuffer: SimpleBuffer,
     /** All values are stored in little endian format which might not be the native endian of the system */
-    litteEndianCubeBuffer: Buffer,
+    litteEndianCubeBuffer: SimpleBuffer,
 
     kernel: Kernel,
     sampling: Sampling[],
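// ---------------------------------------------------------------------------
// Editor's note (not part of the patch): SimpleBuffer is treated throughout
// this patch as a Uint8Array that also exposes Node-Buffer-style accessors.
// A rough sketch of the surface used here (binary-schema.ts, file.ts and
// pack/writer.ts); the real type lives in mol-io/common/simple-buffer.
// ---------------------------------------------------------------------------
type SimpleBufferSketch = Uint8Array & {
    readInt8(offset: number): number
    readInt32LE(offset: number): number
    writeInt32LE(value: number, offset: number): number
    // ...plus the remaining Buffer-style read*/write* accessors, while
    // set()/subarray()/indexing come from Uint8Array itself
}
// ---------------------------------------------------------------------------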
diff --git a/src/servers/volume/pack/main.ts b/src/servers/volume/pack/main.ts
index 116be4bb1f030f8546d0f5a8b48aa3566fb0d5a4..a3ccc8b7a8ea2244be386949303e1a6aea1a040c 100644
--- a/src/servers/volume/pack/main.ts
+++ b/src/servers/volume/pack/main.ts
@@ -11,7 +11,7 @@ import * as File from '../common/file'
 import * as Data from './data-model'
 import * as Sampling from './sampling'
 import * as DataFormat from '../common/data-format'
-import * as fs from 'fs'
+import { FileHandle } from 'mol-io/common/file-handle';
 
 export default async function pack(input: { name: string, filename: string }[], blockSize: number, isPeriodic: boolean, outputFilename: string) {
     try {
@@ -36,7 +36,7 @@ function updateAllocationProgress(progress: Data.Progress, progressDone: number)
 }
 
 /**
- * Pre allocate the disk space to be able to do "random" writes into the entire file. 
+ * Pre allocate the disk space to be able to do "random" writes into the entire file.
  */
 async function allocateFile(ctx: Data.Context) {
     const { totalByteSize, file } = ctx;
@@ -44,7 +44,7 @@ async function allocateFile(ctx: Data.Context) {
     const progress: Data.Progress = { current: 0, max: Math.ceil(totalByteSize / buffer.byteLength) };
     let written = 0;
     while (written < totalByteSize) {
-        written += fs.writeSync(file, buffer, 0, Math.min(totalByteSize - written, buffer.byteLength));
+        written += file.writeBufferSync(written, buffer, Math.min(totalByteSize - written, buffer.byteLength));
         updateAllocationProgress(progress, 1);
     }
 }
@@ -66,7 +66,7 @@ function determineBlockSize(data: CCP4.Data, blockSize: number) {
 async function writeHeader(ctx: Data.Context) {
     const header = DataFormat.encodeHeader(Data.createHeader(ctx));
     await File.writeInt(ctx.file, header.byteLength, 0);
-    await File.writeBuffer(ctx.file, 4, header);
+    await ctx.file.writeBuffer(4, header);
 }
 
 async function create(filename: string, sourceDensities: { name: string, filename: string }[], sourceBlockSize: number, isPeriodic: boolean) {
@@ -81,7 +81,7 @@ async function create(filename: string, sourceDensities: { name: string, filenam
     }
 
     process.stdout.write('Initializing... ');
-    const files: number[] = [];
+    const files: FileHandle[] = [];
     try {
         // Step 1a: Read the CCP4 headers
         const channels: CCP4.Data[] = [];
@@ -102,18 +102,18 @@ async function create(filename: string, sourceDensities: { name: string, filenam
 
         console.log(`Block size: ${blockSize}`);
 
-        // Step 2: Allocate disk space. 
+        // Step 2: Allocate disk space.
         process.stdout.write('Allocating... 0%');
         await allocateFile(context);
         process.stdout.write('\rAllocating... done.\n');
 
-        // Step 3: Process and write the data 
+        // Step 3: Process and write the data
         process.stdout.write('Writing data... 0%');
         await Sampling.processData(context);
         process.stdout.write('\rWriting data... done.\n');
 
         // Step 4: Write the header at the start of the file.
-        // The header is written last because the sigma/min/max values are computed 
+        // The header is written last because the sigma/min/max values are computed
        // during step 3.
         process.stdout.write('Writing header... ');
         await writeHeader(context);
@@ -123,11 +123,11 @@ async function create(filename: string, sourceDensities: { name: string, filenam
         const time = getTime() - startedTime;
         console.log(`[Done] ${time.toFixed(0)}ms.`);
     } finally {
-        for (let f of files) File.close(f);
+        for (let f of files) f.close();
 
         // const ff = await File.openRead(filename);
         // const hh = await DataFormat.readHeader(ff);
         // File.close(ff);
         // console.log(hh.header);
     }
-}
\ No newline at end of file
+}
\ No newline at end of file
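// ---------------------------------------------------------------------------
// Editor's sketch (not part of the patch): what the pre-allocation loop in
// allocateFile does, written with plain Node fs calls. file.writeBufferSync
// is assumed to behave like fs.writeSync with an explicit file position,
// instead of relying on the descriptor's implicit write cursor as the old
// fs.writeSync(file, buffer, 0, length) call did.
// ---------------------------------------------------------------------------
import * as fs from 'fs'

function preallocate(fd: number, totalByteSize: number) {
    // zero-filled scratch block; written repeatedly until the file has its final size
    const chunk = Buffer.alloc(Math.min(totalByteSize, 8 * 1024 * 1024));
    let written = 0;
    while (written < totalByteSize) {
        const length = Math.min(totalByteSize - written, chunk.byteLength);
        // write `length` bytes of `chunk` at absolute offset `written`
        written += fs.writeSync(fd, chunk, 0, length, written);
    }
}
// ---------------------------------------------------------------------------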
diff --git a/src/servers/volume/pack/sampling.ts b/src/servers/volume/pack/sampling.ts
index e193fb218bba78b243e4ebbcf4d837ec705bd552..9c5cc44c7546201ca4d55b56c71f49253af5c72e 100644
--- a/src/servers/volume/pack/sampling.ts
+++ b/src/servers/volume/pack/sampling.ts
@@ -12,6 +12,7 @@ import * as File from '../common/file'
 import * as Downsampling from './downsampling'
 import * as Writer from './writer'
 import * as DataFormat from '../common/data-format'
+import { FileHandle } from 'mol-io/common/file-handle';
 
 export async function createContext(filename: string, channels: CCP4.Data[], blockSize: number, isPeriodic: boolean): Promise<Data.Context> {
     const header = channels[0].header;
@@ -29,7 +30,7 @@ export async function createContext(filename: string, channels: CCP4.Data[], blo
     }
 
     const ctx: Data.Context = {
-        file: await File.createFile(filename),
+        file: FileHandle.fromDescriptor(await File.createFile(filename)),
         isPeriodic,
         channels,
         valueType,
diff --git a/src/servers/volume/pack/writer.ts b/src/servers/volume/pack/writer.ts
index 6cf357df36f23a3a36e0724d8709c97a511e4c6a..e63750780a0f9241ce105ad9980a7a0887d98fb7 100644
--- a/src/servers/volume/pack/writer.ts
+++ b/src/servers/volume/pack/writer.ts
@@ -19,7 +19,7 @@ export async function writeBlockLayer(ctx: Data.Context, sampling: Data.Sampling
     for (let v = 0; v < nV; v++) {
         for (let u = 0; u < nU; u++) {
             const size = fillCubeBuffer(ctx, sampling, u, v);
-            await File.writeBuffer(ctx.file, startOffset + sampling.writeByteOffset, ctx.litteEndianCubeBuffer, size);
+            await ctx.file.writeBuffer(startOffset + sampling.writeByteOffset, ctx.litteEndianCubeBuffer, size);
             sampling.writeByteOffset += size;
             updateProgress(ctx.progress, 1);
         }
@@ -46,7 +46,8 @@ function fillCubeBuffer(ctx: Data.Context, sampling: Data.Sampling, u: number, v
         for (let k = offsetK; k < maxK; k++) {
             // copying the bytes direct is faster than using buffer.write* functions.
             const start = (l * sizeHK + k * sizeH + offsetH) * elementSize;
-            src.copy(cubeBuffer, writeOffset, start, start + copyH);
+            // TODO
+            cubeBuffer.set(src.subarray(start, start + copyH), writeOffset)
             writeOffset += copyH;
         }
     }
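// ---------------------------------------------------------------------------
// Editor's sketch (not part of the patch): Buffer.copy and the new
// set/subarray idiom move the same bytes, which is what makes the swap safe
// once cubeBuffer is a SimpleBuffer (i.e. a Uint8Array) rather than a Buffer.
// ---------------------------------------------------------------------------
const src = Buffer.from([1, 2, 3, 4, 5, 6]);
const viaCopy = Buffer.alloc(4);
const viaSet = new Uint8Array(4);

src.copy(viaCopy, /* targetStart */ 0, /* sourceStart */ 2, /* sourceEnd */ 6); // Buffer-only API
viaSet.set(src.subarray(2, 6), /* targetOffset */ 0);                           // works on any Uint8Array

// both now hold [3, 4, 5, 6]
// ---------------------------------------------------------------------------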
diff --git a/src/servers/volume/server/api.ts b/src/servers/volume/server/api.ts
index c1046a4875498e357e62f37450109b264161a797..42f028cf0f6890cc38cc70d5626e33c7955eab38 100644
--- a/src/servers/volume/server/api.ts
+++ b/src/servers/volume/server/api.ts
@@ -12,6 +12,7 @@ import * as Data from './query/data-model'
 import { ConsoleLogger } from 'mol-util/console-logger'
 import * as DataFormat from '../common/data-format'
 import ServerConfig from '../server-config'
+import { FileHandle } from 'mol-io/common/file-handle';
 
 export function getOutputFilename(source: string, id: string, { asBinary, box, detail, forcedSamplingLevel }: Data.QueryParams) {
     function n(s: string) { return (s || '').replace(/[ \n\t]/g, '').toLowerCase() }
@@ -57,16 +58,16 @@ export async function queryBox(params: Data.QueryParams, outputProvider: () => D
 }
 
 async function readHeader(filename: string | undefined, sourceId: string) {
-    let file: number | undefined = void 0;
+    let file: FileHandle | undefined;
     try {
         if (!filename) return void 0;
-        file = await File.openRead(filename);
+        file = FileHandle.fromDescriptor(await File.openRead(filename));
         const header = await DataFormat.readHeader(file);
         return header.header;
     } catch (e) {
         ConsoleLogger.error(`Info ${sourceId}`, e);
         return void 0;
     } finally {
-        File.close(file);
+        if (file) file.close();
     }
 }
\ No newline at end of file
diff --git a/src/servers/volume/server/query/data-model.ts b/src/servers/volume/server/query/data-model.ts
index d20c192ce4b1c319fac66eedc57858ab319c9e7d..9df53a9414cd4e7da79d00faffbcc5f63bf93d6a 100644
--- a/src/servers/volume/server/query/data-model.ts
+++ b/src/servers/volume/server/query/data-model.ts
@@ -11,6 +11,7 @@ import * as Coords from '../algebra/coordinate'
 import * as Box from '../algebra/box'
 import Writer from 'mol-io/writer/writer'
 import { SpacegroupCell } from 'mol-math/geometry';
+import { FileHandle } from 'mol-io/common/file-handle';
 
 //////////////////////////////////////
 // DATA
@@ -25,7 +26,7 @@ export interface Sampling {
 }
 
 export interface DataContext {
-    file: number,
+    file: FileHandle,
     header: DataFormat.Header,
     spacegroup: SpacegroupCell,
     dataBox: Box.Fractional,
diff --git a/src/servers/volume/server/query/execute.ts b/src/servers/volume/server/query/execute.ts
index c5d4272dfdbb87975ec80759d5abab86e4cdf6cf..cf62baa880b7e561b04d18d8da6a5f059020fabb 100644
--- a/src/servers/volume/server/query/execute.ts
+++ b/src/servers/volume/server/query/execute.ts
@@ -21,6 +21,7 @@ import encode from './encode'
 import { SpacegroupCell } from 'mol-math/geometry';
 import { Vec3 } from 'mol-math/linear-algebra';
 import { UUID } from 'mol-util';
+import { FileHandle } from 'mol-io/common/file-handle';
 
 export default async function execute(params: Data.QueryParams, outputProvider: () => Data.QueryOutputStream) {
     const start = getTime();
@@ -30,16 +31,16 @@ export default async function execute(params: Data.QueryParams, outputProvider:
     params.detail = Math.min(Math.max(0, params.detail | 0), ServerConfig.limits.maxOutputSizeInVoxelCountByPrecisionLevel.length - 1);
     ConsoleLogger.logId(guid, 'Info', `id=${params.sourceId},encoding=${params.asBinary ? 'binary' : 'text'},detail=${params.detail},${queryBoxToString(params.box)}`);
 
-    let sourceFile: number | undefined = void 0;
+    let sourceFile: FileHandle | undefined;
     try {
-        sourceFile = await File.openRead(params.sourceFilename);
+        sourceFile = FileHandle.fromDescriptor(await File.openRead(params.sourceFilename));
         await _execute(sourceFile, params, guid, outputProvider);
         return true;
     } catch (e) {
         ConsoleLogger.errorId(guid, e);
         return false;
     } finally {
-        File.close(sourceFile);
+        if (sourceFile) sourceFile.close();
         ConsoleLogger.logId(guid, 'Time', `${Math.round(getTime() - start)}ms`);
         State.pendingQueries--;
     }
@@ -80,7 +81,7 @@ function createSampling(header: DataFormat.Header, index: number, dataOffset: nu
     }
 }
 
-async function createDataContext(file: number): Promise<Data.DataContext> {
+async function createDataContext(file: FileHandle): Promise<Data.DataContext> {
     const { header, dataOffset } = await DataFormat.readHeader(file);
 
     const origin = Coords.fractional(header.origin[0], header.origin[1], header.origin[2]);
@@ -185,7 +186,7 @@ function createQueryContext(data: Data.DataContext, params: Data.QueryParams, gu
 }
 
 
-async function _execute(file: number, params: Data.QueryParams, guid: string, outputProvider: () => Data.QueryOutputStream) {
+async function _execute(file: FileHandle, params: Data.QueryParams, guid: string, outputProvider: () => Data.QueryOutputStream) {
     let output: any = void 0;
     try {
         // Step 1a: Create data context