Skip to content
Snippets Groups Projects
Commit fa912a1d authored by David Sehnal's avatar David Sehnal
Browse files

Work on mmCIF exporter

parent 9633bf70
No related branches found
No related tags found
No related merge requests found
Showing
with 297 additions and 293 deletions
......@@ -62,6 +62,15 @@ entity.pdbx_mutation
entity.pdbx_fragment
entity.pdbx_ec
entity_poly.entity_id
entity_poly.type
entity_poly.nstd_linkage
entity_poly.nstd_monomer
entity_poly.pdbx_seq_one_letter_code
entity_poly.pdbx_seq_one_letter_code_can
entity_poly.pdbx_strand_id
entity_poly.pdbx_target_identifier
entity_poly_seq.entity_id
entity_poly_seq.num
entity_poly_seq.mon_id
......
......@@ -24,15 +24,11 @@ async function getCIF(ctx: RuntimeContext, path: string) {
return parsed.result;
}
function getCategoryInstanceProvider(cat: CifCategory, fields: CifWriter.Field[]): CifWriter.Category.Provider {
return function (ctx: any) {
return {
data: cat,
name: cat.name,
fields,
rowCount: cat.rowCount
};
}
/**
 * Adapts a parsed CIF category to the writer's Category shape.
 * The returned instance exposes the parsed category as both the data
 * source and the row-count provider for the given fields.
 */
function getCategoryInstanceProvider(cat: CifCategory, fields: CifWriter.Field[]): CifWriter.Category {
    const makeInstance = () => ({ data: cat, fields, rowCount: cat.rowCount });
    return { name: cat.name, instance: makeInstance };
}
export default function convert(path: string, asText = false) {
......
......@@ -7,7 +7,7 @@
import { Table } from 'mol-data/db'
import { CifWriter } from 'mol-io/writer/cif'
import * as S from './schemas'
import { getCategoryInstanceProvider } from './utils'
//import { getCategoryInstanceProvider } from './utils'
export default function create(allData: any) {
const mols = Object.keys(allData);
......@@ -21,7 +21,7 @@ export default function create(allData: any) {
const sources = getSources(data);
if (!sources._rowCount) return enc.getData();
enc.writeCategory(getCategoryInstanceProvider(`pdbx_domain_annotation_sources`, sources));
enc.writeCategory({ name: `pdbx_domain_annotation_sources`, instance: () => CifWriter.Category.ofTable(sources) });
for (const cat of Object.keys(S.categories)) {
writeDomain(enc, getDomain(cat, (S.categories as any)[cat], data));
......@@ -38,8 +38,8 @@ type MappingRow = Table.Row<S.mapping>;
function writeDomain(enc: CifWriter.Encoder, domain: DomainAnnotation | undefined) {
if (!domain) return;
enc.writeCategory(getCategoryInstanceProvider(`pdbx_${domain.name}_domain_annotation`, domain.domains));
enc.writeCategory(getCategoryInstanceProvider(`pdbx_${domain.name}_domain_mapping`, domain.mappings));
enc.writeCategory({ name: `pdbx_${domain.name}_domain_annotation`, instance: () => CifWriter.Category.ofTable(domain.domains) });
enc.writeCategory({ name: `pdbx_${domain.name}_domain_mapping`, instance: () => CifWriter.Category.ofTable(domain.mappings) });
}
function getSources(data: any): Table<S.Sources> {
......
/**
* Copyright (c) 2017 mol* contributors, licensed under MIT, See LICENSE file for more info.
*
* @author David Sehnal <david.sehnal@gmail.com>
*/
import { Table } from 'mol-data/db'
import { CifWriter } from 'mol-io/writer/cif'
/**
 * Builds an accessor that reads row `i` of column `k` from a column map
 * (each entry is expected to expose a `value(i)` function).
 */
function columnValue(k: string) {
    return function (i: number, d: any) {
        return d[k].value(i);
    };
}
/**
 * Builds an accessor that reports the value kind of row `i` in column `k`
 * (each entry is expected to expose a `valueKind(i)` function).
 */
function columnValueKind(k: string) {
    return function (i: number, d: any) {
        return d[k].valueKind(i);
    };
}
/**
 * Derives CIF writer field definitions from a table schema.
 * `str` and `int` columns map to the corresponding writer field types;
 * everything else falls back to Float.
 */
function ofSchema(schema: Table.Schema) {
    const fields: CifWriter.Field[] = Object.keys(schema).map(k => {
        const valueType = schema[k].valueType;
        let type: any;
        if (valueType === 'str') type = CifWriter.Field.Type.Str;
        else if (valueType === 'int') type = CifWriter.Field.Type.Int;
        else type = CifWriter.Field.Type.Float;
        return { name: k, type, value: columnValue(k), valueKind: columnValueKind(k) };
    });
    return fields;
}
/**
 * Creates a writer category provider backed by a data table.
 * The fields are derived from the table's schema each time the
 * provider is invoked, so later schema changes are picked up.
 */
export function getCategoryInstanceProvider(name: string, table: Table<any>): CifWriter.Category.Provider {
    return function () {
        return {
            name,
            data: table,
            fields: ofSchema(table._schema),
            rowCount: table._rowCount
        };
    };
}
......@@ -80,16 +80,26 @@ export const mmCIF_Schema = {
pdbx_aromatic_flag: Aliased<'Y' | 'N'>(str),
},
entity: {
details: str,
formula_weight: float,
id: str,
src_method: Aliased<'nat' | 'man' | 'syn'>(str),
type: Aliased<'polymer' | 'non-polymer' | 'macrolide' | 'water'>(str),
src_method: Aliased<'nat' | 'man' | 'syn'>(str),
formula_weight: float,
pdbx_description: str,
pdbx_number_of_molecules: float,
pdbx_mutation: str,
pdbx_fragment: str,
pdbx_ec: List(',', x => x),
details: str,
},
entity_poly: {
entity_id: str,
type: str,
nstd_linkage: Aliased<'no' | 'n' | 'yes' | 'y'>(str),
nstd_monomer: Aliased<'no' | 'n' | 'yes' | 'y'>(str),
pdbx_seq_one_letter_code: str,
pdbx_seq_one_letter_code_can: str,
pdbx_strand_id: str,
pdbx_target_identifier: str,
},
entity_poly_seq: {
entity_id: str,
......@@ -109,11 +119,11 @@ export const mmCIF_Schema = {
title: str,
},
struct_asym: {
details: str,
entity_id: str,
id: str,
entity_id: str,
pdbx_modified: str,
pdbx_blank_PDB_chainid_flag: Aliased<'Y' | 'N'>(str),
details: str,
},
struct_conf: {
beg_label_asym_id: str,
......
......@@ -69,19 +69,19 @@ export namespace Field {
}
}
export interface Category<Key = any, Data = any> {
export interface Category<Ctx = any> {
name: string,
fields: Field<Key, Data>[],
data?: Data,
rowCount: number,
keys?: () => Iterator<Key>
instance(ctx: Ctx): Category.Instance
}
export namespace Category {
export const Empty: Category = { name: 'empty', rowCount: 0, fields: [] };
export const Empty: Instance = { fields: [], rowCount: 0 };
export interface Provider<Ctx = any> {
(ctx: Ctx): Category
export interface Instance<Key = any, Data = any> {
fields: Field[],
data?: Data,
rowCount: number,
keys?: () => Iterator<Key>
}
export interface Filter {
......@@ -102,11 +102,11 @@ export namespace Category {
getFormat(cat, field) { return void 0; }
}
export function ofTable(name: string, table: Table<Table.Schema>, indices?: ArrayLike<number>): Category<number, Table<Table.Schema>> {
export function ofTable(table: Table<Table.Schema>, indices?: ArrayLike<number>): Category.Instance {
if (indices) {
return { name, fields: cifFieldsFromTableSchema(table._schema), data: table, rowCount: indices.length, keys: () => Iterator.Array(indices) };
return { fields: cifFieldsFromTableSchema(table._schema), data: table, rowCount: indices.length, keys: () => Iterator.Array(indices) };
}
return { name, fields: cifFieldsFromTableSchema(table._schema), data: table, rowCount: table._rowCount };
return { fields: cifFieldsFromTableSchema(table._schema), data: table, rowCount: table._rowCount };
}
}
......@@ -115,7 +115,7 @@ export interface Encoder<T = string | Uint8Array> extends EncoderBase {
setFormatter(formatter?: Category.Formatter): void,
startDataBlock(header: string): void,
writeCategory<Ctx>(category: Category.Provider<Ctx>, contexts?: Ctx[]): void,
writeCategory<Ctx>(category: Category<Ctx>, contexts?: Ctx[]): void,
getData(): T
}
......@@ -123,7 +123,7 @@ export namespace Encoder {
export function writeDatabase(encoder: Encoder, name: string, database: Database<Database.Schema>) {
encoder.startDataBlock(name);
for (const table of database._tableNames) {
encoder.writeCategory(() => Category.ofTable(table, database[table]));
encoder.writeCategory({ name: table, instance: () => Category.ofTable(database[table]) });
}
}
......
......@@ -38,7 +38,7 @@ export default class BinaryEncoder implements Encoder<Uint8Array> {
});
}
writeCategory<Ctx>(category: Category.Provider<Ctx>, contexts?: Ctx[]) {
writeCategory<Ctx>(category: Category<Ctx>, contexts?: Ctx[]) {
if (!this.data) {
throw new Error('The writer contents have already been encoded, no more writing.');
}
......@@ -47,23 +47,23 @@ export default class BinaryEncoder implements Encoder<Uint8Array> {
throw new Error('No data block created.');
}
const src = !contexts || !contexts.length ? [category(<any>void 0)] : contexts.map(c => category(c));
const categories = src.filter(c => c && c.rowCount > 0);
if (!categories.length) return;
if (!this.filter.includeCategory(categories[0].name)) return;
if (!this.filter.includeCategory(category.name)) return;
const count = categories.reduce((a, c) => a + c.rowCount, 0);
const src = !contexts || !contexts.length ? [category.instance(<any>void 0)] : contexts.map(c => category.instance(c));
const instances = src.filter(c => c && c.rowCount > 0);
if (!instances.length) return;
const count = instances.reduce((a, c) => a + c.rowCount, 0);
if (!count) return;
const first = categories[0]!;
const cat: EncodedCategory = { name: '_' + first.name, columns: [], rowCount: count };
const data = categories.map(c => ({ data: c.data, keys: () => c.keys ? c.keys() : Iterator.Range(0, c.rowCount - 1) }));
const fields = getIncludedFields(first);
const cat: EncodedCategory = { name: '_' + category.name, columns: [], rowCount: count };
const data = instances.map(c => ({ data: c.data, keys: () => c.keys ? c.keys() : Iterator.Range(0, c.rowCount - 1) }));
const fields = getIncludedFields(instances[0]);
for (const f of fields) {
if (!this.filter.includeField(first.name, f.name)) continue;
if (!this.filter.includeField(category.name, f.name)) continue;
const format = this.formatter.getFormat(first.name, f.name);
const format = this.formatter.getFormat(category.name, f.name);
cat.columns.push(encodeField(f, data, count, getArrayCtor(f, format), getEncoder(f, format)));
}
// no columns included.
......
......@@ -33,7 +33,7 @@ export default class TextEncoder implements Encoder<string> {
StringBuilder.write(this.builder, `data_${(header || '').replace(/[ \n\t]/g, '').toUpperCase()}\n#\n`);
}
writeCategory<Ctx>(category: Category.Provider<Ctx>, contexts?: Ctx[]) {
writeCategory<Ctx>(category: Category<Ctx>, contexts?: Ctx[]) {
if (this.encoded) {
throw new Error('The writer contents have already been encoded, no more writing.');
}
......@@ -42,18 +42,20 @@ export default class TextEncoder implements Encoder<string> {
throw new Error('No data block created.');
}
const categories = !contexts || !contexts.length ? [category(<any>void 0)] : contexts.map(c => category(c));
if (!categories.length) return;
if (!this.filter.includeCategory(categories[0].name)) return;
if (!this.filter.includeCategory(category.name)) return;
const rowCount = categories.reduce((v, c) => v + c.rowCount, 0);
const src = !contexts || !contexts.length ? [category.instance(<any>void 0)] : contexts.map(c => category.instance(c));
const instances = src.filter(c => c && c.rowCount > 0);
if (!instances.length) return;
const rowCount = instances.reduce((v, c) => v + c.rowCount, 0);
if (rowCount === 0) return;
if (rowCount === 1) {
writeCifSingleRecord(categories[0]!, this.builder, this.filter, this.formatter);
writeCifSingleRecord(category, instances[0]!, this.builder, this.filter, this.formatter);
} else {
writeCifLoop(categories, this.builder, this.filter, this.formatter);
writeCifLoop(category, instances, this.builder, this.filter, this.formatter);
}
}
......@@ -108,19 +110,19 @@ function getFloatPrecisions(categoryName: string, fields: Field[], formatter: Ca
return ret;
}
function writeCifSingleRecord(category: Category<any>, builder: StringBuilder, filter: Category.Filter, formatter: Category.Formatter) {
const fields = getIncludedFields(category);
const data = category.data;
function writeCifSingleRecord(category: Category, instance: Category.Instance, builder: StringBuilder, filter: Category.Filter, formatter: Category.Formatter) {
const fields = getIncludedFields(instance);
const data = instance.data;
let width = fields.reduce((w, f) => filter.includeField(category.name, f.name) ? Math.max(w, f.name.length) : 0, 0);
// this means no field from this category is included.
if (width === 0) return;
width += category.name.length + 6;
const it = category.keys ? category.keys() : Iterator.Range(0, category.rowCount - 1);
const it = instance.keys ? instance.keys() : Iterator.Range(0, instance.rowCount - 1);
const key = it.move();
const precisions = getFloatPrecisions(category.name, category.fields, formatter);
const precisions = getFloatPrecisions(category.name, instance.fields, formatter);
for (let _f = 0; _f < fields.length; _f++) {
const f = fields[_f];
......@@ -133,28 +135,27 @@ function writeCifSingleRecord(category: Category<any>, builder: StringBuilder, f
StringBuilder.write(builder, '#\n');
}
function writeCifLoop(categories: Category[], builder: StringBuilder, filter: Category.Filter, formatter: Category.Formatter) {
const first = categories[0];
const fieldSource = getIncludedFields(first);
const fields = filter === Category.DefaultFilter ? fieldSource : fieldSource.filter(f => filter.includeField(first.name, f.name));
function writeCifLoop(category: Category, instances: Category.Instance[], builder: StringBuilder, filter: Category.Filter, formatter: Category.Formatter) {
const fieldSource = getIncludedFields(instances[0]);
const fields = filter === Category.DefaultFilter ? fieldSource : fieldSource.filter(f => filter.includeField(category.name, f.name));
const fieldCount = fields.length;
if (fieldCount === 0) return;
const precisions = getFloatPrecisions(first.name, fields, formatter);
const precisions = getFloatPrecisions(category.name, fields, formatter);
writeLine(builder, 'loop_');
for (let i = 0; i < fieldCount; i++) {
writeLine(builder, `_${first.name}.${fields[i].name}`);
writeLine(builder, `_${category.name}.${fields[i].name}`);
}
let index = 0;
for (let _c = 0; _c < categories.length; _c++) {
const category = categories[_c];
const data = category.data;
for (let _c = 0; _c < instances.length; _c++) {
const instance = instances[_c];
const data = instance.data;
if (category.rowCount === 0) continue;
if (instance.rowCount === 0) continue;
const it = category.keys ? category.keys() : Iterator.Range(0, category.rowCount - 1);
const it = instance.keys ? instance.keys() : Iterator.Range(0, instance.rowCount - 1);
while (it.hasNext) {
const key = it.move();
......
......@@ -11,7 +11,7 @@ export function getFieldDigitCount(field: Field) {
return 6;
}
export function getIncludedFields(category: Category<any, any>) {
export function getIncludedFields(category: Category.Instance) {
return category.fields.some(f => !!f.shouldInclude)
? category.fields.filter(f => !f.shouldInclude || f.shouldInclude(category.data))
: category.fields;
......
......@@ -39,16 +39,18 @@ const atom_site_fields: CifField<StructureElement>[] = [
CifField.int('pdbx_PDB_model_num', P.unit.model_num, { encoder: E.deltaRLE }),
CifField.str<StructureElement, Structure>('operator_name', P.unit.operator_name, {
shouldInclude: structure => structure.units.some(u => !u.conformation.operator.isIdentity)
shouldInclude: structure => { console.log(!!structure); return structure.units.some(u => !u.conformation.operator.isIdentity) }
})
];
export function _atom_site({ structure }: CifExportContext): CifCategory {
return {
data: structure,
name: 'atom_site',
fields: atom_site_fields,
rowCount: structure.elementCount,
keys: () => structure.elementLocations()
export const _atom_site: CifCategory<CifExportContext> = {
name: 'atom_site',
instance({ structure }: CifExportContext) {
return {
fields: atom_site_fields,
data: structure,
rowCount: structure.elementCount,
keys: () => structure.elementLocations()
};
}
}
\ No newline at end of file
......@@ -13,16 +13,6 @@ import { CifExportContext } from '../mmcif';
import CifField = CifWriter.Field
import CifCategory = CifWriter.Category
export function _pdbx_struct_mod_residue(ctx: CifExportContext): CifCategory {
const residues = getModifiedResidues(ctx);
return {
data: residues,
name: 'pdbx_struct_mod_residue',
fields: pdbx_struct_mod_residue_fields,
rowCount: residues.length
};
}
const pdbx_struct_mod_residue_fields: CifField<number, StructureElement[]>[] = [
CifField.index('id'),
CifField.str(`label_comp_id`, (i, xs) => P.residue.label_comp_id(xs[i])),
......@@ -58,4 +48,12 @@ function getModifiedResidues({ model, structure }: CifExportContext): StructureE
}
}
return ret;
}
/** Export category listing the modified residues found in the exported structure. */
export const _pdbx_struct_mod_residue: CifCategory<CifExportContext> = {
    name: 'pdbx_struct_mod_residue',
    instance: ctx => {
        const data = getModifiedResidues(ctx);
        return { data, fields: pdbx_struct_mod_residue_fields, rowCount: data.length };
    }
}
\ No newline at end of file
......@@ -13,25 +13,21 @@ import CifField = CifWriter.Field
import CifCategory = CifWriter.Category
import { Column } from 'mol-data/db';
export function _struct_conf(ctx: CifExportContext): CifCategory {
const elements = findElements(ctx, 'helix');
return {
data: elements,
name: 'struct_conf',
fields: struct_conf_fields,
rowCount: elements.length
};
}
export const _struct_conf: CifCategory<CifExportContext> = {
name: 'struct_conf',
instance(ctx) {
const elements = findElements(ctx, 'helix');
return { fields: struct_conf_fields, data: elements, rowCount: elements.length };
}
};
export function _struct_sheet_range(ctx: CifExportContext): CifCategory {
const elements = (findElements(ctx, 'sheet') as SSElement<SecondaryStructure.Sheet>[]).sort(compare_ssr);
return {
data: elements,
name: 'struct_sheet_range',
fields: struct_sheet_range_fields,
rowCount: elements.length
};
}
export const _struct_sheet_range: CifCategory<CifExportContext> = {
name: 'struct_sheet_range',
instance(ctx) {
const elements = (findElements(ctx, 'sheet') as SSElement<SecondaryStructure.Sheet>[]).sort(compare_ssr);
return { fields: struct_sheet_range_fields, data: elements, rowCount: elements.length };
}
};
function compare_ssr(x: SSElement<SecondaryStructure.Sheet>, y: SSElement<SecondaryStructure.Sheet>) {
const a = x.element, b = y.element;
......
......@@ -10,28 +10,34 @@ import { mmCIF_Schema } from 'mol-io/reader/cif/schema/mmcif'
import { Structure } from '../structure'
import { Model } from '../model'
import { _atom_site } from './categories/atom_site';
import CifCategory = CifWriter.Category
import { _struct_conf, _struct_sheet_range } from './categories/secondary-structure';
import { _pdbx_struct_mod_residue } from './categories/modified-residues';
export interface CifExportContext {
structure: Structure,
model: Model
model: Model,
cache: any
}
import CifCategory = CifWriter.Category
import { _struct_conf, _struct_sheet_range } from './categories/secondary-structure';
import { _pdbx_struct_mod_residue } from './categories/modified-residues';
function copy_mmCif_category(name: keyof mmCIF_Schema) {
return ({ model }: CifExportContext) => {
if (model.sourceData.kind !== 'mmCIF') return CifCategory.Empty;
const table = model.sourceData.data[name];
if (!table || !table._rowCount) return CifCategory.Empty;
return CifCategory.ofTable(name, table);
/**
 * Builds an export category that copies the named table verbatim from the
 * model's source mmCIF data. Yields an empty category when the model did not
 * originate from mmCIF or the table is absent/empty.
 */
function copy_mmCif_category(name: keyof mmCIF_Schema): CifCategory<CifExportContext> {
    const instance = ({ model }: CifExportContext) => {
        if (model.sourceData.kind !== 'mmCIF') return CifCategory.Empty;
        const table = model.sourceData.data[name];
        return table && table._rowCount ? CifCategory.ofTable(table) : CifCategory.Empty;
    };
    return { name, instance };
}
function _entity({ model, structure }: CifExportContext): CifCategory {
const keys = Structure.getEntityKeys(structure);
return CifCategory.ofTable('entity', model.entities.data, keys);
/** Entity category restricted to the entity keys actually present in the exported structure. */
const _entity: CifCategory<CifExportContext> = {
    name: 'entity',
    instance({ structure, model }) {
        return CifCategory.ofTable(model.entities.data, Structure.getEntityKeys(structure));
    }
}
const Categories = [
......@@ -53,6 +59,11 @@ const Categories = [
_struct_conf,
_struct_sheet_range,
// Sequence
copy_mmCif_category('struct_asym'), // TODO: filter only present chains?
copy_mmCif_category('entity_poly'),
copy_mmCif_category('entity_poly_seq'),
// Misc
// TODO: filter for actual present residues?
copy_mmCif_category('chem_comp'),
......@@ -81,13 +92,13 @@ export function encode_mmCIF_categories(encoder: CifWriter.Encoder, structure: S
if (models.length !== 1) throw 'Can\'t export stucture composed from multiple models.';
const model = models[0];
const ctx: CifExportContext[] = [{ structure, model }];
const ctx: CifExportContext[] = [{ structure, model, cache: Object.create(null) }];
for (const cat of Categories) {
encoder.writeCategory(cat, ctx);
}
for (const customProp of model.customProperties.all) {
const cats = customProp.cifExport.categoryProvider(ctx[0]);
const cats = customProp.cifExport.categories;
for (const cat of cats) {
encoder.writeCategory(cat, ctx);
}
......
......@@ -22,22 +22,22 @@ export namespace ComponentBond {
isStatic: true,
name: 'chem_comp_bond',
cifExport: {
categoryNames: ['chem_comp_bond'],
categoryProvider(ctx) {
const chem_comp_bond = getChemCompBond(ctx.model);
if (!chem_comp_bond) return [];
const comp_names = getUniqueResidueNames(ctx.structure);
const { comp_id, _rowCount } = chem_comp_bond;
const indices: number[] = [];
for (let i = 0; i < _rowCount; i++) {
if (comp_names.has(comp_id.value(i))) indices[indices.length] = i;
categories: [{
name: 'chem_comp_bond',
instance(ctx) {
const chem_comp_bond = getChemCompBond(ctx.model);
if (!chem_comp_bond) return CifWriter.Category.Empty;
const comp_names = getUniqueResidueNames(ctx.structure);
const { comp_id, _rowCount } = chem_comp_bond;
const indices: number[] = [];
for (let i = 0; i < _rowCount; i++) {
if (comp_names.has(comp_id.value(i))) indices[indices.length] = i;
}
return CifWriter.Category.ofTable(chem_comp_bond, indices)
}
return [
() => CifWriter.Category.ofTable('chem_comp_bond', chem_comp_bond, indices)
];
}
}]
}
}
......
......@@ -27,38 +27,38 @@ export namespace StructConn {
isStatic: true,
name: 'struct_conn',
cifExport: {
categoryNames: ['struct_conn'],
categoryProvider(ctx) {
const struct_conn = getStructConn(ctx.model);
if (!struct_conn) return [];
categories: [{
name: 'struct_conn',
instance(ctx) {
const struct_conn = getStructConn(ctx.model);
if (!struct_conn) return CifWriter.Category.Empty;
const strConn = get(ctx.model);
if (!strConn || strConn.entries.length === 0) return [];
const strConn = get(ctx.model);
if (!strConn || strConn.entries.length === 0) return CifWriter.Category.Empty;
const foundAtoms = new Set<ElementIndex>();
const indices: number[] = [];
for (const entry of strConn.entries) {
const { partners } = entry;
let hasAll = true;
for (let i = 0, _i = partners.length; i < _i; i++) {
const atom = partners[i].atomIndex;
if (foundAtoms.has(atom)) continue;
if (hasAtom(ctx.structure, atom)) {
foundAtoms.add(atom);
} else {
hasAll = false;
break;
const foundAtoms = new Set<ElementIndex>();
const indices: number[] = [];
for (const entry of strConn.entries) {
const { partners } = entry;
let hasAll = true;
for (let i = 0, _i = partners.length; i < _i; i++) {
const atom = partners[i].atomIndex;
if (foundAtoms.has(atom)) continue;
if (hasAtom(ctx.structure, atom)) {
foundAtoms.add(atom);
} else {
hasAll = false;
break;
}
}
if (hasAll) {
indices[indices.length] = entry.rowIndex;
}
}
if (hasAll) {
indices[indices.length] = entry.rowIndex;
}
}
return [
() => CifWriter.Category.ofTable('struct_conn', struct_conn, indices)
];
}
return CifWriter.Category.ofTable(struct_conn, indices);
}
}]
}
}
......
......@@ -12,9 +12,7 @@ interface ModelPropertyDescriptor {
readonly name: string,
cifExport: {
/** used category names that can be used for "filtering" by the writer */
readonly categoryNames: ReadonlyArray<string>,
categoryProvider: (ctx: CifExportContext) => CifWriter.Category.Provider[]
categories: CifWriter.Category<CifExportContext>[]
}
}
......
......@@ -5,6 +5,7 @@
*/
import { CifWriter } from 'mol-io/writer/cif'
import * as fs from 'fs'
const category1fields: CifWriter.Field[] = [
CifWriter.Field.str('f1', i => 'v' + i),
......@@ -18,24 +19,49 @@ const category2fields: CifWriter.Field[] = [
CifWriter.Field.float('e3', i => Math.random()),
];
function getInstance(ctx: { name: string, fields: CifWriter.Field[], rowCount: number }): CifWriter.Category {
function getCat(name: string): CifWriter.Category {
return {
data: void 0,
name: ctx.name,
fields: ctx.fields,
rowCount: ctx.rowCount
name,
instance(ctx: { fields: CifWriter.Field[], rowCount: number }) {
return { data: void 0, fields: ctx.fields, rowCount: ctx.rowCount };
}
};
}
/**
 * Smoke test: writes two demo categories as text CIF and prints the result.
 * The filter drops field `e2` of `cat2` to exercise field-level filtering.
 */
function testText() {
    const enc = CifWriter.createEncoder();
    const filter: CifWriter.Category.Filter = {
        includeCategory: () => true,
        includeField: (cat, field) => cat !== 'cat2' || field !== 'e2'
    };
    enc.startDataBlock('test');
    enc.setFilter(filter);
    enc.writeCategory(getCat('cat1'), [{ rowCount: 5, fields: category1fields }]);
    enc.writeCategory(getCat('cat2'), [{ rowCount: 1, fields: category2fields }]);
    console.log(enc.getData());
}
const enc = CifWriter.createEncoder();
testText();
function testBinary() {
const enc = CifWriter.createEncoder({ binary: true });
const filter: CifWriter.Category.Filter = {
includeCategory(cat) { return true; },
includeField(cat, field) { return !(cat === 'cat2' && field === 'e2') }
}
const filter: CifWriter.Category.Filter = {
includeCategory(cat) { return true; },
includeField(cat, field) { return !(cat === 'cat2' && field === 'e2') }
enc.startDataBlock('test');
enc.setFilter(filter);
enc.writeCategory(getCat('cat1'), [{ rowCount: 5, fields: category1fields }]);
enc.writeCategory(getCat('cat2'), [{ rowCount: 1, fields: category2fields }]);
enc.encode();
const data = enc.getData() as Uint8Array;
fs.writeFileSync('e:/test/mol-star/test.bcif', new Buffer(data));
console.log('written binary');
}
enc.startDataBlock('test');
enc.setFilter(filter);
enc.writeCategory(getInstance, [{ rowCount: 5, name: 'cat1', fields: category1fields }]);
enc.writeCategory(getInstance, [{ rowCount: 1, name: 'cat2', fields: category2fields }]);
console.log(enc.getData());
testBinary();
\ No newline at end of file
......@@ -95,6 +95,7 @@ export async function resolveRequest(req: Request, writer: Writer) {
};
encoder.writeCategory(_model_server_stats, [stats]);
encoder.encode();
encoder.writeTo(writer);
......@@ -112,9 +113,9 @@ import CifField = CifWriter.Field
function string<T>(name: string, str: (data: T, i: number) => string, isSpecified?: (data: T) => boolean): CifField<number, T> {
if (isSpecified) {
return CifField.str(name, (i, d) => str(d, i), { valueKind: (i, d) => isSpecified(d) ? Column.ValueKind.Present : Column.ValueKind.NotPresent });
return CifField.str(name, (i, d) => str(d, i), { valueKind: (i, d) => isSpecified(d) ? Column.ValueKind.Present : Column.ValueKind.NotPresent });
}
return CifField.str(name, (i, d) => str(d, i));
return CifField.str(name, (i, d) => str(d, i));
}
function int32<T>(name: string, value: (data: T) => number): CifField<number, T> {
......@@ -144,33 +145,29 @@ const _model_server_stats_fields: CifField<number, Stats>[] = [
];
function _model_server_result(request: Request): CifWriter.Category {
return {
data: request,
name: 'model_server_result',
fields: _model_server_result_fields,
rowCount: 1
};
}
function _model_server_params(request: Request): CifWriter.Category {
const params: string[][] = [];
for (const k of Object.keys(request.normalizedParams)) {
params.push([k, '' + request.normalizedParams[k]]);
/** Single-row category echoing the request that produced this result. */
const _model_server_result: CifWriter.Category<Request> = {
    name: 'model_server_result',
    instance(request) {
        return { rowCount: 1, data: request, fields: _model_server_result_fields };
    }
};
const _model_server_params: CifWriter.Category<Request> = {
name: 'model_server_params',
instance(request) {
const params: string[][] = [];
for (const k of Object.keys(request.normalizedParams)) {
params.push([k, '' + request.normalizedParams[k]]);
}
return {
data: params,
fields: _model_server_params_fields,
rowCount: params.length
}
}
return {
data: params,
name: 'model_server_params',
fields: _model_server_params_fields,
rowCount: params.length
};
}
};
function _model_server_stats(stats: Stats): CifWriter.Category {
return {
data: stats,
name: 'model_server_stats',
fields: _model_server_stats_fields,
rowCount: 1
};
/** Single-row category reporting server-side timing/statistics for the request. */
const _model_server_stats: CifWriter.Category<Stats> = {
    name: 'model_server_stats',
    instance(stats) {
        return { rowCount: 1, data: stats, fields: _model_server_stats_fields };
    }
}
\ No newline at end of file
......@@ -28,9 +28,9 @@ interface ResultContext {
function string<T>(name: string, str: (data: T) => string, isSpecified?: (data: T) => boolean): CifWriter.Field<number, T> {
if (isSpecified) {
return CifWriter.Field.str(name, (i, d) => str(d), { valueKind: (i, d) => isSpecified(d) ? Column.ValueKind.Present : Column.ValueKind.NotPresent });
return CifWriter.Field.str(name, (i, d) => str(d), { valueKind: (i, d) => isSpecified(d) ? Column.ValueKind.Present : Column.ValueKind.NotPresent });
}
return CifWriter.Field.str(name, (i, d) => str(d));
return CifWriter.Field.str(name, (i, d) => str(d));
}
function int32<T>(name: string, value: (data: T) => number): CifWriter.Field<number, T> {
......@@ -90,53 +90,54 @@ const _volume_data_3d_info_fields = [
float64<_vd3d_Ctx>('max_sampled', ctx => ctx.sampledValuesInfo.max)
];
function _volume_data_3d_info(result: ResultContext): CifWriter.Category {
const ctx: _vd3d_Ctx = {
header: result.query.data.header,
channelIndex: result.channelIndex,
grid: result.query.samplingInfo.gridDomain,
sampleRate: result.query.samplingInfo.sampling.rate,
globalValuesInfo: result.query.data.header.sampling[0].valuesInfo[result.channelIndex],
sampledValuesInfo: result.query.data.header.sampling[result.query.samplingInfo.sampling.index].valuesInfo[result.channelIndex]
};
return {
data: ctx,
name: 'volume_data_3d_info',
fields: _volume_data_3d_info_fields,
rowCount: 1
};
}
/**
 * Single-row category describing the sampled 3D volume: grid domain, sampling
 * rate, and value statistics for the selected channel (both global and for the
 * sampling level actually used by the query).
 */
const _volume_data_3d_info: CifWriter.Category<ResultContext> = {
    name: 'volume_data_3d_info',
    instance(result) {
        const { query, channelIndex } = result;
        const header = query.data.header;
        const info: _vd3d_Ctx = {
            header,
            channelIndex,
            grid: query.samplingInfo.gridDomain,
            sampleRate: query.samplingInfo.sampling.rate,
            globalValuesInfo: header.sampling[0].valuesInfo[channelIndex],
            sampledValuesInfo: header.sampling[query.samplingInfo.sampling.index].valuesInfo[channelIndex]
        };
        return { data: info, fields: _volume_data_3d_info_fields, rowCount: 1 };
    }
};
function _volume_data_3d_number(i: number, ctx: DataFormat.ValueArray): number {
return ctx[i];
}
function _volume_data_3d(ctx: ResultContext) {
const data = ctx.query.values[ctx.channelIndex];
const E = ArrayEncoding;
let encoder: ArrayEncoder;
let typedArray: any;
if (ctx.query.data.header.valueType === DataFormat.ValueType.Float32 || ctx.query.data.header.valueType === DataFormat.ValueType.Int16) {
let min: number, max: number;
min = data[0], max = data[0];
for (let i = 0, n = data.length; i < n; i++) {
let v = data[i];
if (v < min) min = v;
else if (v > max) max = v;
const _volume_data_3d: CifWriter.Category<ResultContext> = {
name: 'volume_data_3d',
instance(ctx) {
const data = ctx.query.values[ctx.channelIndex];
const E = ArrayEncoding;
let encoder: ArrayEncoder;
let typedArray: any;
if (ctx.query.data.header.valueType === DataFormat.ValueType.Float32 || ctx.query.data.header.valueType === DataFormat.ValueType.Int16) {
let min: number, max: number;
min = data[0], max = data[0];
for (let i = 0, n = data.length; i < n; i++) {
let v = data[i];
if (v < min) min = v;
else if (v > max) max = v;
}
typedArray = Float32Array;
// encode into 255 steps and store each value in 1 byte.
encoder = E.by(E.intervalQuantizaiton(min, max, 255, Uint8Array)).and(E.byteArray);
} else {
typedArray = Int8Array;
// just encode the bytes
encoder = E.by(E.byteArray)
}
typedArray = Float32Array;
// encode into 255 steps and store each value in 1 byte.
encoder = E.by(E.intervalQuantizaiton(min, max, 255, Uint8Array)).and(E.byteArray);
} else {
typedArray = Int8Array;
// just encode the bytes
encoder = E.by(E.byteArray)
}
const fields = [CifWriter.Field.float('values', _volume_data_3d_number, { encoder, typedArray, digitCount: 6 })];
return { data, name: 'volume_data_3d', fields, rowCount: data.length };
const fields = [CifWriter.Field.float('values', _volume_data_3d_number, { encoder, typedArray, digitCount: 6 })];
return { data, fields, rowCount: data.length };
}
}
function pickQueryBoxDimension(ctx: Data.QueryContext, e: 'a' | 'b', d: number) {
......@@ -171,13 +172,9 @@ const _density_server_result_fields = [
queryBoxDimension('b', 2)
]
function _density_server_result(ctx: Data.QueryContext): CifWriter.Category {
return {
data: ctx,
name: 'density_server_result',
fields: _density_server_result_fields,
rowCount: 1
};
/** Single-row category summarizing the density-server query and its outcome. */
const _density_server_result: CifWriter.Category<Data.QueryContext> = {
    name: 'density_server_result',
    instance(ctx) {
        return { rowCount: 1, data: ctx, fields: _density_server_result_fields };
    }
}
function write(encoder: CifWriter.Encoder, query: Data.QueryContext) {
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment