Skip to content
Snippets Groups Projects
Commit f831c015 authored by David Sehnal's avatar David Sehnal
Browse files

ModelServer local api

parent f6df5705
Branches
Tags
No related merge requests found
/**
* Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
*
* @author David Sehnal <david.sehnal@gmail.com>
*/
import * as fs from 'fs'
import Version from './version';
import { LocalInput, runLocal } from './server/api-local';
// Startup banner for the command-line local runner (Version comes from ./version).
console.log(`Mol* ModelServer (${Version}), (c) 2018 Mol* authors`);
console.log(``);
// Example job list printed as part of the usage help below. Each entry maps one
// input structure file to one output file via a named query. Declared `const`
// (it is never reassigned); the original used `let`.
const exampleWorkload: LocalInput = [{
    input: 'c:/test/quick/1tqn.cif',
    output: 'c:/test/quick/localapi/1tqn_full.cif',
    query: 'full', // same as defined in Api/Queries
}, {
    input: 'c:/test/quick/1tqn.cif',
    output: 'c:/test/quick/localapi/1tqn_full.bcif',
    query: 'full',
    params: { binary: true }
}, {
    input: 'c:/test/quick/1cbs_updated.cif',
    output: 'c:/test/quick/localapi/1cbs_ligint.cif',
    query: 'residueInteraction', // action is case sensitive
    params: { label_comp_id: 'REA' }
}, {
    input: 'c:/test/quick/1cbs_updated.cif', // multiple files that are repeated will only be parsed once
    output: 'c:/test/quick/localapi/1cbs_ligint.bcif',
    query: 'residueInteraction',
    params: { label_comp_id: 'REA', binary: true } // parameters are just a JSON version of the query string
}
];
// CLI entry point: with no job-file argument, print usage plus an example
// workload; otherwise parse the given JSON job list and run it.
if (process.argv.length !== 3) {
    const help = [
        `Usage: `,
        ``,
        `  node local jobs.json`,
        ``,
        `jobs.json is a JSON version of the WebAPI. Query names are case sensitive.`,
        // Fixed typo in the help text: "inputFilenama" -> "inputFilename".
        `The jobs are automatically sorted by inputFilename and the given file is only loaded once.`,
        `All processing errors are sent to stderr.`,
        ``,
        `Jobs example:`,
        ``,
        JSON.stringify(exampleWorkload, null, 2)
    ];
    console.log(help.join('\n'));
} else {
    try {
        const input = JSON.parse(fs.readFileSync(process.argv[2], 'utf8'));
        // NOTE(review): the returned promise is not awaited, so only synchronous
        // errors (bad path, invalid JSON) reach the catch below; runLocal logs
        // its own per-job errors — confirm that is the intended split.
        runLocal(input);
    } catch (e) {
        console.error(e);
    }
}
// TODO: write utility that splits jobs into multiple chunks?
......@@ -9,7 +9,7 @@ import * as compression from 'compression'
import ServerConfig from './config'
import { ConsoleLogger } from 'mol-util/console-logger';
import { PerformanceMonitor } from 'mol-util/performance-monitor';
import { initWebApi } from './server/web-api';
import { initWebApi } from './server/api-web';
import Version from './version'
function setupShutdown() {
......
/**
* Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
*
* @author David Sehnal <david.sehnal@gmail.com>
*/
import * as fs from 'fs';
import * as path from 'path';
import { JobManager, Job } from './jobs';
import { ConsoleLogger } from 'mol-util/console-logger';
import { resolveJob } from './query';
import { StructureCache } from './structure-wrapper';
import { now } from 'mol-task';
import { PerformanceMonitor } from 'mol-util/performance-monitor';
/**
 * One batch of local jobs. Each entry names an input structure file, an
 * output path, the query to run (same names as the web API; case sensitive)
 * and optional query parameters (JSON form of the web query string).
 */
export type LocalInput = {
    input: string,
    output: string,
    query: string,
    params?: any
}[];
/**
 * Runs a batch of jobs sequentially: queues them, sorts so repeated input
 * files are processed together (and each structure is only parsed once),
 * then resolves each query and writes its output file.
 *
 * Per-job errors are logged and the batch continues; progress is logged
 * after every job. All cached structures are expired at the end.
 */
export async function runLocal(input: LocalInput) {
    if (!input.length) {
        ConsoleLogger.error('Local', 'No input');
        return;
    }

    for (const job of input) {
        JobManager.add('_local_', job.input, job.query, job.params || { }, job.output);
    }
    JobManager.sort();

    const started = now();

    let job: Job | undefined = JobManager.getNext();
    let key = job.key;
    let progress = 0;

    while (job) {
        try {
            const encoder = await resolveJob(job);
            const writer = wrapFile(job.outputFilename!);
            encoder.writeTo(writer);
            writer.end();
            ConsoleLogger.logId(job.id, 'Query', 'Written.');
        } catch (e) {
            ConsoleLogger.errorId(job.id, e);
        }

        ConsoleLogger.log('Progress', `[${++progress}/${input.length}] after ${PerformanceMonitor.format(now() - started)}.`);

        // Advance to the next job OUTSIDE the try block. The original only
        // advanced on success, so a job whose query threw was retried in an
        // infinite loop.
        if (JobManager.hasNext()) {
            job = JobManager.getNext();
            // A new input file: drop the previous structure from the cache.
            if (key !== job.key) StructureCache.expire(key);
            key = job.key;
        } else {
            break;
        }
    }

    ConsoleLogger.log('Progress', `Done in ${PerformanceMonitor.format(now() - started)}.`);
    StructureCache.expireAll();
}
/**
 * Wraps an output path in a lazily-opened writer object (the interface the
 * CIF encoder's writeTo expects: writeBinary/writeString/end). The file and
 * any missing parent directories are only created on the first write.
 */
function wrapFile(fn: string) {
    const w = {
        open(this: any) {
            if (this.opened) return;
            makeDir(path.dirname(fn));
            this.file = fs.openSync(fn, 'w');
            this.opened = true;
        },
        writeBinary(this: any, data: Uint8Array) {
            this.open();
            // Buffer.from replaces the deprecated `new Buffer(data)` constructor
            // (same copying semantics for a Uint8Array argument).
            fs.writeSync(this.file, Buffer.from(data));
            return true;
        },
        writeString(this: any, data: string) {
            this.open();
            fs.writeSync(this.file, data);
            return true;
        },
        end(this: any) {
            if (!this.opened || this.ended) return;
            // Fire-and-forget close; errors on close are deliberately ignored.
            fs.close(this.file, function () { });
            this.ended = true;
        },
        file: 0,
        ended: false,
        opened: false
    };
    return w;
}
/**
 * Recursively creates a directory path (handles both '/' and '\' separators),
 * one segment per recursive call. Returns true once the full path exists.
 *
 * Fixes vs. original: the parameter was named `path`, shadowing the imported
 * `path` module; and a failed mkdir was wrapped in `new Error(e)` (losing the
 * original error and stack) — the original error is now rethrown as-is.
 */
function makeDir(dirPath: string, root?: string): boolean {
    const segments = dirPath.split(/\/|\\/g);
    const current = segments.shift();
    root = (root || '') + current + '/';
    try {
        fs.mkdirSync(root);
    } catch (e) {
        // Already-existing directories are fine; anything else is a real failure.
        if (!fs.statSync(root).isDirectory()) throw e;
    }
    return !segments.length || makeDir(segments.join('/'), root);
}
\ No newline at end of file
......@@ -22,10 +22,12 @@ export interface Job {
queryDefinition: QueryDefinition,
normalizedParams: any,
responseFormat: ResponseFormat
responseFormat: ResponseFormat,
outputFilename?: string
}
export function createJob(sourceId: '_local_' | string, entryId: string, queryName: string, params: any): Job {
export function createJob(sourceId: '_local_' | string, entryId: string, queryName: string, params: any, outputFilename?: string): Job {
const queryDefinition = getQueryByName(queryName);
if (!queryDefinition) throw new Error(`Query '${queryName}' is not supported.`);
......@@ -39,7 +41,8 @@ export function createJob(sourceId: '_local_' | string, entryId: string, queryNa
entryId,
queryDefinition,
normalizedParams,
responseFormat: { isBinary: !!params.binary }
responseFormat: { isBinary: !!params.binary },
outputFilename
};
}
......@@ -50,8 +53,8 @@ class _JobQueue {
return this.list.count;
}
add(sourceId: '_local_' | string, entryId: string, queryName: string, params: any) {
const job = createJob(sourceId, entryId, queryName, params);
add(sourceId: '_local_' | string, entryId: string, queryName: string, params: any, outputFilename?: string) {
const job = createJob(sourceId, entryId, queryName, params, outputFilename);
this.list.addLast(job);
return job.id;
}
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment.