Commit f6df5705 authored by David Sehnal

wip ModelServer

parent c636caab
@@ -6,7 +6,6 @@
 import { Column } from 'mol-data/db';
 import { CifWriter } from 'mol-io/writer/cif';
-import Writer from 'mol-io/writer/writer';
 import { StructureQuery, StructureSelection } from 'mol-model/structure';
 import { encode_mmCIF_categories } from 'mol-model/structure/export/mmcif';
 import { now, Progress } from 'mol-task';
@@ -26,12 +25,11 @@ export interface Stats {
 const perf = new PerformanceMonitor();
 
-export async function resolveJob(job: Job, writer: Writer) {
+export async function resolveJob(job: Job): Promise<CifWriter.Encoder<any>> {
     ConsoleLogger.logId(job.id, 'Query', 'Starting.');
 
     const wrappedStructure = await getStructure(job);
-    let startedWriting = false;
 
     try {
         const encoder = CifWriter.createEncoder({ binary: job.responseFormat.isBinary, encoderName: `ModelServer ${Version}` });
         perf.start('query');
@@ -54,8 +52,6 @@ export async function resolveJob(job: Job, writer: Writer) {
         // encoder.setFilter();
         perf.end('encode');
-        ConsoleLogger.logId(job.id, 'Query', 'Encoded.');
-
         const stats: Stats = {
             structure: wrappedStructure,
             queryTimeMs: perf.time('query'),
@@ -64,26 +60,21 @@ export async function resolveJob(job: Job, writer: Writer) {
         encoder.writeCategory(_model_server_stats, [stats]);
         encoder.encode();
-        startedWriting = true;
-        encoder.writeTo(writer);
-        ConsoleLogger.logId(job.id, 'Query', 'Written.');
+        ConsoleLogger.logId(job.id, 'Query', 'Encoded.');
+        return encoder;
     } catch (e) {
         ConsoleLogger.errorId(job.id, e);
-        if (!startedWriting) {
-            doError(job, writer, e);
-        } else {
-            ConsoleLogger.errorId(job.id, 'Error was not relayed to the user because it happened during "write".');
-        }
+        return doError(job, e);
     }
 }
 
-function doError(job: Job, writer: Writer, e: any) {
+function doError(job: Job, e: any) {
     const encoder = CifWriter.createEncoder({ binary: job.responseFormat.isBinary, encoderName: `ModelServer ${Version}` });
     encoder.writeCategory(_model_server_result, [job]);
     encoder.writeCategory(_model_server_params, [job]);
     encoder.writeCategory(_model_server_error, ['' + e]);
     encoder.encode();
-    encoder.writeTo(writer);
+    return encoder;
 }
 
 const maxTime = Config.maxQueryTimeInMs;
...
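Taken together, the hunks above invert the control flow: resolveJob no longer streams into a caller-supplied Writer as the query runs; it fully encodes the result (or, via doError, an error document) and returns the CifWriter.Encoder for the caller to flush. A minimal sketch of the resulting call pattern; the ResponseWriter interface and respond function are illustrative, inferred from the wrapResponse helper further down, not part of the commit:

import { CifWriter } from 'mol-io/writer/cif';
import Writer from 'mol-io/writer/writer';
// Job and resolveJob come from the server's job/query modules (paths not shown in this diff).

// Assumed writer shape: the plain Writer interface plus the two members
// that processNextJob uses below.
interface ResponseWriter extends Writer {
    writeHeader(isBinary: boolean): void;
    end(): void;
}

async function respond(job: Job, writer: ResponseWriter) {
    // Nothing is written until the query has fully run and encoded, so a
    // failure inside resolveJob surfaces as an encoded error document rather
    // than a half-written response. (getStructure runs before resolveJob's
    // try block, so callers should still be prepared to catch.)
    const encoder: CifWriter.Encoder<any> = await resolveJob(job);
    writer.writeHeader(job.responseFormat.isBinary);
    encoder.writeTo(writer);
    writer.end();
}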
@@ -85,7 +85,7 @@ async function parseCif(data: string|Uint8Array) {
 async function readStructure(key: string, sourceId: string, entryId: string) {
     const filename = sourceId === '_local_' ? entryId : Config.mapFile(sourceId, entryId);
     if (!filename) throw new Error(`Could not map '${key}' to a valid filename.`);
-    if (!fs.existsSync(filename)) throw new Error(`Could not map '${key}' to an existing file.`);
+    if (!fs.existsSync(filename)) throw new Error(`Could not find source file for '${key}'.`);
     perf.start('read');
     let data;
...
@@ -18,9 +18,9 @@ function makePath(p: string) {
 function wrapResponse(fn: string, res: express.Response) {
     const w = {
-        doError(this: any, code = 404) {
+        doError(this: any, code = 404, message = 'Not Found.') {
             if (!this.headerWritten) {
-                res.writeHead(code);
+                res.status(code).send(message);
                 this.headerWritten = true;
             }
             this.end();
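Only the doError member of the response wrapper is visible in this hunk. The substance of the change: res.status(code).send(message) sets the status and ships a human-readable body in one step, where the old res.writeHead(code) returned an empty reply. A hypothetical completion of wrapResponse under that reading; everything except doError is guesswork from how processNextJob uses the wrapper:

import * as express from 'express';

function wrapResponse(fn: string, res: express.Response) {
    return {
        headerWritten: false,
        ended: false,
        doError(code = 404, message = 'Not Found.') {
            if (!this.headerWritten) {
                // status() + send() replaces the bare writeHead(): the client
                // now sees the error text, not just a status code.
                res.status(code).send(message);
                this.headerWritten = true;
            }
            this.end();
        },
        // Assumed: a success header naming the downloaded file.
        writeHeader(isBinary: boolean) {
            if (this.headerWritten) return;
            res.writeHead(200, {
                'Content-Type': isBinary ? 'application/octet-stream' : 'text/plain; charset=utf-8',
                'Content-Disposition': `inline; filename="${fn}"`
            });
            this.headerWritten = true;
        },
        end() {
            if (this.ended) return;
            res.end();
            this.ended = true;
        }
    };
}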
@@ -66,33 +66,34 @@ async function processNextJob() {
     const filenameBase = `${job.entryId}_${job.queryDefinition.name.replace(/\s/g, '_')}`
     const writer = wrapResponse(job.responseFormat.isBinary ? `${filenameBase}.bcif` : `${filenameBase}.cif`, response);
     try {
+        const encoder = await resolveJob(job);
         writer.writeHeader(job.responseFormat.isBinary);
-        await resolveJob(job, writer);
+        encoder.writeTo(writer);
     } catch (e) {
         ConsoleLogger.errorId(job.id, '' + e);
-        // TODO: add some error?
-        writer.doError(404);
+        writer.doError(404, '' + e);
     } finally {
         writer.end();
+        ConsoleLogger.logId(job.id, 'Query', 'Finished.');
         setImmediate(processNextJob);
     }
 }
 
 function mapQuery(app: express.Express, queryName: string, queryDefinition: QueryDefinition) {
-    app.get(makePath(':entryId/' + queryName), async (req, res) => {
+    app.get(makePath(':entryId/' + queryName), (req, res) => {
         ConsoleLogger.log('Server', `Query '${req.params.entryId}/${queryName}'...`);
         if (JobManager.size >= Config.maxQueueLength) {
-            // TODO use proper code: server busy
-            res.writeHead(404);
+            res.status(503).send('Too many queries, please try again later.');
             res.end();
             return;
         }
         const jobId = JobManager.add('pdb', req.params.entryId, queryName, req.query);
         responseMap.set(jobId, res);
-        processNextJob();
+        if (JobManager.size === 1) processNextJob();
     });
 }
...
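The scheduling tweak in mapQuery is easy to miss: processNextJob is now kicked only when JobManager.size === 1, i.e. on the queue's empty to non-empty transition, while the setImmediate call in the finally block keeps an already-running loop draining. A self-contained sketch of that single-consumer pattern; queue, enqueue, and drain are illustrative names, and the running guard is an extra safety net beyond what the diff shows:

type Work = () => Promise<void>;

const queue: Work[] = [];
let running = false;

function enqueue(work: Work) {
    queue.push(work);
    // Only the empty -> non-empty transition starts a consumer; a loop that
    // is already draining will pick this item up by itself.
    if (queue.length === 1) void drain();
}

async function drain(): Promise<void> {
    if (running) return; // keep exactly one consumer alive
    running = true;
    try {
        while (queue.length > 0) {
            const work = queue.shift()!;
            try {
                await work();
            } catch (e) {
                console.error(e); // a failed job must not stop the queue
            }
            // Yield between jobs, mirroring setImmediate(processNextJob).
            await new Promise<void>(resolve => setImmediate(resolve));
        }
    } finally {
        running = false;
    }
}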
@@ -36,8 +36,9 @@ function wrapFile(fn: string) {
 async function run() {
     try {
         const request = createJob('_local_', 'e:/test/quick/1cbs_updated.cif', 'residueInteraction', { label_comp_id: 'REA' });
+        const encoder = await resolveJob(request);
         const writer = wrapFile('e:/test/mol-star/1cbs_full.cif');
-        await resolveJob(request, writer);
+        encoder.writeTo(writer);
         writer.end();
     } finally {
         StructureCache.expireAll();
...