diff --git a/exampleData/x.gbz.db b/exampleData/x.gbz.db
new file mode 100644
index 00000000..6542087e
Binary files /dev/null and b/exampleData/x.gbz.db differ
diff --git a/package-lock.json b/package-lock.json
index 2ffa6061..8d2aab2e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -7,6 +7,7 @@
"": {
"name": "sequence-tube-maps",
"version": "0.1.0",
+ "hasInstallScript": true,
"license": "MIT",
"dependencies": {
"@bjorn3/browser_wasi_shim": "^0.2.17",
@@ -34,7 +35,7 @@
"es-dirname": "^0.1.0",
"express": "^4.18.2",
"fs-extra": "^10.1.0",
- "gbz-base": "^0.1.0-alpha.0",
+ "gbz-base": "^0.1.0-alpha.1",
"gh-pages": "^4.0.0",
"markdown-to-jsx": "^7.2.0",
"multer": "^1.4.5-lts.1",
@@ -61,7 +62,8 @@
"uuid": "^9.0.0",
"webpack": "^5.82.0",
"webpack-dev-server": "4.11.1",
- "websocket": "^1.0.34"
+ "websocket": "^1.0.34",
+ "worker-rpc": "^0.2.0"
},
"devDependencies": {
"jest-compact-reporter": "^1.2.9",
@@ -9906,9 +9908,9 @@
}
},
"node_modules/gbz-base": {
- "version": "0.1.0-alpha.0",
- "resolved": "https://registry.npmjs.org/gbz-base/-/gbz-base-0.1.0-alpha.0.tgz",
- "integrity": "sha512-OkIQeQpRH5K5uiNp/4+0eMP5eug/bdsABwVGSHHQLY+YW+I+uPxkMvF2gNx/FBcHKtDZxATZDz6zFDXM2LGT0A=="
+ "version": "0.1.0-alpha.1",
+ "resolved": "https://registry.npmjs.org/gbz-base/-/gbz-base-0.1.0-alpha.1.tgz",
+ "integrity": "sha512-ULedBstK4T9geErTMrkLfeMf/rtMYZ1SabIx+5GAcdq2gZDxFrtzrodtcsP21pqDiNj0VOy/ODo2mZtWlVSwGg=="
},
"node_modules/gensync": {
"version": "1.0.0-beta.2",
@@ -13720,6 +13722,11 @@
"node": ">= 0.6"
}
},
+ "node_modules/microevent.ts": {
+ "version": "0.2.1",
+ "resolved": "https://registry.npmjs.org/microevent.ts/-/microevent.ts-0.2.1.tgz",
+ "integrity": "sha512-YaOQr4V70QzTy3sTRkBUa7+clmN4rMdKs9L5wCCxYjo8gknO/FXhcEX5Pot4IWtAdiZqhxN7vskoywQbAOAkDQ=="
+ },
"node_modules/micromatch": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
@@ -19716,6 +19723,14 @@
"workbox-core": "6.5.4"
}
},
+ "node_modules/worker-rpc": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/worker-rpc/-/worker-rpc-0.2.0.tgz",
+ "integrity": "sha512-S74HnfAdmMlUYmr6+Lx6TmxvffM2vRZSk4RfI/Bxco4xZGw+FREzLRZhFxf8QIzI2/5NKNMn5+Pj69Bp+rweIg==",
+ "dependencies": {
+ "microevent.ts": "~0.2.1"
+ }
+ },
"node_modules/wrap-ansi": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
@@ -26963,9 +26978,9 @@
"integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ=="
},
"gbz-base": {
- "version": "0.1.0-alpha.0",
- "resolved": "https://registry.npmjs.org/gbz-base/-/gbz-base-0.1.0-alpha.0.tgz",
- "integrity": "sha512-OkIQeQpRH5K5uiNp/4+0eMP5eug/bdsABwVGSHHQLY+YW+I+uPxkMvF2gNx/FBcHKtDZxATZDz6zFDXM2LGT0A=="
+ "version": "0.1.0-alpha.1",
+ "resolved": "https://registry.npmjs.org/gbz-base/-/gbz-base-0.1.0-alpha.1.tgz",
+ "integrity": "sha512-ULedBstK4T9geErTMrkLfeMf/rtMYZ1SabIx+5GAcdq2gZDxFrtzrodtcsP21pqDiNj0VOy/ODo2mZtWlVSwGg=="
},
"gensync": {
"version": "1.0.0-beta.2",
@@ -29915,6 +29930,11 @@
"resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
"integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w=="
},
+ "microevent.ts": {
+ "version": "0.2.1",
+ "resolved": "https://registry.npmjs.org/microevent.ts/-/microevent.ts-0.2.1.tgz",
+ "integrity": "sha512-YaOQr4V70QzTy3sTRkBUa7+clmN4rMdKs9L5wCCxYjo8gknO/FXhcEX5Pot4IWtAdiZqhxN7vskoywQbAOAkDQ=="
+ },
"micromatch": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
@@ -34162,6 +34182,14 @@
"workbox-core": "6.5.4"
}
},
+ "worker-rpc": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/worker-rpc/-/worker-rpc-0.2.0.tgz",
+ "integrity": "sha512-S74HnfAdmMlUYmr6+Lx6TmxvffM2vRZSk4RfI/Bxco4xZGw+FREzLRZhFxf8QIzI2/5NKNMn5+Pj69Bp+rweIg==",
+ "requires": {
+ "microevent.ts": "~0.2.1"
+ }
+ },
"wrap-ansi": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
diff --git a/package.json b/package.json
index 9754631b..7cdc60f6 100644
--- a/package.json
+++ b/package.json
@@ -29,7 +29,7 @@
"es-dirname": "^0.1.0",
"express": "^4.18.2",
"fs-extra": "^10.1.0",
- "gbz-base": "^0.1.0-alpha.0",
+ "gbz-base": "^0.1.0-alpha.1",
"gh-pages": "^4.0.0",
"markdown-to-jsx": "^7.2.0",
"multer": "^1.4.5-lts.1",
@@ -56,7 +56,8 @@
"uuid": "^9.0.0",
"webpack": "^5.82.0",
"webpack-dev-server": "4.11.1",
- "websocket": "^1.0.34"
+ "websocket": "^1.0.34",
+ "worker-rpc": "^0.2.0"
},
"scripts": {
"start": "concurrently -n frontend,backend -c red,green 'HOST=${HOST:=127.0.0.1} PORT=${PORT:=3001} react-scripts start' 'npm:serve'",
diff --git a/src/App.js b/src/App.js
index b92cc3ce..43f7c821 100644
--- a/src/App.js
+++ b/src/App.js
@@ -14,8 +14,8 @@ import Footer from "./components/Footer";
import { dataOriginTypes } from "./enums";
import "./config-client.js";
import { config } from "./config-global.mjs";
-import ServerAPI from "./ServerAPI.mjs";
-import { GBZBaseAPI } from "./GBZBaseAPI.mjs";
+import ServerAPI from "./api/ServerAPI.mjs";
+import { LocalAPI } from "./api/LocalAPI.mjs";
const EXAMPLE_TRACKS = [
// Fake tracks for the generated examples.
@@ -47,19 +47,6 @@ class App extends Component {
constructor(props) {
super(props);
- // See if the WASM API is available.
- // Right now this just tests and logs, but eventually we will be able to use it.
- let gbzApi = new GBZBaseAPI();
- gbzApi.available().then((working) => {
- if (working) {
- console.log("WASM API implementation available!");
- } else {
- console.error("WASM API implementation not available!");
- }
- });
-
- this.APIInterface = new ServerAPI(props.apiUrl);
-
console.log("App component starting up with API URL: " + props.apiUrl);
// Set defaultViewTarget to either URL params (if present) or the first example
@@ -83,8 +70,68 @@ class App extends Component {
colorSchemes: getColorSchemesFromTracks(this.defaultViewTarget.tracks),
mappingQualityCutoff: 0,
},
+ APIInterface: new ServerAPI(props.apiUrl)
};
}
+
+ /**
+ * Set which API implementation to query for graph data.
+ *
+ * Mode can be "local" or "server".
+ */
+ setAPIMode(mode) {
+ this.setState((state) => {
+ if (mode !== this.getAPIMode(state)) {
+ if (mode === "local") {
+ // Make a local API
+ return {
+ APIInterface: new LocalAPI(),
+ // Set up an empty view target that can't really render.
+ // TODO: Let us control HeaderForm's dataType state so we can pop it right over to custom, or feed it a different defaultViewTarget
+ dataOrigin: dataOriginTypes.API,
+ viewTarget: {
+ tracks: []
+ },
+ visOptions: {
+ ...state.visOptions,
+ colorSchemes: [],
+ },
+ };
+ } else if (mode === "server") {
+ // Make a server API
+ return {
+ APIInterface: new ServerAPI(this.props.apiUrl),
+ // Also reset to a current view target this can show
+ dataOrigin: dataOriginTypes.API,
+ viewTarget: this.defaultViewTarget,
+ visOptions: {
+ ...state.visOptions,
+ colorSchemes: getColorSchemesFromTracks(this.defaultViewTarget.tracks),
+ },
+ };
+ } else {
+          throw new Error("Unimplemented API mode: " + mode);
+ }
+ }
+ });
+ }
+
+ /**
+ * Get the string describing the current API mode ("local" or "server"),
+ * given the state (by default the current state).
+ */
+ getAPIMode(state) {
+ if (state === undefined) {
+ state = this.state;
+ }
+ if (state.APIInterface instanceof LocalAPI) {
+ return "local";
+ } else if (state.APIInterface instanceof ServerAPI) {
+ return "server";
+ } else {
+ throw new Error("Unnamed API implementation: " + state.APIInterface);
+ }
+ }
/*
* Drop undefined values
@@ -197,13 +244,13 @@ class App extends Component {
dataOrigin={this.state.dataOrigin}
defaultViewTarget={this.defaultViewTarget}
getCurrentViewTarget={this.getCurrentViewTarget}
- APIInterface={this.APIInterface}
+ APIInterface={this.state.APIInterface}
/>
diff --git a/src/App.test.js b/src/App.test.js
index 8ec0a41c..ab282838 100644
--- a/src/App.test.js
+++ b/src/App.test.js
@@ -11,7 +11,7 @@ import { fetchAndParse } from "./fetchAndParse";
// We want to be able to replace the `fetchAndParse` that *other* files see,
// and we want to use *different* implementations for different tests in this
// file. We can mock it with Jest, but Jest will move this call before the
-// imports when runnin the tests, so we can't access any file-level variables
+// imports when running the tests, so we can't access any file-level variables
// in it. So we need to do some sneaky global trickery.
// Register the given replacement function to be called instead of fetchAndParse.
@@ -25,7 +25,7 @@ function clearFetchAndParseMock() {
}
jest.mock("./fetchAndParse", () => {
- // This dispatcher will replace fetchAndParse when we or anyone eles imports it.
+ // This dispatcher will replace fetchAndParse when we or anyone else imports it.
function fetchAndParseDispatcher() {
// Ge tthe real fetchAndParse
const { fetchAndParse } = jest.requireActual("./fetchAndParse");
diff --git a/src/GBZBaseAPI.mjs b/src/GBZBaseAPI.mjs
deleted file mode 100644
index 36d4c2e6..00000000
--- a/src/GBZBaseAPI.mjs
+++ /dev/null
@@ -1,210 +0,0 @@
-import { APIInterface } from "./APIInterface.mjs";
-import { WASI, File, OpenFile } from "@bjorn3/browser_wasi_shim";
-
-// TODO: The Webpack way to get the WASM would be something like:
-//import QueryWasm from "gbz-base/target/wasm32-wasi/release/query.wasm";
-// if the export mapping is broken, or
-//import QueryWasm from "gbz-base/query.wasm";
-// if it is working. In Jest, not only is the export mapping not working, but
-// also it can't get us a fetch-able string from the import like Webpack does.
-// So we will need some fancy Jest config to mock the WASM file into a js
-// module that does *something*, and also to mock fetch() into something that
-// can fetch it. Or else we need to hide that all behind something that can
-// fetch the WASM on either Webpack or Jest with its own strategies/by being
-// swapped out.
-
-// Resolve with the bytes or Response of the WASM query blob, on Jest or Webpack.
-async function getWasmBytes() {
- let blobBytes = null;
-
- if (!window["jest"]) {
- // Not running on Jest, we should be able to dynamic import a binary asset
- // by export name and get the bytes, and Webpack will handle it.
- try {
- let blobImport = await import("gbz-base/query.wasm");
- return fetch(blobImport.default);
- } catch (e) {
- console.error("Could not dynamically import WASM blob.", e);
- // Leave blobBytes unset to try a fallback method.
- }
- }
-
- if (!blobBytes) {
- // Either we're on Jest, or the dynamic import didn't work (maybe we're on
- // plain Node?).
- //
- // Try to open the file from the filesystem.
- //
- // Don't actually try and ship the filesystem module in the browser though:
- // see
- let fs = await import(/* webpackIgnore: true */ "fs-extra");
- blobBytes = await fs.readFile("node_modules/gbz-base/target/wasm32-wasi/release/query.wasm");
- }
-
- console.log("Got blob bytes: ", blobBytes);
- return blobBytes;
-}
-
-/**
- * API implementation that uses tools compiled to WebAssembly, client-side.
- */
-export class GBZBaseAPI extends APIInterface {
- constructor() {
- super();
-
- // We can take user uploads, in which case we need to hold on to them somewhere.
- // This holds all the file objects.
- this.files = [];
-
- // We need to index all their names by type.
- this.filesByType = {};
-
- // This is a promise for the compiled WebAssembly blob.
- this.compiledWasm = undefined;
- }
-
- // Make sure our WASM backend is ready.
- async setUp() {
- if (this.compiledWasm === undefined) {
- // Kick off and save exactly one request to get and load the WASM bytes.
- this.compiledWasm = getWasmBytes().then((result) => {
- if (result instanceof Response) {
- // If a fetch request was made, compile as it streams in
- return WebAssembly.compileStreaming(result);
- } else {
- // We have all the bytes, so compile right away.
- // TODO: Put this logic in the function?
- return WebAssembly.compile(result);
- }
- });
- }
-
- // Wait for the bytes to be available.
- this.compiledWasm = await this.compiledWasm;
- }
-
- // Make a call into the WebAssembly code and return the result.
- async callWasm(argv) {
- if (argv.length < 1) {
- // We need at least one command line argument to be the program name.
- throw new Error("Not safe to invoke main() without program name");
- }
-
- // Make sure this.compiledWasm is set.
- // TODO: Change to an accessor method?
- await this.setUp();
-
- // Define the places to store program input and output
- let stdin = new File([]);
- let stdout = new File([]);
- let stderr = new File([]);
-
- // Environment variables as NAME=value strings
- const environment = ["RUST_BACKTRACE=full"];
-
- // File descriptors for the process in number order
- let file_descriptors = [new OpenFile(stdin), new OpenFile(stdout), new OpenFile(stderr)];
-
- // Set up the WASI interface
- let wasi = new WASI(argv, environment, file_descriptors);
-
- // Set up the WebAssembly run
- let instantiation = await WebAssembly.instantiate(this.compiledWasm, {
- "wasi_snapshot_preview1": wasi.wasiImport,
- });
-
- try {
- // Make the WASI system call main
- let returnCode = wasi.start(instantiation);
- console.log("Return code:", returnCode);
- } finally {
- // The WASM code can throw right out of the WASI shim if Rust panics.
- console.log("Standard Output:", new TextDecoder().decode(stdout.data));
- console.log("Standard Error:", new TextDecoder().decode(stderr.data));
- }
- }
-
- // Return true if the WASM setup is working, and false otherwise.
- async available() {
- try {
- await this.callWasm(["query", "--help"]);
- return true;
- } catch {
- return false;
- }
- }
-
- /////////
- // Tube Map API implementation
- /////////
-
- async getChunkedData(viewTarget, cancelSignal) {
- return {
- graph: {},
- gam: {},
- region: null,
- coloredNodes: [],
- };
- }
-
- async getFilenames(cancelSignal) {
- // Set up an empty response.
- let response = {
- files: [],
- bedFiles: [],
- };
-
- for (let type of this.filesByType) {
- if (type === "bed") {
- // Just send all these files in bedFiles.
- response.bedFiles = this.filesByType[type];
- } else {
- for (let fileName of this.filesByType[type]) {
- // We sens a name/type record for each non-BED file
- response.files.push({ name: fileName, type: type });
- }
- }
- }
-
- return response;
- }
-
- subscribeToFilenameChanges(handler, cancelSignal) {
- return {};
- }
-
- async putFile(fileType, file, cancelSignal) {
- // We track files just by array index.
- let fileName = this.files.length.toString();
- // Just hang on to the File object.
- this.files.push(file);
-
- if (this.filesByType[fileType] === undefined) {
- this.filesByType[fileType] = [];
- }
- // Index the name we produced by type.
- this.filesByType[fileType].push(fileName);
-
- return fileName;
- }
-
- async getBedRegions(bedFile, cancelSignal) {
- return {
- bedRegions: [],
- };
- }
-
- async getPathNames(graphFile, cancelSignal) {
- return {
- pathNames: [],
- };
- }
-
- async getChunkTracks(bedFile, chunk, cancelSignal) {
- return {
- tracks: [],
- };
- }
-}
-
-export default GBZBaseAPI;
diff --git a/src/GBZBaseAPI.test.js b/src/GBZBaseAPI.test.js
deleted file mode 100644
index e8e3562c..00000000
--- a/src/GBZBaseAPI.test.js
+++ /dev/null
@@ -1,38 +0,0 @@
-import { GBZBaseAPI } from "./GBZBaseAPI.mjs";
-
-import fs from "fs-extra";
-
-it("can be constructed", () => {
- let api = new GBZBaseAPI();
-});
-
-it("can self-test its WASM setup", async () => {
- let api = new GBZBaseAPI();
- let working = await api.available();
- expect(working).toBeTruthy();
-});
-
-it("can have a file uploaded", async () => {
- let api = new GBZBaseAPI();
-
- // We need to make sure we make a jsdom File (which is a jsdom Blob), and not
- // a Node Blob, for our test file. Otherwise it doesn't work with jsdom's
- // upload machinery.
- // See for example for
- // background on the many flavors of Blob.
- const fileData = await fs.readFileSync("exampleData/cactus.vg");
- // Since a Node Buffer is an ArrayBuffer, we can use it to make a jsdom File.
- // We need to put the data block in an enclosing array, or else the block
- // will be iterated and each byte will be stringified and *those* bytes will
- // be uploaded.
- const file = new window.File([fileData], "cactus.vg", {
- type: "application/octet-stream",
- });
-
- // Set up for canceling the upload
- let controller = new AbortController();
-
- let uploadName = await api.putFile("graph", file, controller.signal);
-
- expect(uploadName).toBeTruthy();
-});
diff --git a/src/Types.ts b/src/Types.ts
index 9ac28e0a..09746e98 100644
--- a/src/Types.ts
+++ b/src/Types.ts
@@ -7,7 +7,7 @@ type DataPath = "mounted" | "default" | "upload";
// Describes whether a built-in example, user uploaded file, mounted, or synthetic example
// Fills input for the "Data:" dropdown in the HeaderForm
-type DataType = "built-in" | "file-upload" | "mounted files" | "examples";
+type DataType = "built-in" | "mounted files" | "examples";
// Possible filestypes taken from the request
// Files like GBZ contains graph and maybe haplotype and so can be either
@@ -84,4 +84,4 @@ type ColorScheme = {
// Entries correspond to their track counterpart, e.g colorSchemes[0] corresponds to tracks[0]
type colorSchemes = {
trackID: ColorScheme;
-}
\ No newline at end of file
+}
diff --git a/src/APIInterface.mjs b/src/api/APIInterface.mjs
similarity index 96%
rename from src/APIInterface.mjs
rename to src/api/APIInterface.mjs
index e0a44d43..e6591b54 100644
--- a/src/APIInterface.mjs
+++ b/src/api/APIInterface.mjs
@@ -6,6 +6,8 @@ export class APIInterface {
// Expects a object to be returned with the necessary information to draw a tubemap from vg
// object should contain keys: graph, gam, region, coloredNodes.
// cancelSignal is an AbortSignal that can be used to cancel the request.
+ // If the request is not structured correctly, or something goes wrong
+ // internally, throws an Error.
async getChunkedData(viewTarget, cancelSignal) {
throw new Error("getChunkedData function not implemented");
}
diff --git a/src/api/GBZBaseAPI.mjs b/src/api/GBZBaseAPI.mjs
new file mode 100644
index 00000000..6e3a3a27
--- /dev/null
+++ b/src/api/GBZBaseAPI.mjs
@@ -0,0 +1,508 @@
+/**
+ * GBZBase-based API implementation. Designed to run in a worker efficiently.
+ */
+
+// Use this magic comment to tell eslint we are allowed to use worker things
+// during the Webpack build.
+/* eslint-env worker */
+
+import "../config-client.js";
+import { APIInterface } from "./APIInterface.mjs";
+import { WASI, File, OpenFile, SyncOPFSFile, PreopenDirectory } from "@bjorn3/browser_wasi_shim";
+
+import {
+ parseRegion,
+ convertRegionToRangeRegion,
+ stringifyRegion
+} from "../common.mjs";
+
+// The Webpack way to get the WASM would be something like:
+//
+// import QueryWasm from "gbz-base/query.wasm";
+//
+// In Jest, not only is the export mapping not working, but also it can't get
+// us a fetch-able string from the import like Webpack does.
+//
+// So we use this function to detect if we are on Jest and get the blob from
+// the filesystem then, and to otherwise get it with fetch.
+
+// Resolve with the bytes or Response of the WASM query blob, on Jest or Webpack.
+async function getWasmBytes() {
+ if (getWasmBytes.cached) {
+ return getWasmBytes.cached;
+ }
+
+ let blobBytes = null;
+
+ if (typeof window === "undefined" || !window["jest"]) {
+ // Not running on Jest, we should be able to dynamic import a binary asset
+ // by export name and get the bytes, and Webpack will handle it.
+ try {
+ let blobImport = await import("gbz-base/query.wasm");
+ return fetch(blobImport.default);
+ } catch (e) {
+ console.error("Could not dynamically import WASM blob.", e);
+ // Leave blobBytes unset to try a fallback method.
+ }
+ }
+
+ if (!blobBytes) {
+ // Either we're on Jest, or the dynamic import didn't work (maybe we're on
+ // plain Node?).
+ //
+ // Try to open the file from the filesystem.
+ //
+ // Don't actually try and ship the filesystem module in the browser though:
+ // see
+ let fs = await import(/* webpackIgnore: true */ "fs-extra");
+ blobBytes = await fs.readFile("node_modules/gbz-base/target/wasm32-wasi/release/query.wasm");
+ }
+
+ console.log("Got blob bytes: ", blobBytes);
+ getWasmBytes.cached = blobBytes;
+ return blobBytes;
+}
+
+/**
+ * Get an ArrayBuffer from a Blob. Works in both the browser and in jsdom
+ * (which doesn't actually implement .arrayBuffer(); see
+ * ).
+ */
+export async function blobToArrayBuffer(blob) {
+ try {
+ // Browser blob has this method.
+ return await blob.arrayBuffer()
+ } catch {
+ // jsdom blob needs to go through a FileReader
+ return new Promise((resolve, reject) => {
+ let reader = new FileReader();
+ reader.addEventListener("load", () => { resolve(reader.result); });
+ reader.addEventListener("error", () => { reject(reader.error); });
+ reader.readAsArrayBuffer(blob);
+ });
+ }
+}
+
+/**
+ * Convert a graph from GBZ-style JSON to vg-style JSON that matches the vg
+ * protobuf schema. See
+ *
+ *
+ * Does not leave the input graph intact.
+ */
+function convertSchema(inGraph) {
+
+ let outGraph = {};
+
+ // "nodes" becomes "node"
+ outGraph["node"] = inGraph["nodes"];
+
+  // We have to track the node lengths to synthesize the path mappings.
+ let nodeLength = new Map();
+ for (let node of outGraph["node"]) {
+ nodeLength.set(node["id"], node["sequence"].length);
+ }
+
+ // "edges" becomes "edge"
+ outGraph["edge"] = inGraph["edges"];
+ for (let edge of outGraph["edge"]) {
+ // And the names for the reverse flags change.
+ edge["from_start"] = edge["from_is_reverse"];
+ delete edge["from_is_reverse"];
+ edge["to_end"] = edge["to_is_reverse"];
+ delete edge["to_is_reverse"];
+ }
+
+ // "paths" becomes "path"
+ outGraph["path"] = inGraph["paths"];
+ for (let path of outGraph["path"]) {
+ path["mapping"] = [];
+ for (let visit of path["path"]) {
+ let length = nodeLength.get(visit["id"]);
+ // Make a full-length perfect match mapping
+ let mapping = {
+ "position": {"node_id": visit["id"], "is_reverse": visit["is_reverse"]},
+ "edit": [{"from_length": length, "to_length": length}]
+ };
+ path["mapping"].push(mapping);
+ }
+
+ delete path["path"];
+ }
+ return outGraph;
+}
+
+/**
+ * Implementation of a WASI Browser Shim file that is backed by FileReaderSync and operates on a backing Blob.
+ *
+ * Read-only.
+ *
+ * We extend SyncOPFSFile because then we can use OpenSyncOPFSFile for free, as long as we re-implement this.handle.
+ */
+class SyncWorkerBlobFile extends SyncOPFSFile {
+ constructor(backing_blob) {
+ super(new FileSystemSyncAccessHandlePolyfill(backing_blob), {readonly: true});
+ }
+}
+
+/**
+ * Implementation of the API of FileSystemSyncAccessHandle
+ * ,
+ * as used by Browser WASI Shim
+ * .
+ *
+ * Implements the API on top of a Blob, using FileReaderSync.
+ *
+ * Only allows read access.
+ */
+class FileSystemSyncAccessHandlePolyfill {
+ /**
+ * Make a new FileSystemSyncAccessHandlePolyfill acting like a
+ * FileSystemSyncAccessHandle to the file represented by the given Blob.
+ */
+ constructor(blob) {
+ // Start open
+ this.closed = false;
+ // Save the blob
+ this.blob = blob;
+ // Make sure right away we actually can have a FileReaderSync
+ this.reader = new FileReaderSync();
+ }
+
+ /**
+ * Close the file.
+ */
+ close() {
+ this.closed = true;
+ }
+
+ /**
+ * Flush changes to the file to disk.
+ *
+ * Not implemented since we are read-only.
+ */
+ flush() {
+ throw new Error("Flush not implemented; blobs are read only");
+ }
+
+ /**
+ * Get the size of the file in bytes as a number.
+ */
+ getSize() {
+ if (this.closed) {
+ throw new Error("Can't get size of closed file");
+ }
+
+ return this.blob.size;
+ }
+
+ /**
+ * Read into the given ArrayBuffer or ArrayBufferView.
+ *
+ * Starts at 0 in the file, unless "at" is set in options, in which case it
+ * starts there in the file.
+ *
+ * Tries to fill the whole buffer/view.
+ *
+ * Returns the number of bytes read.
+ */
+ read(buffer, options) {
+ if (this.closed) {
+ throw new Error("Can't read closed file");
+ }
+
+ // Use the actual buffer we got with offset 0, or get the buffer and offset
+ // out of the view
+ let destinationBuffer = buffer.buffer ?? buffer;
+ let destinationOffest = buffer.byteOffset ?? 0;
+
+ // Where should we start in the file
+ let startByte = options?.at ?? 0;
+
+ // How many bytes are we going to move?
+ //
+ // ArrayBuffer and ArrayBufferView both have a byteLength to see how much
+ // we were asked for.
+ //
+ // But we can't read past the end of the Blob.
+ let length = Math.min(buffer.byteLength, this.blob.size - startByte);
+
+ // Slice the blob to the part we want to read.
+ let partBlob = this.blob.slice(startByte, startByte + length);
+
+ // And read into a new ArrayBuffer using the sync reader.
+ let partBuffer = this.reader.readAsArrayBuffer(partBlob);
+
+ // Now blit from that buffer into the destination
+ let destinationArray = new Uint8Array(destinationBuffer, destinationOffest, length);
+ let sourceArray = new Uint8Array(partBuffer, 0, length);
+ destinationArray.set(sourceArray);
+
+ // Return the length we thought we could do
+ return length;
+ }
+
+ /**
+ * Truncate the file to the given number of bytes.
+ *
+ * Not implemented since we are read-only.
+ */
+  truncate(to) {
+ throw new Error("Truncate not implemented; blobs are read only");
+ }
+
+ /**
+ * Write the given buffer or view's contents to the file.
+ *
+ * Writes at the start of the file, unless at is set in options, in which
+ * case it writes at that position in the file. Returns the number of bytes
+ * written.
+ *
+ * Not actually implemented since we are read-only.
+ */
+ write(buffer, options) {
+ throw new Error("Write not implemented; blobs are read only");
+ }
+
+}
+
+/**
+ * API implementation that uses tools compiled to WebAssembly, client-side.
+ *
+ * Can operate either in the main thread or in a worker, but handles file
+ * uploads differently depending on where you put it.
+ */
+export class GBZBaseAPI extends APIInterface {
+ constructor() {
+ super();
+
+ // We can take user uploads, in which case we need to hold on to them somewhere.
+ // This holds all the file objects.
+ this.files = [];
+
+ // We need to index all their names by type.
+ this.filesByType = new Map();
+
+ // This is a promise for the compiled WebAssembly blob.
+ this.compiledWasm = undefined;
+ }
+
+ // Make sure our WASM backend is ready.
+ async setUp() {
+ if (this.compiledWasm === undefined) {
+ // Kick off and save exactly one request to get and load the WASM bytes.
+ this.compiledWasm = getWasmBytes().then((result) => {
+ if (result instanceof Response) {
+ // If a fetch request was made, compile as it streams in
+ return WebAssembly.compileStreaming(result);
+ } else {
+ // We have all the bytes, so compile right away.
+ // TODO: Put this logic in the function?
+ return WebAssembly.compile(result);
+ }
+ });
+ }
+
+ // Wait for the bytes to be available.
+ this.compiledWasm = await this.compiledWasm;
+ }
+
+ // Make a call into the WebAssembly code and return the result.
+ //
+ // If workingDirectory is set, it is an object from filename to blob to
+ // present as the current directory.
+ async callWasm(argv, workingDirectory) {
+ if (argv.length < 1) {
+ // We need at least one command line argument to be the program name.
+ throw new Error("Not safe to invoke main() without program name");
+ }
+
+ // Make sure this.compiledWasm is set.
+ // TODO: Change to an accessor method?
+ await this.setUp();
+
+ // Define the places to store program input and output
+ let stdin = new File([]);
+ let stdout = new File([]);
+ let stderr = new File([]);
+
+ // Environment variables as NAME=value strings
+ const environment = ["RUST_BACKTRACE=full"];
+
+ // File descriptors for the process in number order
+ let fileDescriptors = [new OpenFile(stdin), new OpenFile(stdout), new OpenFile(stderr)];
+
+ if (workingDirectory) {
+ let nameToWASIFile = {};
+ for (const [filename, blob] of Object.entries(workingDirectory)) {
+ console.log(`Mount ${blob.size} byte blob:`, blob);
+ if (typeof FileReaderSync !== "undefined") {
+ // On a worker where we can do sync reads
+ nameToWASIFile[filename] = new SyncWorkerBlobFile(blob);
+ } else {
+ // In the main thread where we can't do sync reads
+ console.warn("Sync blob read is not available. Reading " + blob.size + " byte blob into memory asynchronously to consult synchronously later!");
+ nameToWASIFile[filename] = new File(await blobToArrayBuffer(blob));
+ }
+ console.log("Mount file:", nameToWASIFile[filename]);
+ }
+ // As shown in the browser_wasi_shim examples, if we provide a
+ // PreopenDirectory at FD 4 it is shown to the process.
+ fileDescriptors.push(new PreopenDirectory(".", nameToWASIFile));
+ }
+
+ // Set up the WASI interface
+ let wasi = new WASI(argv, environment, fileDescriptors);
+
+ // Set up the WebAssembly run
+ let instantiation = await WebAssembly.instantiate(this.compiledWasm, {
+ "wasi_snapshot_preview1": wasi.wasiImport,
+ });
+
+ console.log("Running WASM with arguments:", argv)
+ console.log("Running WASM with FDs:", fileDescriptors)
+
+ let returnCode = null;
+ let stdOutText = null;
+ let stdErrText = null;
+
+ try {
+ // Make the WASI system call main
+ returnCode = wasi.start(instantiation);
+ // TODO: the shim logs loads of attempts to make/open the lock file, is it maybe not being allowed to be read back?
+ // TODO: Our return code is undefined for some reason; it is supposed to come out of start.
+ console.log("Execution finished with return code:", returnCode);
+ } finally {
+ // The WASM code can throw right out of the WASI shim if Rust panics.
+ stdOutText = new TextDecoder().decode(stdout.data);
+ stdErrText = new TextDecoder().decode(stderr.data);
+ console.log("Standard Output:", stdOutText);
+ console.log("Standard Error:", stdErrText);
+ }
+
+ return {returnCode, stdout: stdOutText, stderr: stdErrText}
+ }
+
+ // Return true if the WASM setup is working, and false otherwise.
+ async available() {
+ try {
+ await this.callWasm(["query", "--help"]);
+ return true;
+ } catch {
+ return false;
+ }
+ }
+
+ /////////
+ // Tube Map API implementation
+ /////////
+
+ async getChunkedData(viewTarget, cancelSignal) {
+
+ console.log("Got view target:", viewTarget)
+
+ // Find the graph track
+ let graphTrack = null
+ // TODO: We need to handle object tracks; move to array tracks only!
+ for (let trackKey in viewTarget.tracks) {
+ let track = viewTarget.tracks[trackKey];
+ if (track.trackType === "graph") {
+ graphTrack = track;
+ }
+ }
+ if (!graphTrack) {
+ throw new Error("No graph track selected");
+ }
+
+ // Since all the names are numbers, parse it and get the real file blob
+ let graphFileBlob = this.files[parseInt(graphTrack.trackFile)];
+
+ if (graphFileBlob === undefined) {
+ throw new Error("Graph file " + graphTrack.trackFile + " does not exist");
+ }
+
+ // Find the region
+ let region = convertRegionToRangeRegion(parseRegion(viewTarget.region));
+
+ if (!region.contig.includes("#")) {
+ // This isn't PanSN already so adjust to ask for a generic path.
+ region.contig = "_gbwt_ref#" + region.contig;
+ }
+
+ let parts = region.contig.split("#");
+
+ let {stdout} = await this.callWasm(["query", "--sample", parts[0], "--contig", parts[parts.length - 1], "--interval", `${region.start}..${region.end}`, "--format", "json", "--distinct", "graph.gbz.db"], {"graph.gbz.db": graphFileBlob});
+
+ let result = convertSchema(JSON.parse(stdout));
+
+ return {
+ graph: result,
+ gam: [],
+ region: stringifyRegion(region),
+ coloredNodes: [],
+ };
+ }
+
+ async getFilenames(cancelSignal) {
+ // Set up an empty response.
+ let response = {
+ files: [],
+ bedFiles: [],
+ };
+
+ for (let [type, files] of this.filesByType) {
+ if (type === "bed") {
+ // Just send all these files in bedFiles.
+ response.bedFiles = files;
+ } else {
+ for (let fileName of files) {
+          // We send a name/type record for each non-BED file
+ response.files.push({ name: fileName, type: type });
+ }
+ }
+ }
+
+ return response;
+ }
+
+ subscribeToFilenameChanges(handler, cancelSignal) {
+ return {};
+ }
+
+ async putFile(fileType, file, cancelSignal) {
+ // We track files just by array index.
+ let fileName = this.files.length.toString();
+ // Just hang on to the File object.
+ this.files.push(file);
+
+ console.log(`Store ${file.size} byte upload:`, file);
+
+ if (!this.filesByType.has(fileType)) {
+ this.filesByType.set(fileType, []);
+ }
+ // Index the name we produced by type.
+ this.filesByType.get(fileType).push(fileName);
+
+ return fileName;
+ }
+
+ async getBedRegions(bedFile, cancelSignal) {
+ return {
+ bedRegions: [],
+ };
+ }
+
+ async getPathNames(graphFile, cancelSignal) {
+ return {
+ pathNames: [],
+ };
+ }
+
+ async getChunkTracks(bedFile, chunk, cancelSignal) {
+ return {
+ tracks: [],
+ };
+ }
+}
+
+export default GBZBaseAPI;
diff --git a/src/api/GBZBaseAPI.test.js b/src/api/GBZBaseAPI.test.js
new file mode 100644
index 00000000..b6c87a98
--- /dev/null
+++ b/src/api/GBZBaseAPI.test.js
@@ -0,0 +1,87 @@
+import { GBZBaseAPI, blobToArrayBuffer } from "./GBZBaseAPI.mjs";
+
+import fs from "fs-extra";
+
+it("can be constructed", () => {
+ let api = new GBZBaseAPI();
+});
+
+it("can self-test its WASM setup", async () => {
+ let api = new GBZBaseAPI();
+ let working = await api.available();
+ expect(working).toBeTruthy();
+});
+
+it("can have a file uploaded", async () => {
+ let api = new GBZBaseAPI();
+
+ // We need to make sure we make a jsdom File (which is a jsdom Blob), and not
+ // a Node Blob, for our test file. Otherwise it doesn't work with jsdom's
+ // upload machinery.
+ // See for example the background on the many flavors of Blob.
+ const fileData = await fs.readFileSync("exampleData/x.gbz.db");
+ // Since a Node Buffer is an ArrayBuffer, we can use it to make a jsdom File.
+ // We need to put the data block in an enclosing array, or else the block
+ // will be iterated and each byte will be stringified and *those* bytes will
+ // be uploaded.
+ const file = new window.File([fileData], "x.gbz.db", {
+ type: "application/octet-stream",
+ });
+
+ // Set up for canceling the upload
+ let controller = new AbortController();
+
+ let uploadName = await api.putFile("graph", file, controller.signal);
+
+ expect(uploadName).toBeTruthy();
+});
+
+describe("when a file is uploaded", () => {
+ let uploadName = null;
+ const api = new GBZBaseAPI();
+
+ beforeAll(async () => {
+ const fileData = await fs.readFileSync("exampleData/x.gbz.db");
+ const file = new window.File([fileData], "x.gbz.db", {
+ type: "application/octet-stream",
+ });
+
+ // Make sure the file actually is readable
+ let fileDataRetrieved = await blobToArrayBuffer(file);
+ if (fileDataRetrieved.byteLength != fileData.length) {
+ throw new Error("Can't put data into and out of jsdom File");
+ }
+
+ let controller = new AbortController();
+ uploadName = await api.putFile("graph", file, controller.signal);
+ });
+
+ it("should show up in the list of files", async () => {
+ let fileNames = await api.getFilenames();
+ let found = false;
+ for (let file of fileNames.files) {
+ if (file.name === uploadName) {
+ expect(file.type).toEqual("graph");
+ found = true;
+ }
+ }
+ expect(found).toBeTruthy();
+ });
+
+ it("can be asked for a view", async () => {
+ const region = "_gbwt_ref#x:1-10";
+ const viewTarget = {
+ "dataType": "mounted files",
+ "tracks": [
+ {"trackFile": uploadName, "trackType": "graph"}
+ ],
+ "region": "x:1-10"
+ };
+ let controller = new AbortController();
+ let view = await api.getChunkedData(viewTarget, controller.signal);
+
+ expect(view.graph).toBeTruthy();
+ expect(view.graph.node).toBeTruthy();
+ });
+});
diff --git a/src/api/LocalAPI.mjs b/src/api/LocalAPI.mjs
new file mode 100644
index 00000000..d885412c
--- /dev/null
+++ b/src/api/LocalAPI.mjs
@@ -0,0 +1,112 @@
+import { APIInterface } from "./APIInterface.mjs";
+
+import { makeWorker } from "./local/WorkerFactory.js";
+
+import { RpcProvider } from "worker-rpc";
+
+
+/**
+ * API implementation that uses a web worker to run a GBZBaseAPI.
+ */
+export class LocalAPI extends APIInterface {
+ constructor() {
+ super();
+
+ // Make a worker
+ this.worker = makeWorker();
+
+ // Make an RPC channel to the worker
+ this.rpc = new RpcProvider((message, transfer) => {
+ this.worker.postMessage(message, transfer);
+ });
+
+ // Hook up the incoming messages to the provider.
+ this.worker.addEventListener("message", (e) => {
+ return this.rpc.dispatch(e.data);
+ });
+
+ // Each call that can be canceled gets an ID for canceling it over the
+ // channel to the worker.
+ this.nextID = 0;
+
+ // File name change subscriptions go through this EventTarget
+ this.nameChangeEvents = new EventTarget();
+
+ this.rpc.registerRpcHandler("_filename_change", async () => {
+ // If a filename change message comes in from the worker, tell our
+ // subscribers.
+ this.nameChangeEvents.dispatchEvent(new CustomEvent("change"));
+ });
+ }
+
+ /// Get a fresh RPC cancelation ID that will be canceled if the given
+ /// AbortSignal aborts. If no AbortSignal is passed, returns undefined.
+ getCancelID(signal) {
+ if (signal === undefined) {
+ // We don't need an ID, this request is uncancelable.
+ return undefined;
+ }
+ let cancelID = this.nextID;
+ this.nextID++;
+ signal.addEventListener("abort", () => {
+ this.rpc.rpc("_cancel", {cancelID});
+ });
+ return cancelID
+ }
+
+ /////////
+ // Tube Map API implementation
+ /////////
+
+ async getChunkedData(viewTarget, cancelSignal) {
+ let cancelID = this.getCancelID(cancelSignal);
+ return await this.rpc.rpc("getChunkedData", {viewTarget, cancelID});
+ }
+
+ async getFilenames(cancelSignal) {
+ let cancelID = this.getCancelID(cancelSignal);
+ return await this.rpc.rpc("getFilenames", {cancelID});
+ }
+
+ subscribeToFilenameChanges(handler, cancelSignal) {
+ let eventHandler = () => {
+ if (!cancelSignal.aborted) {
+ // Protect the real handler from event arguments and also calls after
+ // canceling.
+ handler();
+ }
+ };
+ let unsubscribe = () => {
+ // When the signal aborts, clean up everything so we don't keep any
+ // references to things.
+ this.nameChangeEvents.removeEventListener("change", eventHandler);
+ cancelSignal.removeEventListener("abort", unsubscribe);
+ };
+ this.nameChangeEvents.addEventListener("change", eventHandler);
+ cancelSignal.addEventListener("abort", unsubscribe);
+ return {};
+ }
+
+ async putFile(fileType, file, cancelSignal) {
+ let cancelID = this.getCancelID(cancelSignal);
+ // The RPC system magically takes care of transferring the file via transfer. Probably.
+ return await this.rpc.rpc("putFile", {fileType, file, cancelID});
+ }
+
+ async getBedRegions(bedFile, cancelSignal) {
+ let cancelID = this.getCancelID(cancelSignal);
+ return await this.rpc.rpc("getBedRegions", {bedFile, cancelID});
+ }
+
+ async getPathNames(graphFile, cancelSignal) {
+ let cancelID = this.getCancelID(cancelSignal);
+ return await this.rpc.rpc("getPathNames", {graphFile, cancelID});
+ }
+
+ async getChunkTracks(bedFile, chunk, cancelSignal) {
+ let cancelID = this.getCancelID(cancelSignal);
+ return await this.rpc.rpc("getChunkTracks", {bedFile, chunk, cancelID});
+ }
+}
+
+export default LocalAPI;
diff --git a/src/api/LocalAPI.test.js b/src/api/LocalAPI.test.js
new file mode 100644
index 00000000..2d045f97
--- /dev/null
+++ b/src/api/LocalAPI.test.js
@@ -0,0 +1,75 @@
+import { LocalAPI } from "./LocalAPI.mjs";
+
+import fs from "fs-extra";
+
+it("can be constructed", () => {
+ let api = new LocalAPI();
+});
+
+it("can have a file uploaded", async () => {
+ let api = new LocalAPI();
+
+ // We need to make sure we make a jsdom File (which is a jsdom Blob), and not
+ // a Node Blob, for our test file. Otherwise it doesn't work with jsdom's
+ // upload machinery.
+ // See for example the background on the many flavors of Blob.
+ const fileData = await fs.readFileSync("exampleData/x.gbz.db");
+ // Since a Node Buffer is an ArrayBuffer, we can use it to make a jsdom File.
+ // We need to put the data block in an enclosing array, or else the block
+ // will be iterated and each byte will be stringified and *those* bytes will
+ // be uploaded.
+ const file = new window.File([fileData], "x.gbz.db", {
+ type: "application/octet-stream",
+ });
+
+ // Set up for canceling the upload
+ let controller = new AbortController();
+
+ let uploadName = await api.putFile("graph", file, controller.signal);
+
+ expect(uploadName).toBeTruthy();
+});
+
+describe("when a file is uploaded", () => {
+ let uploadName = null;
+ const api = new LocalAPI();
+
+ beforeAll(async () => {
+ const fileData = await fs.readFileSync("exampleData/x.gbz.db");
+ const file = new window.File([fileData], "x.gbz.db", {
+ type: "application/octet-stream",
+ });
+
+ let controller = new AbortController();
+ uploadName = await api.putFile("graph", file, controller.signal);
+ });
+
+ it("should show up in the list of files", async () => {
+ let fileNames = await api.getFilenames();
+ let found = false;
+ for (let file of fileNames.files) {
+ if (file.name === uploadName) {
+ expect(file.type).toEqual("graph");
+ found = true;
+ }
+ }
+ expect(found).toBeTruthy();
+ });
+
+ it("can be asked for a view", async () => {
+ const region = "_gbwt_ref#x:1-10";
+ const viewTarget = {
+ "dataType": "mounted files",
+ "tracks": [
+ {"trackFile": uploadName, "trackType": "graph"}
+ ],
+ "region": "x:1-10"
+ };
+ let controller = new AbortController();
+ let view = await api.getChunkedData(viewTarget, controller.signal);
+
+ expect(view.graph).toBeTruthy();
+ expect(view.graph.node).toBeTruthy();
+ });
+});
diff --git a/src/ServerAPI.mjs b/src/api/ServerAPI.mjs
similarity index 89%
rename from src/ServerAPI.mjs
rename to src/api/ServerAPI.mjs
index 39ee0bf5..abc389f3 100644
--- a/src/ServerAPI.mjs
+++ b/src/api/ServerAPI.mjs
@@ -1,4 +1,4 @@
-import { fetchAndParse } from "./fetchAndParse.js";
+import { fetchAndParse } from "../fetchAndParse.js";
import { APIInterface } from "./APIInterface.mjs";
/**
@@ -42,12 +42,22 @@ export class ServerAPI extends APIInterface {
// We make a function to connect the websocket, which we can call to reconnect.
let connect = () => {
subscription.ws = new WebSocket(this.apiUrl.replace(/^http/, "ws"));
+
+ // We make a function to disconnect also and remove all the event handlers.
+ subscription.disconnect = () => {
+ subscription.ws.close();
+ cancelSignal.removeEventListener("abort", subscription.disconnect);
+ subscription.ws.onmessage = undefined;
+ subscription.ws.onclose = undefined;
+ subscription.ws.onerror = undefined;
+ };
+
subscription.ws.onmessage = (message) => {
if (!cancelSignal.aborted) {
// Tell the user that something changed
handler();
} else {
- subscription.ws.close();
+ subscription.disconnect();
}
};
subscription.ws.onclose = (event) => {
@@ -58,15 +68,17 @@ export class ServerAPI extends APIInterface {
};
subscription.ws.onerror = (event) => {
// Close the socket if something went wrong
- subscription.ws.close();
+ subscription.disconnect();
};
+
+ // Close the socket if the user wants to cancel the subscription.
+ cancelSignal.addEventListener("abort", subscription.disconnect);
};
connect();
// Give the subscription back to the caller to hold.
// TODO: Do we really need to hold the web socket in scope?
- // TODO: How does the user close the socket without a message arriving after cancelation?
return subscription;
}
diff --git a/src/api/local/Worker.mjs b/src/api/local/Worker.mjs
new file mode 100644
index 00000000..c2f3e153
--- /dev/null
+++ b/src/api/local/Worker.mjs
@@ -0,0 +1,21 @@
+/**
+ * Web Worker entry point for the local API implementation.
+ *
+ * Doesn't actually do any work, just hooks the guts of the implementation up
+ * to the real Web Worker in the browser. Under Jest, we bypass this and
+ * polyfill some event emitters around the implementation instead. See
+ * WorkerFactory.js and its mock.
+ */
+
+import { setUpWorker } from "./WorkerImplementation.mjs";
+
+// Because of Create React App's Opinions, we can't use the idiomatic "self"
+// here without fiddling with the linter, even if we claim to be in the
+// "worker" eslint environment. See
+// . Supposedly
+// there's a way to get at the service worker with "this", but "this" right now
+// appears undefined in the browser.
+//
+// So we turn off that linting rule
+/* eslint-disable no-restricted-globals */
+setUpWorker(self);
diff --git a/src/api/local/WorkerFactory.js b/src/api/local/WorkerFactory.js
new file mode 100644
index 00000000..45adeee0
--- /dev/null
+++ b/src/api/local/WorkerFactory.js
@@ -0,0 +1,20 @@
+/**
+ * This file needs to be .js so Jest can mock it properly. And it needs to be
+ * imported without extension.
+ */
+
+/**
+ * Make a new worker in a way that only works on Webpack, outside of Jest.
+ * Returns the worker object. On Jest, this will be replaced with a mock that
+ * runs the worker in-process.
+ */
+export function makeWorker() {
+ // Jest will crash if it ever sees "import.meta" in a source file, but
+ // Webpack keys on this exact
+ //
+ // new Worker(new URL(literal string, import.meta.url))
+ //
+ // syntactic construction to know to actually pack up a
+ // worker JS file.
+ return new Worker(new URL('./Worker.mjs', import.meta.url));
+}
diff --git a/src/api/local/WorkerImplementation.mjs b/src/api/local/WorkerImplementation.mjs
new file mode 100644
index 00000000..a1a954fd
--- /dev/null
+++ b/src/api/local/WorkerImplementation.mjs
@@ -0,0 +1,121 @@
+/**
+ * Guts of the local API Web Worker. Runs in a web worker in the browser and in the main thread in Jest.
+ */
+
+import { RpcProvider } from "worker-rpc";
+
+// We are actually a proxying wrapper around this actual implementation.
+import { GBZBaseAPI } from "../GBZBaseAPI.mjs";
+
+export function setUpWorker(self) {
+ // Here we have access to the Web Worker self (or a good imitation)
+
+ // Set up an RPC channel over the web worker message passing.
+ // This one doesn't assume we're *really* using a worker, which is important
+ // because sometimes it is mocked out.
+ const rpc = new RpcProvider((message, transfer) => {
+ return self.postMessage(message, transfer);
+ });
+
+ // Hook up the incoming messages to the provider.
+ self.addEventListener("message", (e) => {
+ return rpc.dispatch(e.data);
+ });
+
+ // Make an API implementation.
+ // Really we just proxy between this in the worker and a proxy object in the
+ // page thread.
+ const api = new GBZBaseAPI();
+
+ // Now register RPC messages. The handlers can only take a single object, but
+ // they can be async.
+
+ // Because we can't get cancel signals themselves over the channel, we need to make new abort controllers here.
+ let abortControllers = new Map();
+
+ // Get a cancellation signal that will trip when the given request is
+ // canceled. If the passed ID is undefined, returns undefined, and the
+ // request cannot be canceled.
+ function getSignal(requestID) {
+ if (requestID === undefined) {
+ return undefined;
+ }
+ abortControllers.set(requestID, new AbortController());
+ return abortControllers.get(requestID).signal;
+ }
+
+ // When a request finishes, get rid of the AbortController for canceling it.
+ function requestOver(requestID) {
+ abortControllers.delete(requestID);
+ }
+
+ // When someone wants to cancel a request, flip its AbortController.
+ function cancelRequest(requestID) {
+ if (abortControllers.has(requestID)) {
+ // If the request is still in flight, abort it.
+ abortControllers.get(requestID).abort()
+ }
+ }
+
+ // Instead of taking real cancel signals, we take unique IDs that can be canceled with another call.
+ rpc.registerRpcHandler('getChunkedData', async ({viewTarget, cancelID}) => {
+ try {
+ return await api.getChunkedData(viewTarget, getSignal(cancelID));
+ } finally {
+ requestOver(cancelID);
+ }
+ });
+
+ rpc.registerRpcHandler('getFilenames', async ({cancelID}) => {
+ try {
+ return await api.getFilenames(getSignal(cancelID));
+ } finally {
+ requestOver(cancelID);
+ }
+ });
+
+ rpc.registerRpcHandler('putFile', async ({fileType, file, cancelID}) => {
+ try {
+ return await api.putFile(fileType, file, getSignal(cancelID));
+ } finally {
+ requestOver(cancelID);
+ }
+ });
+
+ rpc.registerRpcHandler('getBedRegions', async ({bedFile, cancelID}) => {
+ try {
+ return await api.getBedRegions(bedFile, getSignal(cancelID));
+ } finally {
+ requestOver(cancelID);
+ }
+ });
+
+ rpc.registerRpcHandler('getPathNames', async ({graphFile, cancelID}) => {
+ try {
+ return await api.getPathNames(graphFile, getSignal(cancelID));
+ } finally {
+ requestOver(cancelID);
+ }
+ });
+
+ rpc.registerRpcHandler('getChunkTracks', async ({bedFile, chunk, cancelID}) => {
+ try {
+ return await api.getChunkTracks(bedFile, chunk, getSignal(cancelID));
+ } finally {
+ requestOver(cancelID);
+ }
+ });
+
+ // Subscribe to file updates and always publish them over the link.
+ // We don't ever actually abort the subscription.
+ let subscriptionAbortController = new AbortController();
+ api.subscribeToFilenameChanges(() => {
+ rpc.rpc("_filename_change", {});
+ }, subscriptionAbortController.signal);
+ // TODO: Do we need to stash the returned subscription somewhere safe?
+
+ // If a call is canceled, cancel it on this side.
+ rpc.registerRpcHandler('_cancel', ({cancelID}) => {
+ cancelRequest(cancelID);
+ })
+}
diff --git a/src/api/local/__mocks__/WorkerFactory.js b/src/api/local/__mocks__/WorkerFactory.js
new file mode 100644
index 00000000..0b082376
--- /dev/null
+++ b/src/api/local/__mocks__/WorkerFactory.js
@@ -0,0 +1,40 @@
+/**
+ * Fake a new worker in a way that works on Jest. This is used
+ * under Jest to mock WorkerFactory.js with something that can run on
+ * Jest.
+ */
+
+import { setUpWorker } from "../WorkerImplementation.mjs"
+
+import { EventEmitter } from "events";
+
+export function makeWorker() {
+
+ // Make a couple EventEmitters, chrome them up to look more browser-y, and
+ // cross-connect their message events and postMessage functions.
+ let workerSide = new EventEmitter();
+ workerSide.addEventListener = workerSide.on;
+
+ let userSide = new EventEmitter();
+ userSide.addEventListener = userSide.on;
+
+ workerSide.postMessage = (message, options) => {
+ setTimeout(() => {
+ userSide.emit("message", {data: message});
+ });
+ }
+
+ userSide.postMessage = (message, options) => {
+ setTimeout(() => {
+ workerSide.emit("message", {data: message});
+ });
+ }
+
+ setUpWorker(workerSide);
+
+ // Hide the one side in the other.
+ userSide.actualWorker = workerSide;
+
+ return userSide;
+
+}
diff --git a/src/components/CustomizationAccordion.js b/src/components/CustomizationAccordion.js
index e83a7fcb..7b7b5afb 100644
--- a/src/components/CustomizationAccordion.js
+++ b/src/components/CustomizationAccordion.js
@@ -17,6 +17,7 @@ class VisualizationOptions extends Component {
state = {
isOpenLegend: false,
isOpenVisualizationOptions: true,
+ isOpenServer: false
};
toggleLegend = (e) => {
@@ -31,6 +32,11 @@ class VisualizationOptions extends Component {
e.preventDefault();
};
+ toggleServer = (e) => {
+ this.setState({ isOpenServer: !this.state.isOpenServer });
+ e.preventDefault();
+ };
+
handleMappingQualityCutoffChange = (event) => {
this.props.handleMappingQualityCutoffChange(event.target.value);
};
@@ -210,6 +216,42 @@ class VisualizationOptions extends Component {
+
+
+
+
+
+
+
+
+
+
+
+
+
+
);
@@ -220,6 +262,8 @@ VisualizationOptions.propTypes = {
handleMappingQualityCutoffChange: PropTypes.func.isRequired,
setColorSetting: PropTypes.func.isRequired,
tracks: PropTypes.array.isRequired,
+ currentAPIMode: PropTypes.string,
+ setAPIMode: PropTypes.func
};
export default VisualizationOptions;
diff --git a/src/components/HeaderForm.js b/src/components/HeaderForm.js
index 7b0e7c6c..dae178fd 100644
--- a/src/components/HeaderForm.js
+++ b/src/components/HeaderForm.js
@@ -4,6 +4,7 @@ import { Container, Row, Col, Label, Alert, Button } from "reactstrap";
import { dataOriginTypes } from "../enums";
import "../config-client.js";
import { config } from "../config-global.mjs";
+import { LocalAPI } from "../api/LocalAPI.mjs";
import DataPositionFormRow from "./DataPositionFormRow";
import ExampleSelectButtons from "./ExampleSelectButtons";
import RegionInput from "./RegionInput";
@@ -68,6 +69,8 @@ const CLEAR_STATE = {
pathNames: [],
tracks: {},
+ // BED file of regions to jump between. Regions may have pre-extracted chunks in the last column.
+ // If not used, may be undefined or may have the string value "none".
bedFile: undefined,
region: "",
name: undefined,
@@ -94,6 +97,12 @@ const EMPTY_STATE = {
bedSelectOptions: [],
};
+// Return true if file is set to a string file name or URL, and false if it is
+// falsey or the "none" sentinel.
+function isSet(file) {
+ return (file !== "none" && file);
+}
+
// Creates track to be stored in ViewTarget
// Modify as the track system changes
// INPUT: file structure, see Types.ts
@@ -264,7 +273,6 @@ class HeaderForm extends Component {
componentDidMount() {
this.fetchCanceler = new AbortController();
this.cancelSignal = this.fetchCanceler.signal;
- this.api = this.props.APIInterface;
this.initState();
this.getMountedFilenames();
this.setUpWebsocket();
@@ -289,7 +297,7 @@ class HeaderForm extends Component {
initState = () => {
// Populate state with either viewTarget or the first example
let ds = this.props.defaultViewTarget ?? DATA_SOURCES[0];
- const bedSelect = ds.bedFile ? ds.bedFile : "none";
+ const bedSelect = isSet(ds.bedFile) ? ds.bedFile : "none";
if (bedSelect !== "none") {
this.getBedRegions(bedSelect);
}
@@ -317,7 +325,7 @@ class HeaderForm extends Component {
setTrackFile = (type, index, file) => {
// Set the nth track of the given type to the given file.
// If there is no nth track of that type, create one.
- // If the file is "none", remove that track.
+ // If the file is unset, remove that track.
this.setState((state) => {
console.log(
"Set file " +
@@ -340,13 +348,13 @@ class HeaderForm extends Component {
let track = state.tracks[key];
if (track.trackType === type) {
if (seenTracksOfType === index) {
- if (file !== "none") {
+ if (isSet(file)) {
// We want to adjust it, so keep a modified copy of it
let newTrack = JSON.parse(JSON.stringify(track));
newTrack.trackFile = file;
newTracks[key] = newTrack;
}
- // If the file is "none" we drop the track.
+ // If the file is unset we drop the track.
} else {
// We want to keep it as is
newTracks[key] = track;
@@ -364,7 +372,7 @@ class HeaderForm extends Component {
console.log(
"Saw " + seenTracksOfType + " tracks of type vs index " + index
);
- if (seenTracksOfType === index && file !== "none") {
+ if (seenTracksOfType === index && isSet(file)) {
// We need to add this track
console.log("Create track at index " + (maxKey + 1));
newTracks[maxKey + 1] = createTrack({ type: type, name: file });
@@ -379,8 +387,8 @@ class HeaderForm extends Component {
};
getTrackFile = (tracks, type, index) => {
- // Get the file used in the nth track of the gicen type, or "none" if no
- // such track exists.
+ // Get the file used in the nth track of the given type, or the unset
+ // "none" sentinel if no such track exists.
let seenTracksOfType = 0;
for (const key in tracks) {
let track = tracks[key];
@@ -402,7 +410,7 @@ class HeaderForm extends Component {
getMountedFilenames = async () => {
this.setState({ error: null });
try {
- const json = await this.api.getFilenames(this.cancelSignal);
+ const json = await this.props.APIInterface.getFilenames(this.cancelSignal);
if (!json.files || json.files.length === 0) {
// We did not get back a graph, only (possibly) an error.
const error =
@@ -416,7 +424,7 @@ class HeaderForm extends Component {
const bedSelect = json.bedFiles.includes(state.bedSelect)
? state.bedSelect
: "none";
- if (bedSelect !== "none") {
+ if (isSet(bedSelect)) {
this.getBedRegions(bedSelect);
}
for (const key in state.tracks) {
@@ -450,7 +458,7 @@ class HeaderForm extends Component {
getBedRegions = async (bedFile) => {
this.setState({ error: null });
try {
- const json = await this.api.getBedRegions(bedFile, this.cancelSignal);
+ const json = await this.props.APIInterface.getBedRegions(bedFile, this.cancelSignal);
// We need to do all our parsing here, if we expect the catch to catch errors.
if (!json.bedRegions || !(json.bedRegions["desc"] instanceof Array)) {
throw new Error(
@@ -477,7 +485,7 @@ class HeaderForm extends Component {
getPathNames = async (graphFile) => {
this.setState({ error: null });
try {
- const json = await this.api.getPathNames(graphFile, this.cancelSignal);
+ const json = await this.props.APIInterface.getPathNames(graphFile, this.cancelSignal);
// We need to do all our parsing here, if we expect the catch to catch errors.
let pathNames = json.pathNames;
if (!(pathNames instanceof Array)) {
@@ -515,7 +523,7 @@ class HeaderForm extends Component {
DATA_SOURCES.forEach((ds) => {
if (ds.name === value) {
let bedSelect = "none";
- if (ds.bedFile) {
+ if (isSet(ds.bedFile)) {
this.getBedRegions(ds.bedFile);
bedSelect = ds.bedFile;
} else {
@@ -648,9 +656,9 @@ class HeaderForm extends Component {
if (tracks) {
this.setState({ tracks: this.convertArrayToObject(tracks) });
console.log("New tracks have been applied");
- } else if (this.state.bedFile && chunk) {
+ } else if (isSet(this.state.bedFile) && chunk) {
// Try to retrieve tracks from the server
- const json = await this.api.getChunkTracks(
+ const json = await this.props.APIInterface.getChunkTracks(
this.state.bedFile,
chunk,
this.cancelSignal
@@ -681,7 +689,7 @@ class HeaderForm extends Component {
// update path names
const graphFile = this.getTrackFile(newTracks, fileTypes.GRAPH, 0);
- if (graphFile && graphFile !== "none") {
+ if (isSet(graphFile)) {
this.getPathNames(graphFile);
}
};
@@ -691,7 +699,7 @@ class HeaderForm extends Component {
const value = event.target.value;
this.setState({ [id]: value });
- if (value !== "none") {
+ if (isSet(value)) {
this.getBedRegions(value);
}
this.setState({ bedFile: value });
@@ -737,7 +745,7 @@ class HeaderForm extends Component {
}
canGoLeft = (regionIndex) => {
- if (this.state.bedFile){
+ if (isSet(this.state.bedFile)){
return (regionIndex > 0);
} else {
return true;
@@ -745,7 +753,7 @@ class HeaderForm extends Component {
}
canGoRight = (regionIndex) => {
- if (this.state.bedFile){
+ if (isSet(this.state.bedFile)){
if (!this.state.regionInfo["chr"]){
return false;
}
@@ -757,7 +765,7 @@ class HeaderForm extends Component {
handleGoRight = () => {
- if (this.state.bedFile){
+ if (isSet(this.state.bedFile)){
this.jumpRegion(1);
} else {
this.budgeRegion(0.5);
@@ -765,7 +773,7 @@ class HeaderForm extends Component {
};
handleGoLeft = () => {
- if (this.state.bedFile){
+ if (isSet(this.state.bedFile)){
this.jumpRegion(-1);
} else {
this.budgeRegion(-0.5);
@@ -782,7 +790,7 @@ class HeaderForm extends Component {
// Sends uploaded file to server and returns a path to the file
handleFileUpload = async (fileType, file) => {
- if (file.size > config.MAXUPLOADSIZE) {
+ if (!(this.props.APIInterface instanceof LocalAPI) && file.size > config.MAXUPLOADSIZE) {
this.showFileSizeAlert();
return;
}
@@ -790,7 +798,7 @@ class HeaderForm extends Component {
this.setUploadInProgress(true);
try {
- let fileName = await this.api.putFile(fileType, file, this.cancelSignal);
+ let fileName = await this.props.APIInterface.putFile(fileType, file, this.cancelSignal);
if (fileType === "graph") {
// Refresh the graphs right away
this.getMountedFilenames();
@@ -806,7 +814,7 @@ class HeaderForm extends Component {
};
setUpWebsocket = () => {
- this.subscription = this.api.subscribeToFilenameChanges(
+ this.subscription = this.props.APIInterface.subscribeToFilenameChanges(
this.getMountedFilenames,
this.cancelSignal
);
diff --git a/src/components/TubeMapContainer.js b/src/components/TubeMapContainer.js
index 31f219e2..0c15ac68 100644
--- a/src/components/TubeMapContainer.js
+++ b/src/components/TubeMapContainer.js
@@ -18,7 +18,6 @@ class TubeMapContainer extends Component {
componentDidMount() {
this.fetchCanceler = new AbortController();
this.cancelSignal = this.fetchCanceler.signal;
- this.api = this.props.APIInterface;
this.getRemoteTubeMapData();
}
@@ -135,7 +134,7 @@ class TubeMapContainer extends Component {
getRemoteTubeMapData = async () => {
this.setState({ isLoading: true, error: null });
try {
- const json = await this.api.getChunkedData(
+ const json = await this.props.APIInterface.getChunkedData(
this.props.viewTarget,
this.cancelSignal
);
diff --git a/src/config.json b/src/config.json
index a7f10d20..7f5bfcce 100644
--- a/src/config.json
+++ b/src/config.json
@@ -83,7 +83,7 @@
},
"fileTypeToExtensions": {
- "graph": ".xg,.vg,.hg,.gbz,.pg",
+ "graph": ".xg,.vg,.hg,.gbz,.pg,.db",
"haplotype": ".gbwt,.gbz",
"read": ".gam"
},
diff --git a/src/setupTests.js b/src/setupTests.js
index 24149bdd..60d06362 100644
--- a/src/setupTests.js
+++ b/src/setupTests.js
@@ -9,3 +9,9 @@
import { TextEncoder, TextDecoder } from "util";
globalThis.TextEncoder = TextEncoder;
globalThis.TextDecoder = TextDecoder;
+
+// Make sure the mock version of the web worker we use for the local API
+// implementation loads under Jest, where web workers are not actually
+// available.
+jest.mock("./api/local/WorkerFactory")
+