diff --git a/README.adoc b/README.adoc index effa19f..7990e91 100644 --- a/README.adoc +++ b/README.adoc @@ -74,6 +74,13 @@ WARNING: WebUI conversion is experimental and may be slower or less stable than WARNING: The API is still evolving. Endpoints, parameters, and response formats may change. +Interactive OpenAPI docs are available at: + +* `http://localhost:5000/api/v1/` + +For the Python client-oriented endpoint contract used in `jvm-api-v1`, see: +`docs/jvm_api_v1.md`. + Example (Python) for fetching a submatrix tile as an image: ```python @@ -97,12 +104,19 @@ png_data_url = data["image"] print(png_data_url[:64]) ``` -To apply visualization/normalization settings before fetching tiles: +To apply visualization settings before fetching tiles: * POST `/set_visualization_options` with visualization parameters. -* POST `/set_normalization` with normalization settings. +* Optionally POST `/render_pipeline/set` for custom graph pipeline. * Then call `/get_tile` as shown above. +For tensor workflows (NumPy/Torch), numeric submatrices are available via: + +* `POST /matrix/query` +** units: `PIXELS`, `BINS`, `BP` +** signal modes: `RAW_COUNTS`, `COOLER_WEIGHTED`, `TRADITIONAL_NORMALIZED`, `PIPELINE_SIGNAL` +** binary formats for fast transfer: `BINARY_FLOAT32`, `BINARY_FLOAT64`, `BINARY_INT64` + === Supported platforms / JDK details * *OS/CPU (prebuilt libs):* Linux (glibc) and Windows, AMD64. 
diff --git a/build.gradle.kts b/build.gradle.kts index 6658928..8ebc209 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -71,7 +71,7 @@ val webUIRepositoryDirectory = if (localWebUIRepositoryDirectory.asFile.exists()) localWebUIRepositoryDirectory else remoteWebUIRepositoryDirectory val webUIRepositoryAddress = "https://github.com/ctlab/HiCT_WebUI.git" val webUITargetDirectory = layout.projectDirectory.dir("src/main/resources/webui") -val webUIBranch = "migrate-converters-update-ui-1dtracks" +val webUIBranch = "master" version = readVersion() diff --git a/docs/jvm_api_v1.md b/docs/jvm_api_v1.md new file mode 100644 index 0000000..4643229 --- /dev/null +++ b/docs/jvm_api_v1.md @@ -0,0 +1,114 @@ +# HiCT_JVM API Reference for Python client (`hict_jvm_api`) + +This document describes the endpoint subset used by the `jvm-api-v1` Python library. +All endpoints are served by HiCT_JVM API server (`start-api-server` or `start-server`). + +Interactive documentation is served by the backend itself: + +- `GET /api/v1/` - Swagger UI +- `GET /api/v1/openapi.yaml` - OpenAPI v1 source + +## Base URL + +Typically: + +- `http://localhost:5000` +- `http://localhost:5001` + +## Core endpoints + +### Session and files + +| Method | Path | Purpose | +|---|---|---| +| `GET` | `/version` | Server/version metadata | +| `POST` | `/list_files` | List files under `DATA_DIR` | +| `POST` | `/list_files_detailed` | List files with metadata | +| `POST` | `/list_coolers` | List `.cool/.mcool` files | +| `POST` | `/list_fasta_files` | List FASTA files | +| `POST` | `/list_agp_files` | List AGP files | +| `POST` | `/open` | Open primary HiCT source | +| `POST` | `/open_progress` | Read open progress | +| `POST` | `/attach` | Attach existing in-memory session | +| `POST` | `/close` | Close active session | + +### Secondary source + +| Method | Path | Purpose | +|---|---|---| +| `POST` | `/secondary/status` | Read secondary-source status | +| `POST` | `/secondary/open` | Attach secondary source 
(`allowMismatch` supported) | +| `POST` | `/secondary/close` | Detach secondary source | +| `POST` | `/secondary/set_assembly_source` | Set `PRIMARY` or `SECONDARY` assembly source | + +### Tiles and rendering + +| Method | Path | Purpose | +|---|---|---| +| `GET` | `/get_tile` | Fetch rendered tile/region | +| `POST` | `/matrix/query` | Fetch numeric submatrix (raw/weighted/normalized/pipeline signal) | +| `POST` | `/tiles/reload` | Invalidate tile caches and bump versions | +| `POST` | `/get_visualization_options` | Get current visualization options | +| `POST` | `/set_visualization_options` | Set visualization options | +| `POST` | `/render_pipeline/get` | Get custom rendering pipeline graph | +| `POST` | `/render_pipeline/set` | Set custom rendering pipeline graph | +| `POST` | `/render_pipeline/reset` | Reset pipeline to defaults | + +`/get_tile` supported formats: + +- `JSON_PNG_WITH_RANGES` (JSON with base64 PNG + ranges) +- `PNG` (raw PNG bytes) +- `PNG_BY_PIXELS` (raw PNG bytes for arbitrary pixel-space region) + +`/matrix/query` supports: + +- Units: `PIXELS`, `BINS`, `BP` +- Signal modes: `RAW_COUNTS`, `COOLER_WEIGHTED`, `TRADITIONAL_NORMALIZED`, `PIPELINE_SIGNAL` +- Response formats: `BINARY_FLOAT32`, `BINARY_FLOAT64`, `BINARY_INT64`, `JSON` + +### Scaffolding operations + +| Method | Path | Purpose | +|---|---|---| +| `POST` | `/reverse_selection_range` | Reverse assembly interval | +| `POST` | `/move_selection_range` | Move interval | +| `POST` | `/split_contig_at_bin` | Split contig at pixel/bin coordinate | +| `POST` | `/group_contigs_into_scaffold` | Group interval into scaffold | +| `POST` | `/ungroup_contigs_from_scaffold` | Ungroup scaffold interval | +| `POST` | `/move_selection_to_debris` | Move interval to debris | + +### FASTA / AGP + +| Method | Path | Purpose | +|---|---|---| +| `POST` | `/link_fasta` | Link FASTA to current source | +| `POST` | `/get_fasta_for_assembly` | Export FASTA for full assembly | +| `POST` | 
`/get_fasta_for_selection` | Export FASTA for selected rectangle | +| `POST` | `/get_agp_for_assembly` | Export AGP | +| `POST` | `/load_agp` | Import AGP and update assembly state | + +### Conversion jobs + +| Method | Path | Purpose | +|---|---|---| +| `POST` | `/convert/jobs` | Submit single conversion job | +| `POST` | `/convert/jobs/batch` | Submit batch conversion jobs | +| `POST` | `/convert/jobs/list` | List all conversion jobs | +| `GET` | `/convert/jobs/:jobId` | Get single job status | +| `POST` | `/convert/jobs/:jobId/stop` | Cancel running job | +| `GET` | `/convert/download/:jobId` | Download completed conversion output | + +`/convert/jobs` and `/convert/jobs/batch` support `overwrite` boolean. + +### Diagnostics + +| Method | Path | Purpose | +|---|---|---| +| `POST` | `/diagnostics/workers` | Worker-pool and queue diagnostics | + +## Notes for Python client authors + +- Session state is server-side and mutable; reopen/attach as needed. +- Scaffolding operations update assembly state and bump tile generations. +- `PIXELS` are visible pixels (hidden contigs excluded) at selected resolution. +- For throughput-sensitive region fetches use `GET /get_tile?format=PNG_BY_PIXELS` with persistent HTTP client reuse. 
diff --git a/ldbg.sh b/ldbg.sh index e59fd1c..5c97674 100755 --- a/ldbg.sh +++ b/ldbg.sh @@ -1,4 +1,4 @@ #!/bin/bash export VERTXWEB_ENVIRONMENT="dev" SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -VXPORT=5000 DATA_DIR="/mnt/Models/HiCT/data/" TILE_SIZE=256 java -jar ${SCRIPT_DIR}/build/libs/hict_server-*-fat.jar $@ +VXPORT=5000 DATA_DIR="/mnt/Models/HiCT/data/" TILE_SIZE=256 HICT_WORKERS_TOTAL_MAX=256 java -jar ${SCRIPT_DIR}/build/libs/hict_server-*-fat.jar $@ diff --git a/scripts/run_optional_data_tests.sh b/scripts/run_optional_data_tests.sh new file mode 100755 index 0000000..66bc27a --- /dev/null +++ b/scripts/run_optional_data_tests.sh @@ -0,0 +1,201 @@ +#!/usr/bin/env bash +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +JVM_DIR="$(cd "${SCRIPT_DIR}/.." && pwd)" +DATA_DIR="${DATA_DIR:-/mnt/Models/HiCT/data}" +PORT="${PORT:-5011}" +PROCESSED_DIR="${PROCESSED_DIR:-/tmp/hict_processed_cache_ci}" +export GRADLE_USER_HOME="${GRADLE_USER_HOME:-/tmp/hict_gradle_home}" +mkdir -p "${GRADLE_USER_HOME}" + +HICT_REL="build/quad/combined_ind2_4DN.hict.hdf5" +FASTA_REL="build/quad/quad_combined_ind2.fasta" +BW_REL="build/quad/ind2.coverage.bw" +BED_REL="build/quad/ind2.alignments.bed.gz" +GFF_REL="" +for candidate in \ + "build/quad/annotation_miniprot/ind2.miniprot.gff3" \ + "build/quad/ind2.miniprot.gff3" \ + "build/quad/ind2.features.gff3" +do + if [[ -f "${DATA_DIR}/${candidate}" ]]; then + GFF_REL="${candidate}" + break + fi +done + +HICT_ABS="${DATA_DIR}/${HICT_REL}" +FASTA_ABS="${DATA_DIR}/${FASTA_REL}" +BW_ABS="${DATA_DIR}/${BW_REL}" +BED_ABS="${DATA_DIR}/${BED_REL}" +GFF_ABS="" +if [[ -n "${GFF_REL}" ]]; then + GFF_ABS="${DATA_DIR}/${GFF_REL}" +fi + +cd "${JVM_DIR}" + +echo "[optional] Building fat jar" +./gradlew shadowJar >/dev/null + +mkdir -p "${PROCESSED_DIR}" +JAR_PATH="$(ls -1 build/libs/hict_server-*-fat.jar | head -n 1)" +if [[ -z "${JAR_PATH}" ]]; then + echo "[optional] ERROR: fat jar 
was not produced" + exit 1 +fi + +if [[ ! -f "${HICT_ABS}" ]]; then + echo "[optional] Skip: ${HICT_ABS} does not exist" + exit 0 +fi + +echo "[optional] Starting API server on :${PORT}" +VXPORT="${PORT}" SERVE_WEBUI=false DATA_DIR="${DATA_DIR}" PROCESSED_DIR="${PROCESSED_DIR}" \ + java -jar "${JAR_PATH}" start-api-server >/tmp/hict_optional_tests_server.log 2>&1 & +SERVER_PID=$! +cleanup() { + if ps -p "${SERVER_PID}" >/dev/null 2>&1; then + kill "${SERVER_PID}" >/dev/null 2>&1 || true + wait "${SERVER_PID}" >/dev/null 2>&1 || true + fi +} +trap cleanup EXIT + +for _ in $(seq 1 40); do + if curl -fsS "http://localhost:${PORT}/version" >/dev/null 2>&1; then + break + fi + sleep 1 +done +curl -fsS "http://localhost:${PORT}/version" >/dev/null + +echo "[optional] Opening HiCT" +curl -fsS -X POST "http://localhost:${PORT}/open" \ + -H "content-type: application/json" \ + -d "{\"filename\":\"${HICT_REL}\"}" >/dev/null + +echo "[optional] Opening Cooler weights track" +curl -fsS -X POST "http://localhost:${PORT}/tracks/open_cooler_weights" \ + -H "content-type: application/json" \ + -d '{"name":"Cooler weights"}' >/dev/null + +COOLER_TRACK_ID="$( + curl -fsS -X POST "http://localhost:${PORT}/tracks/list" \ + -H "content-type: application/json" \ + -d '{}' | jq -r '.[] | select(.type=="COOLER_WEIGHTS") | .trackId' | head -n1 +)" +if [[ -z "${COOLER_TRACK_ID}" ]]; then + echo "[optional] ERROR: Cooler weights track was not created" + exit 1 +fi +curl -fsS -X POST "http://localhost:${PORT}/tracks/update" \ + -H "content-type: application/json" \ + -d "{\"trackId\":\"${COOLER_TRACK_ID}\",\"logScale\":true}" >/dev/null + +if [[ -f "${FASTA_ABS}" ]]; then + echo "[optional] Linking FASTA" + curl -fsS -X POST "http://localhost:${PORT}/link_fasta" \ + -H "content-type: application/json" \ + -d "{\"fastaFilename\":\"${FASTA_REL}\",\"allowMismatch\":true}" >/dev/null +else + echo "[optional] FASTA not found, skipping FASTA link checks" +fi + +if [[ -f "${BW_ABS}" ]]; then + echo 
"[optional] Opening BigWig track" + curl -fsS -X POST "http://localhost:${PORT}/tracks/open" \ + -H "content-type: application/json" \ + -d "{\"filename\":\"${BW_REL}\"}" >/dev/null +fi + +if [[ -f "${BED_ABS}" ]]; then + echo "[optional] Opening BED track" + curl -fsS -X POST "http://localhost:${PORT}/tracks/open" \ + -H "content-type: application/json" \ + -d "{\"filename\":\"${BED_REL}\"}" >/dev/null +fi + +if [[ -n "${GFF_ABS}" && -f "${GFF_ABS}" ]]; then + echo "[optional] Opening GFF track" + curl -fsS -X POST "http://localhost:${PORT}/tracks/open" \ + -H "content-type: application/json" \ + -d "{\"filename\":\"${GFF_REL}\"}" >/dev/null +fi + +echo "[optional] Querying tracks in PIXELS/BINS/BP units" +curl -fsS -X POST "http://localhost:${PORT}/tracks/query_1d" \ + -H "content-type: application/json" \ + -d '{"unit":"PIXELS","startPx":0,"endPx":6000,"widthPx":1200,"bpResolution":50000}' \ + | jq -e '.tracks | length >= 0' >/dev/null +curl -fsS -X POST "http://localhost:${PORT}/tracks/query_1d" \ + -H "content-type: application/json" \ + -d '{"unit":"BINS","startBin":0,"endBin":6000,"widthPx":1200,"bpResolution":50000}' \ + | jq -e '.tracks | length >= 0' >/dev/null +curl -fsS -X POST "http://localhost:${PORT}/tracks/query_1d" \ + -H "content-type: application/json" \ + -d '{"unit":"BP","startBP":0,"endBP":300000000,"widthPx":1200,"bpResolution":50000}' \ + | jq -e '.tracks | length >= 0' >/dev/null + +echo "[optional] Querying numeric matrix in JSON and binary modes" +MATRIX_JSON="$(curl -fsS -X POST "http://localhost:${PORT}/matrix/query" \ + -H "content-type: application/json" \ + -d '{"bpResolution":50000,"unit":"PIXELS","startRowPx":0,"endRowPx":32,"startColPx":0,"endColPx":32,"signalMode":"TRADITIONAL_NORMALIZED","format":"JSON"}')" +echo "${MATRIX_JSON}" | jq -e '.rows == 32 and .cols == 32 and (.values|length)==1024' >/dev/null + +MATRIX_BIN_HEADERS="$(mktemp)" +MATRIX_BIN_BODY="$(mktemp)" +curl -fsS -D "${MATRIX_BIN_HEADERS}" -o "${MATRIX_BIN_BODY}" 
-X POST "http://localhost:${PORT}/matrix/query" \ + -H "content-type: application/json" \ + -d '{"bpResolution":50000,"unit":"PIXELS","startRowPx":0,"endRowPx":16,"startColPx":0,"endColPx":16,"signalMode":"COOLER_WEIGHTED","format":"BINARY_FLOAT32"}' >/dev/null +grep -qi '^x-hict-rows: 16' "${MATRIX_BIN_HEADERS}" +grep -qi '^x-hict-cols: 16' "${MATRIX_BIN_HEADERS}" +grep -qi '^x-hict-dtype: float32' "${MATRIX_BIN_HEADERS}" +[[ "$(wc -c < "${MATRIX_BIN_BODY}")" -eq $((16 * 16 * 4)) ]] +rm -f "${MATRIX_BIN_HEADERS}" "${MATRIX_BIN_BODY}" + +echo "[optional] Cooler weights query sanity check" +curl -fsS -X POST "http://localhost:${PORT}/tracks/query_1d" \ + -H "content-type: application/json" \ + -d '{"unit":"PIXELS","startPx":0,"endPx":7000,"widthPx":1536,"bpResolution":50000}' \ + | jq -e '.tracks[] | select(.type=="COOLER_WEIGHTS") | (.bins | length) > 0' >/dev/null + +if [[ -f "${BW_ABS}" && -f "${BED_ABS}" ]]; then + echo "[optional] BED vs BigWig sanity check" + QUERY_JSON="$(curl -fsS -X POST "http://localhost:${PORT}/tracks/query_1d" \ + -H "content-type: application/json" \ + -d '{"unit":"PIXELS","startPx":0,"endPx":7000,"widthPx":1536,"bpResolution":50000}')" + BW_BINS="$(echo "${QUERY_JSON}" | jq -r '.tracks[] | select(.sourceFile? 
== null or true) | select(.type=="BIGWIG") | .bins | length' | head -n1 || true)" + BED_BINS="$(echo "${QUERY_JSON}" | jq -r '.tracks[] | select(.type=="BED") | .bins | length' | head -n1 || true)" + if [[ -n "${BW_BINS}" && -n "${BED_BINS}" ]]; then + if [[ "${BW_BINS}" -eq 0 || "${BED_BINS}" -eq 0 ]]; then + echo "[optional] ERROR: BED/BigWig bins are empty" + exit 1 + fi + fi +fi + +if [[ -n "${GFF_ABS}" && -f "${GFF_ABS}" ]]; then + echo "[optional] GFF/GTF structured bins sanity check" + GFF_QUERY_JSON="$(curl -fsS -X POST "http://localhost:${PORT}/tracks/query_1d" \ + -H "content-type: application/json" \ + -d '{"unit":"PIXELS","startPx":0,"endPx":7000,"widthPx":1536,"bpResolution":50000}')" + echo "${GFF_QUERY_JSON}" | jq -e ' + .tracks[] + | select(.type=="GFF_GTF") + | (.bins | length) > 0 + ' >/dev/null + echo "${GFF_QUERY_JSON}" | jq -e ' + .tracks[] + | select(.type=="GFF_GTF") + | (.bins | any((.blocks // []) | length > 0)) + ' >/dev/null + echo "${GFF_QUERY_JSON}" | jq -e ' + .tracks[] + | select(.type=="GFF_GTF") + | (.bins | any(.strand == "+" or .strand == "-")) + ' >/dev/null +fi + +echo "[optional] OK" diff --git a/scripts/run_smoke_tests.sh b/scripts/run_smoke_tests.sh new file mode 100755 index 0000000..de73c8a --- /dev/null +++ b/scripts/run_smoke_tests.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +JVM_DIR="$(cd "${SCRIPT_DIR}/.." 
&& pwd)" +WEBUI_DIR="$(cd "${JVM_DIR}/../HiCT_WebUI" && pwd)" +export GRADLE_USER_HOME="${GRADLE_USER_HOME:-/tmp/hict_gradle_home}" +mkdir -p "${GRADLE_USER_HOME}" + +echo "[smoke] Building JVM module" +cd "${JVM_DIR}" +./gradlew compileJava +echo "[smoke] Running JVM tests" +./gradlew test + +if [[ -d "${WEBUI_DIR}" ]]; then + echo "[smoke] Type-checking and building WebUI" + cd "${WEBUI_DIR}" + npm run type-check + npm run build-only +fi + +echo "[smoke] OK" diff --git a/src/main/java/ru/itmo/ctlab/hict/hict_server/MainVerticle.java b/src/main/java/ru/itmo/ctlab/hict/hict_server/MainVerticle.java index 5b97479..b08cc4a 100644 --- a/src/main/java/ru/itmo/ctlab/hict/hict_server/MainVerticle.java +++ b/src/main/java/ru/itmo/ctlab/hict/hict_server/MainVerticle.java @@ -55,7 +55,9 @@ import ru.itmo.ctlab.hict.hict_server.handlers.names.NameMappingHandlersHolder; import ru.itmo.ctlab.hict.hict_server.handlers.operations.ScaffoldingOpHandlersHolder; import ru.itmo.ctlab.hict.hict_server.handlers.conversion.ConversionHandlersHolder; +import ru.itmo.ctlab.hict.hict_server.handlers.info.ApiDocsHandlersHolder; import ru.itmo.ctlab.hict.hict_server.handlers.info.InfoHandlersHolder; +import ru.itmo.ctlab.hict.hict_server.handlers.tiles.RenderPipelineConfig; import ru.itmo.ctlab.hict.hict_server.handlers.tiles.TileHandlersHolder; import ru.itmo.ctlab.hict.hict_server.handlers.tracks.TrackHandlersHolder; import ru.itmo.ctlab.hict.hict_server.util.shareable.ShareableWrappers; @@ -142,28 +144,28 @@ public void start(final Promise startPromise) throws Exception { perPrioritySizing.put( RequestTaskScheduler.RequestPriority.UI_UX, new RequestTaskScheduler.PoolSizing( - getIntegerSetting(event.result(), "HICT_WORKERS_UI_MIN", 2), + getIntegerSetting(event.result(), "HICT_WORKERS_UI_MIN", 4), getIntegerSetting(event.result(), "HICT_WORKERS_UI_MAX", defaultPoolMax) ) ); perPrioritySizing.put( RequestTaskScheduler.RequestPriority.ASSEMBLY, new RequestTaskScheduler.PoolSizing( - 
getIntegerSetting(event.result(), "HICT_WORKERS_ASSEMBLY_MIN", 2), + getIntegerSetting(event.result(), "HICT_WORKERS_ASSEMBLY_MIN", 4), getIntegerSetting(event.result(), "HICT_WORKERS_ASSEMBLY_MAX", defaultPoolMax) ) ); perPrioritySizing.put( RequestTaskScheduler.RequestPriority.TILE, new RequestTaskScheduler.PoolSizing( - getIntegerSetting(event.result(), "HICT_WORKERS_TILE_MIN", 2), + getIntegerSetting(event.result(), "HICT_WORKERS_TILE_MIN", 8), getIntegerSetting(event.result(), "HICT_WORKERS_TILE_MAX", defaultPoolMax) ) ); perPrioritySizing.put( RequestTaskScheduler.RequestPriority.TRACK, new RequestTaskScheduler.PoolSizing( - getIntegerSetting(event.result(), "HICT_WORKERS_TRACK_MIN", 2), + getIntegerSetting(event.result(), "HICT_WORKERS_TRACK_MIN", 4), getIntegerSetting(event.result(), "HICT_WORKERS_TRACK_MAX", defaultPoolMax) ) ); @@ -208,6 +210,10 @@ public void start(final Promise startPromise) throws Exception { map.put("visualizationOptions", new ShareableWrappers.SimpleVisualizationOptionsWrapper(defaultVisualizationOptions)); + map.put( + RenderPipelineConfig.LOCAL_MAP_KEY, + new ShareableWrappers.RenderPipelineConfigWrapper(RenderPipelineConfig.disabled()) + ); log.info("Added to local map"); } finally { @@ -270,6 +276,7 @@ public void start(final Promise startPromise) throws Exception { handlersHolders.add(new NameMappingHandlersHolder(vertx)); handlersHolders.add(new ConversionHandlersHolder(vertx)); handlersHolders.add(new InfoHandlersHolder(vertx)); + handlersHolders.add(new ApiDocsHandlersHolder()); handlersHolders.add(new TrackHandlersHolder(vertx)); router.route().failureHandler(ctx -> { diff --git a/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/conversion/ConversionHandlersHolder.java b/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/conversion/ConversionHandlersHolder.java index 869760b..cbd7eb9 100644 --- a/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/conversion/ConversionHandlersHolder.java +++ 
b/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/conversion/ConversionHandlersHolder.java @@ -135,6 +135,7 @@ public void addHandlersToRouter(final @NotNull Router router) { } final var direction = requestJson.getString("direction", "mcool-to-hict"); final var parallelism = requestJson.getInteger("parallelism", Runtime.getRuntime().availableProcessors()); + final var overwrite = requestJson.getBoolean("overwrite", false); final var resolutions = parseResolutions(requestJson.getString("resolutions")); final var compression = requestJson.getInteger("compression", 6); final var compressionAlgorithm = ConversionOptions.CompressionAlgorithm.parse(requestJson.getString("compressionAlgorithm", "deflate")); @@ -154,9 +155,7 @@ public void addHandlersToRouter(final @NotNull Router router) { } final var outputPath = deriveOutputPath(sourcePath); - if (Files.exists(outputPath)) { - throw new IllegalArgumentException("Output file already exists: " + outputPath.getFileName()); - } + prepareOutputPath(outputPath, overwrite); final var options = new ConversionOptions( sourcePath, @@ -201,6 +200,7 @@ public void addHandlersToRouter(final @NotNull Router router) { } final var parallelJobs = Math.max(1, requestJson.getInteger("parallelJobs", 1)); final var parallelism = requestJson.getInteger("parallelism", Runtime.getRuntime().availableProcessors()); + final var overwrite = requestJson.getBoolean("overwrite", false); final var resolutions = parseResolutions(requestJson.getString("resolutions")); final var compression = requestJson.getInteger("compression", 6); final var compressionAlgorithm = ConversionOptions.CompressionAlgorithm.parse(requestJson.getString("compressionAlgorithm", "deflate")); @@ -226,9 +226,7 @@ public void addHandlersToRouter(final @NotNull Router router) { throw new IllegalArgumentException("Source file not found: " + filename); } final var outputPath = deriveOutputPath(sourcePath); - if (Files.exists(outputPath)) { - throw new 
IllegalArgumentException("Output file already exists: " + outputPath.getFileName()); - } + prepareOutputPath(outputPath, overwrite); final var options = new ConversionOptions( sourcePath, outputPath, @@ -416,6 +414,20 @@ private static Path deriveOutputPath(final @NotNull Path sourcePath) { return sourcePath.getParent().resolve(base + ".hict.hdf5"); } + private static void prepareOutputPath(final @NotNull Path outputPath, final boolean overwrite) { + if (!Files.exists(outputPath)) { + return; + } + if (!overwrite) { + throw new IllegalArgumentException("Output file already exists: " + outputPath.getFileName()); + } + try { + Files.delete(outputPath); + } catch (IOException e) { + throw new RuntimeException("Failed to overwrite existing output file: " + outputPath.getFileName(), e); + } + } + private ConversionJob createJob(final @NotNull Path sourcePath, final @NotNull Path outputPath, final @NotNull String direction, final int parallelism, final boolean deleteSourceOnCleanup, final boolean deleteOutputOnCleanup) throws IOException { final var jobId = UUID.randomUUID().toString(); final var job = new ConversionJob(jobId, sourcePath, outputPath, direction, parallelism, deleteSourceOnCleanup, deleteOutputOnCleanup); diff --git a/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/fileop/FileOpHandlersHolder.java b/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/fileop/FileOpHandlersHolder.java index 909ab15..3591558 100644 --- a/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/fileop/FileOpHandlersHolder.java +++ b/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/fileop/FileOpHandlersHolder.java @@ -57,6 +57,13 @@ @Slf4j public class FileOpHandlersHolder extends HandlersHolder { private final Vertx vertx; + private static final String PRIMARY_CHUNKED_FILE_KEY = "chunkedFile"; + private static final String SECONDARY_CHUNKED_FILE_KEY = "chunkedFileSecondary"; + private static final String OPENED_SECONDARY_FILENAME_KEY = "openedSecondaryFilename"; + 
private static final String SECONDARY_COMPATIBILITY_KEY = "secondarySourceCompatibility"; + private static final String ASSEMBLY_SOURCE_KEY = "assemblyInfoSource"; + private static final String ASSEMBLY_SOURCE_PRIMARY = "PRIMARY"; + private static final String ASSEMBLY_SOURCE_SECONDARY = "SECONDARY"; private record JsonRouteResult(int statusCode, @NotNull io.vertx.core.json.JsonObject payload) {} @Override @@ -93,6 +100,14 @@ public void addHandlersToRouter(final @NotNull Router router) { } final @NotNull @NonNull LocalMap map = vertx.sharedData().getLocalMap("hict_server"); + closeChunkedFileWrapper((ShareableWrappers.ChunkedFileWrapper) map.get(PRIMARY_CHUNKED_FILE_KEY)); + map.remove(PRIMARY_CHUNKED_FILE_KEY); + closeChunkedFileWrapper((ShareableWrappers.ChunkedFileWrapper) map.get(SECONDARY_CHUNKED_FILE_KEY)); + map.remove(SECONDARY_CHUNKED_FILE_KEY); + map.remove(OPENED_SECONDARY_FILENAME_KEY); + map.remove(SECONDARY_COMPATIBILITY_KEY); + map.put(ASSEMBLY_SOURCE_KEY, ASSEMBLY_SOURCE_PRIMARY); + final var oldTrackManagerWrapper = (ShareableWrappers.Track1DManagerWrapper) map.get("Track1DManager"); if (oldTrackManagerWrapper != null) { oldTrackManagerWrapper.getTrack1DManager().setLinkedFastaAliasesBySource(java.util.Map.of()); @@ -121,7 +136,7 @@ public void addHandlersToRouter(final @NotNull Router router) { )); final var chunkedFileWrapper = new ShareableWrappers.ChunkedFileWrapper(chunkedFile); log.info("Putting chunkedFile into the local map"); - map.put("chunkedFile", chunkedFileWrapper); + map.put(PRIMARY_CHUNKED_FILE_KEY, chunkedFileWrapper); map.put("openedFilename", filename); map.put("TileStatisticHolder", TileStatisticHolder.newDefaultStatisticHolder(chunkedFile.getResolutions().length)); @@ -163,6 +178,179 @@ public void addHandlersToRouter(final @NotNull Router router) { .end(((io.vertx.core.json.JsonObject) progressObj).encode()); }); + router.post("/secondary/status").handler(ctx -> { + final var scheduler = getScheduler(ctx); + if (scheduler == 
null) { + return; + } + scheduler.submit( + ctx, + RequestTaskScheduler.RequestPriority.UI_UX, + null, + () -> secondaryStatusJson(vertx.sharedData().getLocalMap("hict_server")), + response -> ctx.response() + .putHeader("content-type", "application/json") + .end(response.encode()) + ); + }); + + router.post("/secondary/open").handler(ctx -> { + final var scheduler = getScheduler(ctx); + if (scheduler == null) { + return; + } + scheduler.submit( + ctx, + RequestTaskScheduler.RequestPriority.ASSEMBLY, + null, + () -> { + final @NotNull @NonNull LocalMap map = vertx.sharedData().getLocalMap("hict_server"); + final var primaryWrapper = (ShareableWrappers.ChunkedFileWrapper) map.get(PRIMARY_CHUNKED_FILE_KEY); + if (primaryWrapper == null) { + throw new IllegalStateException("Open primary Hi-C source before attaching secondary source"); + } + final var dataDirectoryWrapper = (ShareableWrappers.PathWrapper) map.get("dataDirectory"); + if (dataDirectoryWrapper == null) { + throw new RuntimeException("Data directory is not present in local map"); + } + final var requestJson = ctx.body().asJsonObject(); + final var filename = requestJson.getString("filename"); + final var allowMismatch = requestJson.getBoolean("allowMismatch", false); + if (filename == null || filename.isBlank()) { + throw new IllegalArgumentException("Secondary source filename is required"); + } + final var dataDirectory = dataDirectoryWrapper.getPath(); + final var filePath = dataDirectory.resolve(filename).normalize().toAbsolutePath(); + if (!filePath.startsWith(dataDirectory)) { + throw new IllegalArgumentException("Secondary source path " + filename + " is outside DATA_DIR"); + } + if (!Files.exists(filePath) || !Files.isRegularFile(filePath)) { + throw new IllegalArgumentException("Secondary source file " + filename + " does not exist"); + } + final var secondaryChunkedFile = new ChunkedFile( + new ChunkedFile.ChunkedFileOptions( + filePath, + (int) map.getOrDefault("MIN_DS_POOL", 4), + (int) 
map.getOrDefault("MAX_DS_POOL", 16) + ) + ); + final SecondaryCompatibility compatibility; + try { + compatibility = analyzeSecondaryCompatibility(primaryWrapper.getChunkedFile(), secondaryChunkedFile); + } catch (final RuntimeException ex) { + try { + secondaryChunkedFile.close(); + } catch (final Exception ignored) { + // no-op + } + throw ex; + } + if (!compatibility.exactMatch() && !allowMismatch) { + try { + secondaryChunkedFile.close(); + } catch (final Exception ignored) { + // no-op + } + final var currentStatus = secondaryStatusJson(map); + return currentStatus + .put("requiresConfirmation", true) + .put("requestedFilename", filename) + .put("compatibility", compatibility.toJson()) + .put("warnings", compatibility.warningsAsJsonArray()); + } + closeChunkedFileWrapper((ShareableWrappers.ChunkedFileWrapper) map.get(SECONDARY_CHUNKED_FILE_KEY)); + map.put(SECONDARY_CHUNKED_FILE_KEY, new ShareableWrappers.ChunkedFileWrapper(secondaryChunkedFile)); + map.put(OPENED_SECONDARY_FILENAME_KEY, filename); + map.put(SECONDARY_COMPATIBILITY_KEY, compatibility.toJson()); + map.putIfAbsent(ASSEMBLY_SOURCE_KEY, ASSEMBLY_SOURCE_PRIMARY); + final var schedulerWrapper = (ShareableWrappers.RequestTaskSchedulerWrapper) map.get(RequestTaskScheduler.LOCAL_MAP_KEY); + if (schedulerWrapper != null) { + schedulerWrapper.getRequestTaskScheduler().bumpGeneration(RequestTaskScheduler.CancellationDomain.TILE); + } + return secondaryStatusJson(map) + .put("requiresConfirmation", false) + .put("compatibility", compatibility.toJson()) + .put("warnings", compatibility.warningsAsJsonArray()); + }, + response -> ctx.response() + .putHeader("content-type", "application/json") + .end(response.encode()) + ); + }); + + router.post("/secondary/close").handler(ctx -> { + final var scheduler = getScheduler(ctx); + if (scheduler == null) { + return; + } + scheduler.submit( + ctx, + RequestTaskScheduler.RequestPriority.ASSEMBLY, + null, + () -> { + final @NotNull @NonNull LocalMap map = 
vertx.sharedData().getLocalMap("hict_server"); + closeChunkedFileWrapper((ShareableWrappers.ChunkedFileWrapper) map.get(SECONDARY_CHUNKED_FILE_KEY)); + map.remove(SECONDARY_CHUNKED_FILE_KEY); + map.remove(OPENED_SECONDARY_FILENAME_KEY); + map.remove(SECONDARY_COMPATIBILITY_KEY); + if (ASSEMBLY_SOURCE_SECONDARY.equalsIgnoreCase(String.valueOf(map.getOrDefault(ASSEMBLY_SOURCE_KEY, ASSEMBLY_SOURCE_PRIMARY)))) { + map.put(ASSEMBLY_SOURCE_KEY, ASSEMBLY_SOURCE_PRIMARY); + } + final var schedulerWrapper = (ShareableWrappers.RequestTaskSchedulerWrapper) map.get(RequestTaskScheduler.LOCAL_MAP_KEY); + if (schedulerWrapper != null) { + schedulerWrapper.getRequestTaskScheduler().bumpGeneration(RequestTaskScheduler.CancellationDomain.TILE); + } + return secondaryStatusJson(map); + }, + response -> ctx.response() + .putHeader("content-type", "application/json") + .end(response.encode()) + ); + }); + + router.post("/secondary/set_assembly_source").handler(ctx -> { + final var scheduler = getScheduler(ctx); + if (scheduler == null) { + return; + } + scheduler.submit( + ctx, + RequestTaskScheduler.RequestPriority.ASSEMBLY, + null, + () -> { + final @NotNull @NonNull LocalMap map = vertx.sharedData().getLocalMap("hict_server"); + final var requestJson = ctx.body().asJsonObject(); + final var requestedSource = String.valueOf(requestJson.getString("assemblySource", ASSEMBLY_SOURCE_PRIMARY)) + .trim() + .toUpperCase(); + final var primaryWrapper = (ShareableWrappers.ChunkedFileWrapper) map.get(PRIMARY_CHUNKED_FILE_KEY); + if (primaryWrapper == null) { + throw new IllegalStateException("Open primary Hi-C source first"); + } + final var secondaryWrapper = (ShareableWrappers.ChunkedFileWrapper) map.get(SECONDARY_CHUNKED_FILE_KEY); + final ChunkedFile sourceChunkedFile; + final String normalizedSource; + if (ASSEMBLY_SOURCE_SECONDARY.equals(requestedSource)) { + if (secondaryWrapper == null) { + throw new IllegalStateException("Secondary source is not attached"); + } + normalizedSource = 
ASSEMBLY_SOURCE_SECONDARY; + sourceChunkedFile = secondaryWrapper.getChunkedFile(); + } else { + normalizedSource = ASSEMBLY_SOURCE_PRIMARY; + sourceChunkedFile = primaryWrapper.getChunkedFile(); + } + map.put(ASSEMBLY_SOURCE_KEY, normalizedSource); + return new io.vertx.core.json.JsonObject() + .put("assemblySource", normalizedSource) + .put("assemblyInfo", io.vertx.core.json.JsonObject.mapFrom(AssemblyInfoDTO.generateFromChunkedFile(sourceChunkedFile))); + }, + response -> ctx.response() + .putHeader("content-type", "application/json") + .end(response.encode()) + ); + }); + router.post("/attach").handler(ctx -> { final var scheduler = getScheduler(ctx); if (scheduler == null) { @@ -174,7 +362,7 @@ public void addHandlersToRouter(final @NotNull Router router) { null, () -> { final @NotNull @NonNull LocalMap map = vertx.sharedData().getLocalMap("hict_server"); - final var chunkedFileWrapper = ((ShareableWrappers.ChunkedFileWrapper) (map.get("chunkedFile"))); + final var chunkedFileWrapper = ((ShareableWrappers.ChunkedFileWrapper) (map.get(PRIMARY_CHUNKED_FILE_KEY))); if (chunkedFileWrapper == null) { return new JsonRouteResult( 404, @@ -183,11 +371,13 @@ public void addHandlersToRouter(final @NotNull Router router) { } final var chunkedFile = chunkedFileWrapper.getChunkedFile(); final var filename = (String) map.getOrDefault("openedFilename", ""); + final var secondaryStatus = secondaryStatusJson(map); return new JsonRouteResult( 200, new io.vertx.core.json.JsonObject() .put("filename", filename) .put("fastaFilename", map.getOrDefault("linkedFastaFilename", "")) + .put("secondarySource", secondaryStatus) .put("openFileResponse", generateOpenFileResponse(chunkedFile)) ); }, @@ -209,7 +399,7 @@ public void addHandlersToRouter(final @NotNull Router router) { null, () -> { final @NotNull @NonNull LocalMap map = vertx.sharedData().getLocalMap("hict_server"); - final var chunkedFileWrapper = ((ShareableWrappers.ChunkedFileWrapper) (map.get("chunkedFile"))); + final var 
chunkedFileWrapper = ((ShareableWrappers.ChunkedFileWrapper) (map.get(PRIMARY_CHUNKED_FILE_KEY))); if (chunkedFileWrapper != null) { try { chunkedFileWrapper.getChunkedFile().close(); @@ -217,7 +407,12 @@ public void addHandlersToRouter(final @NotNull Router router) { log.warn("Failed to close chunked file", e); } } - map.remove("chunkedFile"); + map.remove(PRIMARY_CHUNKED_FILE_KEY); + closeChunkedFileWrapper((ShareableWrappers.ChunkedFileWrapper) map.get(SECONDARY_CHUNKED_FILE_KEY)); + map.remove(SECONDARY_CHUNKED_FILE_KEY); + map.remove(OPENED_SECONDARY_FILENAME_KEY); + map.remove(SECONDARY_COMPATIBILITY_KEY); + map.put(ASSEMBLY_SOURCE_KEY, ASSEMBLY_SOURCE_PRIMARY); map.remove("TileStatisticHolder"); map.remove("openedFilename"); map.remove("linkedFastaPath"); @@ -450,6 +645,89 @@ public void addHandlersToRouter(final @NotNull Router router) { }); } + private static void closeChunkedFileWrapper(final ShareableWrappers.ChunkedFileWrapper wrapper) { + if (wrapper == null) { + return; + } + try { + wrapper.getChunkedFile().close(); + } catch (final Exception ignored) { + // no-op + } + } + + private static @NotNull SecondaryCompatibility analyzeSecondaryCompatibility(final @NotNull ChunkedFile primary, + final @NotNull ChunkedFile secondary) { + final var primaryResolutions = primary.getResolutions().clone(); + final var secondaryResolutions = secondary.getResolutions().clone(); + final var primaryMatrixSizeBins = primary.getMatrixSizeBins().clone(); + final var secondaryMatrixSizeBins = secondary.getMatrixSizeBins().clone(); + return new SecondaryCompatibility( + Arrays.equals(primaryResolutions, secondaryResolutions), + Arrays.equals(primaryMatrixSizeBins, secondaryMatrixSizeBins), + primaryMatrixSizeBins, + secondaryMatrixSizeBins + ); + } + + private io.vertx.core.json.JsonObject secondaryStatusJson(final @NotNull LocalMap map) { + final var attached = map.get(SECONDARY_CHUNKED_FILE_KEY) instanceof ShareableWrappers.ChunkedFileWrapper; + final var assemblySource 
= String.valueOf(map.getOrDefault(ASSEMBLY_SOURCE_KEY, ASSEMBLY_SOURCE_PRIMARY)); + final var filename = String.valueOf(map.getOrDefault(OPENED_SECONDARY_FILENAME_KEY, "")); + final var status = new io.vertx.core.json.JsonObject() + .put("attached", attached) + .put("filename", attached ? filename : "") + .put("assemblySource", assemblySource); + final var compatibility = map.get(SECONDARY_COMPATIBILITY_KEY); + if (attached && compatibility instanceof io.vertx.core.json.JsonObject compatibilityJson) { + status.put("compatibility", compatibilityJson.copy()); + } + return status; + } + + private record SecondaryCompatibility(boolean sameResolutions, + boolean sameMatrixSizes, + long[] primaryMatrixSizeBins, + long[] secondaryMatrixSizeBins) { + private boolean exactMatch() { + return sameResolutions && sameMatrixSizes; + } + + private io.vertx.core.json.JsonArray warningsAsJsonArray() { + final var warnings = new io.vertx.core.json.JsonArray(); + if (!sameResolutions) { + warnings.add("Primary and secondary sources expose different resolution sets."); + } + if (!sameMatrixSizes) { + warnings.add("Primary and secondary sources have different matrix sizes. Smaller source will be padded with background."); + } + return warnings; + } + + private io.vertx.core.json.JsonObject toJson() { + final var maxLength = Math.max(primaryMatrixSizeBins.length, secondaryMatrixSizeBins.length); + final var mismatchedOrders = new io.vertx.core.json.JsonArray(); + for (int idx = 0; idx < maxLength; idx++) { + final var primaryValue = idx < primaryMatrixSizeBins.length ? primaryMatrixSizeBins[idx] : -1L; + final var secondaryValue = idx < secondaryMatrixSizeBins.length ? 
secondaryMatrixSizeBins[idx] : -1L; + if (primaryValue != secondaryValue) { + mismatchedOrders.add(idx); + } + } + final var primaryMaxBins = Arrays.stream(primaryMatrixSizeBins).max().orElse(0L); + final var secondaryMaxBins = Arrays.stream(secondaryMatrixSizeBins).max().orElse(0L); + return new io.vertx.core.json.JsonObject() + .put("sameResolutions", sameResolutions) + .put("sameMatrixSizes", sameMatrixSizes) + .put("exactMatch", exactMatch()) + .put("primaryMaxBins", primaryMaxBins) + .put("secondaryMaxBins", secondaryMaxBins) + .put("primaryBinsByResolution", Arrays.stream(primaryMatrixSizeBins).boxed().toList()) + .put("secondaryBinsByResolution", Arrays.stream(secondaryMatrixSizeBins).boxed().toList()) + .put("mismatchedResolutionOrders", mismatchedOrders); + } + } + private RequestTaskScheduler getScheduler(final @NotNull io.vertx.ext.web.RoutingContext ctx) { final @NotNull @NonNull LocalMap map = vertx.sharedData().getLocalMap("hict_server"); final var wrapper = (ShareableWrappers.RequestTaskSchedulerWrapper) map.get(RequestTaskScheduler.LOCAL_MAP_KEY); diff --git a/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/files/FSHandlersHolder.java b/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/files/FSHandlersHolder.java index 95f88ab..d061736 100644 --- a/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/files/FSHandlersHolder.java +++ b/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/files/FSHandlersHolder.java @@ -77,6 +77,35 @@ public void addHandlersToRouter(final @NotNull Router router) { ); }); + router.post("/list_files_detailed").handler(ctx -> { + final var scheduler = getScheduler(ctx); + if (scheduler == null) { + return; + } + scheduler.submit( + ctx, + RequestTaskScheduler.RequestPriority.UI_UX, + null, + () -> { + final var dataDirectoryWrapper = (ShareableWrappers.PathWrapper) vertx.sharedData().getLocalMap("hict_server").get("dataDirectory"); + if (dataDirectoryWrapper == null) { + throw new RuntimeException("Data 
directory is not present in local map"); + } + final var dataDirectory = dataDirectoryWrapper.getPath(); + try (final var fileStream = Files.walk(dataDirectory)) { + return fileStream + .filter(Files::isRegularFile) + .map(path -> toDetailedFileEntry(dataDirectory, path)) + .sorted(java.util.Comparator.comparing(FileEntry::path)) + .toList(); + } catch (final IOException e) { + throw new RuntimeException(e); + } + }, + files -> ctx.response().putHeader("content-type", "application/json").end(Json.encode(files)) + ); + }); + router.post("/list_agp_files").handler(ctx -> { final var scheduler = getScheduler(ctx); if (scheduler == null) { @@ -175,4 +204,26 @@ private static boolean isFastaFilename(final @NotNull String path) { final var lowered = path.toLowerCase(); return FASTA_SUFFIXES.stream().anyMatch(lowered::endsWith); } + + private static @NotNull FileEntry toDetailedFileEntry(final @NotNull java.nio.file.Path dataDirectory, + final @NotNull java.nio.file.Path path) { + final var relative = dataDirectory.relativize(path).toString(); + final var fileName = path.getFileName() == null ? relative : path.getFileName().toString(); + final var lowered = fileName.toLowerCase(); + final int dotIndex = lowered.lastIndexOf('.'); + final var extension = dotIndex >= 0 ? 
lowered.substring(dotIndex) : ""; + try { + final var attrs = Files.readAttributes(path, java.nio.file.attribute.BasicFileAttributes.class); + return new FileEntry(relative, fileName, attrs.size(), attrs.lastModifiedTime().toMillis(), extension); + } catch (final IOException e) { + return new FileEntry(relative, fileName, -1L, 0L, extension); + } + } + + private record FileEntry(@NotNull String path, + @NotNull String name, + long sizeBytes, + long modifiedAtMs, + @NotNull String extension) { + } } diff --git a/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/info/ApiDocsHandlersHolder.java b/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/info/ApiDocsHandlersHolder.java new file mode 100644 index 0000000..bd28af7 --- /dev/null +++ b/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/info/ApiDocsHandlersHolder.java @@ -0,0 +1,129 @@ +package ru.itmo.ctlab.hict.hict_server.handlers.info; + +import io.vertx.ext.web.Router; +import org.jetbrains.annotations.NotNull; +import ru.itmo.ctlab.hict.hict_server.HandlersHolder; + +import java.io.BufferedReader; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.HexFormat; + +/** + * Serves interactive API documentation and the OpenAPI v1 specification. + */ +public class ApiDocsHandlersHolder extends HandlersHolder { + private static final String OPENAPI_SPEC_PATH = "/openapi/hict-api-v1.yaml"; + private static final String OPENAPI_YAML = readResourceUtf8(OPENAPI_SPEC_PATH); + private static final String OPENAPI_ETAG = computeWeakEtag(OPENAPI_YAML); + private static final String DOCS_HTML = """ + + + + + + HiCT API v1 + + + + +
+ + + + + """; + private static final String DOCS_ETAG = computeWeakEtag(DOCS_HTML); + + @Override + public void addHandlersToRouter(final @NotNull Router router) { + router.getWithRegex("^/api/v1$").handler(ctx -> ctx.response() + .setStatusCode(307) + .putHeader("location", "/api/v1/") + .end()); + + router.get("/api/v1/").handler(ctx -> { + if (etagMatches(ctx.request().getHeader("if-none-match"), DOCS_ETAG)) { + ctx.response().setStatusCode(304).end(); + return; + } + ctx.response() + .putHeader("content-type", "text/html; charset=utf-8") + .putHeader("cache-control", "public, max-age=300") + .putHeader("etag", DOCS_ETAG) + .end(DOCS_HTML); + }); + + router.getWithRegex("^/api/v1/openapi$").handler(ctx -> ctx.response() + .setStatusCode(307) + .putHeader("location", "/api/v1/openapi.yaml") + .end()); + + router.get("/api/v1/openapi.yaml").handler(ctx -> { + if (etagMatches(ctx.request().getHeader("if-none-match"), OPENAPI_ETAG)) { + ctx.response().setStatusCode(304).end(); + return; + } + ctx.response() + .putHeader("content-type", "application/yaml; charset=utf-8") + .putHeader("cache-control", "public, max-age=300") + .putHeader("etag", OPENAPI_ETAG) + .end(OPENAPI_YAML); + }); + } + + // RFC 7232 section 3.2: If-None-Match may be "*" or a comma-separated list of entity tags. + private static boolean etagMatches(final String ifNoneMatchHeader, final String expectedEtag) { + if (ifNoneMatchHeader == null) { + return false; + } + final var header = ifNoneMatchHeader.trim(); + if ("*".equals(header)) { + return true; + } + for (final var candidate : header.split(",")) { + if (candidate.trim().equals(expectedEtag)) { + return true; + } + } + return false; + } + + private static @NotNull String readResourceUtf8(final @NotNull String path) { + try (final InputStream stream = ApiDocsHandlersHolder.class.getResourceAsStream(path)) { + if (stream == null) { + throw new IllegalStateException("Resource not found: " + path); + } + try (final var reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) { + final var sb = new StringBuilder(); + String line; + while ((line = reader.readLine()) != null) { + sb.append(line).append('\n'); + } + return sb.toString(); + } + } catch (final Exception ex) { + throw new
IllegalStateException("Failed to load resource " + path, ex); + } + } + + private static @NotNull String computeWeakEtag(final @NotNull String content) { + try { + final var digest = MessageDigest.getInstance("SHA-256") + .digest(content.getBytes(StandardCharsets.UTF_8)); + return "W/\"" + HexFormat.of().formatHex(digest) + "\""; + } catch (final NoSuchAlgorithmException ex) { + throw new IllegalStateException("SHA-256 is unavailable", ex); + } + } +} diff --git a/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/RenderPipelineConfig.java b/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/RenderPipelineConfig.java new file mode 100644 index 0000000..d0937bd --- /dev/null +++ b/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/RenderPipelineConfig.java @@ -0,0 +1,954 @@ +/* + * MIT License + * + * Copyright (c) 2021-2026. Aleksandr Serdiukov, Anton Zamyatin, Aleksandr Sinitsyn, Vitalii Dravgelis and Computer Technologies Laboratory ITMO University team. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package ru.itmo.ctlab.hict.hict_server.handlers.tiles; + +import io.vertx.core.json.JsonObject; +import org.jetbrains.annotations.NotNull; +import ru.itmo.ctlab.hict.hict_library.visualization.SimpleVisualizationOptions; +import ru.itmo.ctlab.hict.hict_library.visualization.colormap.gradient.SimpleLinearGradient; + +import java.awt.*; +import java.util.Collections; +import java.util.HashSet; +import java.util.Locale; +import java.util.Map; +import java.util.Set; + +public final class RenderPipelineConfig { + public static final String LOCAL_MAP_KEY = "renderPipelineConfig"; + public static final String BUILTIN_COOLER_WEIGHTS_TRACK_ID = "__builtin_cooler_weights__"; + + private final boolean enabled; + private final boolean swapUpperLower; + private final @NotNull JsonObject upperExpressionJson; + private final @NotNull JsonObject lowerExpressionJson; + private final @NotNull CompiledRootExpression upperExpression; + private final @NotNull CompiledRootExpression lowerExpression; + private final @NotNull Set requiredTrackBindings; + + private RenderPipelineConfig(final boolean enabled, + final boolean swapUpperLower, + final @NotNull JsonObject upperExpressionJson, + final @NotNull JsonObject lowerExpressionJson) { + this.enabled = enabled; + this.swapUpperLower = swapUpperLower; + this.upperExpressionJson = upperExpressionJson.copy(); + this.lowerExpressionJson = lowerExpressionJson.copy(); + final var required = new HashSet(); + this.upperExpression = compileRootExpression(this.upperExpressionJson, required); + this.lowerExpression = compileRootExpression(this.lowerExpressionJson, required); + this.requiredTrackBindings = Collections.unmodifiableSet(required); + } + + public static 
@NotNull RenderPipelineConfig disabled() { + final var defaultExpression = defaultColdHotExpression(); + return new RenderPipelineConfig(false, false, defaultExpression, defaultExpression.copy()); + } + + public static @NotNull RenderPipelineConfig fromVisualizationOptions(final @NotNull SimpleVisualizationOptions options, + final boolean enabled, + final boolean swapUpperLower) { + final var expression = buildExpressionFromVisualizationOptions(options); + return new RenderPipelineConfig( + enabled, + swapUpperLower, + expression, + expression.copy() + ); + } + + public static @NotNull RenderPipelineConfig fromJson(final JsonObject json) { + if (json == null) { + return disabled(); + } + final var enabled = json.getBoolean("enabled", false); + final var swapUpperLower = json.getBoolean("swapUpperLower", false); + final var upper = json.getJsonObject("upperExpression", json.getJsonObject("upper")); + final var lower = json.getJsonObject("lowerExpression", json.getJsonObject("lower")); + return new RenderPipelineConfig( + enabled, + swapUpperLower, + upper != null ? upper : defaultColdHotExpression(), + lower != null ? 
lower : defaultColdHotExpression() + ); + } + + public @NotNull JsonObject toJson() { + return new JsonObject() + .put("enabled", this.enabled) + .put("swapUpperLower", this.swapUpperLower) + .put("upperExpression", this.upperExpressionJson.copy()) + .put("lowerExpression", this.lowerExpressionJson.copy()); + } + + public boolean enabled() { + return this.enabled; + } + + public boolean swapUpperLower() { + return this.swapUpperLower; + } + + public @NotNull Set requiredTrackBindings() { + return this.requiredTrackBindings; + } + + public double evaluate(final boolean upperTriangle, final @NotNull MutablePixelContext context) { + final var root = selectRootExpression(upperTriangle); + final var raw = root.evalSignal(context); + if (Double.isFinite(raw)) { + return raw; + } + return 0.0d; + } + + public int evaluateArgb(final boolean upperTriangle, + final @NotNull MutablePixelContext context, + final @NotNull SimpleVisualizationOptions options) { + final var root = selectRootExpression(upperTriangle); + return root.evalArgb(context, options); + } + + private @NotNull CompiledRootExpression selectRootExpression(final boolean upperTriangle) { + final var useUpper = this.swapUpperLower ? !upperTriangle : upperTriangle; + return useUpper ? 
this.upperExpression : this.lowerExpression; + } + + private static @NotNull JsonObject defaultSourceNode(final @NotNull String source) { + return new JsonObject() + .put("type", "source") + .put("source", source); + } + + private static @NotNull JsonObject constantNode(final double value) { + return new JsonObject() + .put("type", "constant") + .put("value", value); + } + + private static @NotNull JsonObject dynamicNode(final @NotNull String field) { + return new JsonObject() + .put("type", "dynamic") + .put("field", field); + } + + private static @NotNull JsonObject trackNode(final @NotNull String trackId, + final @NotNull String axis) { + return new JsonObject() + .put("type", "track1d") + .put("trackId", trackId) + .put("axis", axis); + } + + private static @NotNull JsonObject unaryNode(final @NotNull String op, + final @NotNull JsonObject input) { + return new JsonObject() + .put("type", "unary") + .put("op", op) + .put("input", input.copy()); + } + + private static @NotNull JsonObject logNode(final @NotNull JsonObject input, + final double base) { + return new JsonObject() + .put("type", "log") + .put("input", input.copy()) + .put("base", base); + } + + private static @NotNull JsonObject logInputNode(final @NotNull JsonObject input, + final @NotNull JsonObject baseExpression) { + return new JsonObject() + .put("type", "log_input") + .put("input", input.copy()) + .put("base", baseExpression.copy()); + } + + private static @NotNull JsonObject binaryNode(final @NotNull String op, + final @NotNull JsonObject left, + final @NotNull JsonObject right) { + return new JsonObject() + .put("type", "binary") + .put("op", op) + .put("left", left.copy()) + .put("right", right.copy()); + } + + private static @NotNull JsonObject clampNode(final @NotNull JsonObject input, + final double minValue, + final double maxValue) { + return new JsonObject() + .put("type", "clamp") + .put("input", input.copy()) + .put("minValue", minValue) + .put("maxValue", maxValue); + } + + private 
static @NotNull JsonObject colormapNode(final @NotNull JsonObject input, + final @NotNull String startColor, + final @NotNull String endColor, + final double minSignal, + final double maxSignal) { + return new JsonObject() + .put("type", "colormap") + .put("input", input.copy()) + .put("mode", "LINEAR") + .put("startColor", startColor) + .put("endColor", endColor) + .put("minSignal", minSignal) + .put("maxSignal", maxSignal); + } + + private static @NotNull JsonObject applyLogByBase(final @NotNull JsonObject input, + final double base) { + if (!Double.isFinite(base) || base <= 0.0d || Math.abs(base - 1.0d) < 1e-9d) { + return input; + } + return logNode(input, base); + } + + private static @NotNull String colorToHex(final @NotNull Color color) { + if (color.getAlpha() >= 255) { + return String.format("#%02x%02x%02x", color.getRed(), color.getGreen(), color.getBlue()); + } + return String.format("#%02x%02x%02x%02x", color.getRed(), color.getGreen(), color.getBlue(), color.getAlpha()); + } + + private static @NotNull JsonObject buildExpressionFromVisualizationOptions(final @NotNull SimpleVisualizationOptions options) { + JsonObject signalExpression = defaultSourceNode("PRIMARY"); + + signalExpression = applyLogByBase(signalExpression, options.getPreLogBase()); + + if (options.isResolutionScaling()) { + signalExpression = binaryNode( + "MUL", + signalExpression, + dynamicNode("RESOLUTION_SCALING_COEFF") + ); + } + + if (options.isResolutionLinearScaling()) { + signalExpression = binaryNode( + "MUL", + signalExpression, + dynamicNode("RESOLUTION_LINEAR_SCALING_COEFF") + ); + } + + if (options.isApplyCoolerWeights()) { + final var coolerWeights = binaryNode( + "MUL", + trackNode(BUILTIN_COOLER_WEIGHTS_TRACK_ID, "ROW"), + trackNode(BUILTIN_COOLER_WEIGHTS_TRACK_ID, "COL") + ); + signalExpression = binaryNode( + "MUL", + signalExpression, + coolerWeights + ); + } + + signalExpression = applyLogByBase(signalExpression, options.getPostLogBase()); + + Color startColor = new 
Color(255, 255, 255, 0); + Color endColor = new Color(0, 96, 0, 255); + double minSignal = 0.0d; + double maxSignal = 1.0d; + if (options.getColormap() instanceof SimpleLinearGradient gradient) { + startColor = gradient.getStartColor(); + endColor = gradient.getEndColor(); + minSignal = gradient.getMinSignal(); + maxSignal = gradient.getMaxSignal(); + } + + return colormapNode( + signalExpression, + colorToHex(startColor), + colorToHex(endColor), + minSignal, + maxSignal + ); + } + + private static @NotNull JsonObject defaultColdHotExpression() { + var signalExpression = defaultSourceNode("PRIMARY"); + signalExpression = applyLogByBase(signalExpression, 10.0d); + final var coolerWeights = binaryNode( + "MUL", + trackNode(BUILTIN_COOLER_WEIGHTS_TRACK_ID, "ROW"), + trackNode(BUILTIN_COOLER_WEIGHTS_TRACK_ID, "COL") + ); + signalExpression = binaryNode( + "MUL", + signalExpression, + coolerWeights + ); + signalExpression = applyLogByBase(signalExpression, 5.0d); + return colormapNode( + signalExpression, + "#0013e300", + "#e80000ff", + 0.0d, + 0.75d + ); + } + + private static @NotNull CompiledRootExpression compileRootExpression(final @NotNull JsonObject node, + final @NotNull Set requiredTrackBindings) { + final var nodeType = node.getString("type", "source").trim().toUpperCase(Locale.ROOT); + if (isColorNode(nodeType)) { + final var colorExpression = compileColorExpression(node, requiredTrackBindings); + final CompiledNumericExpression fallbackSignalExpression = + "COLORMAP".equals(nodeType) + ? 
compileImplicitColormapSignalExpression(node, requiredTrackBindings) + : context -> 0.0d; + return new CompiledRootExpression(true, fallbackSignalExpression, colorExpression); + } + final var signalExpression = compileNumericExpression(node, requiredTrackBindings); + return new CompiledRootExpression(false, signalExpression, null); + } + + private static boolean isColorNode(final @NotNull String nodeType) { + return switch (nodeType) { + case "COLORMAP", "RGB", "HSL", "HSV" -> true; + default -> false; + }; + } + + private static @NotNull CompiledNumericExpression compileNumericExpression(final @NotNull JsonObject node, + final @NotNull Set requiredTrackBindings) { + final var nodeType = node.getString("type", "source").trim().toUpperCase(Locale.ROOT); + return switch (nodeType) { + case "SOURCE" -> { + final var sourceName = node.getString("source", "PRIMARY").trim().toUpperCase(Locale.ROOT); + yield switch (sourceName) { + case "SECONDARY" -> context -> context.secondaryValue; + case "PRIMARY" -> context -> context.primaryValue; + default -> throw new IllegalArgumentException("Unsupported source: " + sourceName); + }; + } + case "CONSTANT" -> { + final var value = node.getDouble("value", 0.0d); + yield context -> value; + } + case "DYNAMIC" -> { + final var field = node.getString("field", "PRIMARY").trim().toUpperCase(Locale.ROOT); + yield switch (field) { + case "ROW_BP" -> context -> context.rowBp; + case "COL_BP" -> context -> context.colBp; + case "ROW_BIN" -> context -> context.rowBin; + case "COL_BIN" -> context -> context.colBin; + case "ROW_PX" -> context -> context.rowPx; + case "COL_PX" -> context -> context.colPx; + case "ROW_WEIGHT" -> context -> context.rowWeight; + case "COL_WEIGHT" -> context -> context.colWeight; + case "RESOLUTION_SCALING_COEFF" -> context -> context.resolutionScalingCoeff; + case "RESOLUTION_LINEAR_SCALING_COEFF" -> context -> context.resolutionLinearScalingCoeff; + case "DIAG_BP_DISTANCE" -> context -> Math.abs(context.rowBp - 
context.colBp); + case "DIAG_BIN_DISTANCE" -> context -> Math.abs(context.rowBin - context.colBin); + case "DIAG_PX_DISTANCE" -> context -> Math.abs(context.rowPx - context.colPx); + case "BP_RESOLUTION" -> context -> context.bpResolution; + default -> throw new IllegalArgumentException("Unsupported dynamic field: " + field); + }; + } + case "TRACK1D" -> { + final var trackId = node.getString("trackId", "").trim(); + final var axis = parseTrackAxis(node.getString("axis", "ROW")); + if (isBuiltinCoolerWeightsTrackId(trackId)) { + yield axis == TrackAxis.ROW ? context -> context.rowWeight : context -> context.colWeight; + } + if (!trackId.isBlank()) { + requiredTrackBindings.add(new TrackBinding(trackId, axis)); + } + yield context -> context.sampleTrackValue(trackId, axis); + } + case "UNARY" -> { + final var op = node.getString("op", "ABS").trim().toUpperCase(Locale.ROOT); + final var inputNode = node.getJsonObject("input", defaultSourceNode("PRIMARY")); + final var inputExpression = compileNumericExpression(inputNode, requiredTrackBindings); + yield switch (op) { + case "ABS" -> context -> Math.abs(inputExpression.eval(context)); + case "LOG", "LOG1P" -> context -> Math.log1p(Math.max(0.0d, inputExpression.eval(context))); + case "EXP" -> context -> { + final var value = inputExpression.eval(context); + final var bounded = Math.max(-60.0d, Math.min(60.0d, value)); + return Math.exp(bounded); + }; + case "NEG" -> context -> -inputExpression.eval(context); + default -> throw new IllegalArgumentException("Unsupported unary operation: " + op); + }; + } + case "LOG" -> { + final var inputExpression = compileNumericExpression( + node.getJsonObject("input", defaultSourceNode("PRIMARY")), + requiredTrackBindings + ); + final var base = node.getDouble("base", Math.E); + yield context -> evalLogByBase(inputExpression.eval(context), base); + } + case "LOG_INPUT" -> { + final var inputExpression = compileNumericExpression( + node.getJsonObject("input", 
defaultSourceNode("PRIMARY")), + requiredTrackBindings + ); + final var baseExpression = compileNumericChildExpression( + node, + "base", + "baseValue", + Math.E, + requiredTrackBindings + ); + yield context -> evalLogByBase(inputExpression.eval(context), baseExpression.eval(context)); + } + case "BINARY" -> { + final var op = node.getString("op", "ADD").trim().toUpperCase(Locale.ROOT); + final var leftNode = node.getJsonObject("left", defaultSourceNode("PRIMARY")); + final var rightNode = node.getJsonObject("right", constantNode(0.0d)); + final var leftExpression = compileNumericExpression(leftNode, requiredTrackBindings); + final var rightExpression = compileNumericExpression(rightNode, requiredTrackBindings); + yield switch (op) { + case "ADD" -> context -> leftExpression.eval(context) + rightExpression.eval(context); + case "SUB" -> context -> leftExpression.eval(context) - rightExpression.eval(context); + case "MUL" -> context -> leftExpression.eval(context) * rightExpression.eval(context); + case "DIV" -> context -> { + final var denominator = rightExpression.eval(context); + if (!Double.isFinite(denominator) || Math.abs(denominator) < 1e-12) { + return 0.0d; + } + return leftExpression.eval(context) / denominator; + }; + case "MAX" -> context -> Math.max(leftExpression.eval(context), rightExpression.eval(context)); + case "MIN" -> context -> Math.min(leftExpression.eval(context), rightExpression.eval(context)); + default -> throw new IllegalArgumentException("Unsupported binary operation: " + op); + }; + } + case "CLAMP" -> { + final var inputExpression = compileNumericExpression( + node.getJsonObject("input", defaultSourceNode("PRIMARY")), + requiredTrackBindings + ); + final var minExpression = compileNumericChildExpression(node, "min", "minValue", 0.0d, requiredTrackBindings); + final var maxExpression = compileNumericChildExpression(node, "max", "maxValue", 1.0d, requiredTrackBindings); + yield context -> { + final var value = 
inputExpression.eval(context); + var minValue = minExpression.eval(context); + var maxValue = maxExpression.eval(context); + if (!Double.isFinite(minValue)) { + minValue = 0.0d; + } + if (!Double.isFinite(maxValue)) { + maxValue = 1.0d; + } + if (maxValue < minValue) { + final var tmp = maxValue; + maxValue = minValue; + minValue = tmp; + } + return Math.max(minValue, Math.min(maxValue, value)); + }; + } + case "COLORMAP" -> compileNumericExpression(node.getJsonObject("input", defaultSourceNode("PRIMARY")), requiredTrackBindings); + default -> throw new IllegalArgumentException("Unsupported expression type: " + nodeType); + }; + } + + private static double evalLogByBase(final double rawValue, + final double rawBase) { + final var value = Math.max(0.0d, rawValue); + final var base = rawBase; + if (!Double.isFinite(base) || base <= 0.0d || Math.abs(base - 1.0d) < 1e-9d) { + return Math.log1p(value); + } + final var denominator = Math.log(base); + if (!Double.isFinite(denominator) || Math.abs(denominator) < 1e-12d) { + return Math.log1p(value); + } + return Math.log1p(value) / denominator; + } + + private static boolean isBuiltinCoolerWeightsTrackId(final @NotNull String trackId) { + final var normalized = trackId.trim().toLowerCase(Locale.ROOT); + return BUILTIN_COOLER_WEIGHTS_TRACK_ID.equals(normalized) || "__builtin_cooler_weights".equals(normalized); + } + + private static @NotNull CompiledNumericExpression compileImplicitColormapSignalExpression( + final @NotNull JsonObject colormapNode, + final @NotNull Set requiredTrackBindings + ) { + final var clampNode = new JsonObject() + .put("type", "clamp") + .put("input", colormapNode.getJsonObject("input", defaultSourceNode("PRIMARY"))); + + final var minNode = colormapNode.getValue("min"); + final var maxNode = colormapNode.getValue("max"); + if (minNode instanceof JsonObject minExpression) { + clampNode.put("min", minExpression.copy()); + } else if (colormapNode.containsKey("minValue")) { + clampNode.put("minValue", 
colormapNode.getValue("minValue")); + } else { + clampNode.put("minValue", colormapNode.getDouble("minSignal", 0.0d)); + } + + if (maxNode instanceof JsonObject maxExpression) { + clampNode.put("max", maxExpression.copy()); + } else if (colormapNode.containsKey("maxValue")) { + clampNode.put("maxValue", colormapNode.getValue("maxValue")); + } else { + clampNode.put("maxValue", colormapNode.getDouble("maxSignal", 1.0d)); + } + + return compileNumericExpression(clampNode, requiredTrackBindings); + } + + private static @NotNull CompiledNumericExpression compileNumericChildExpression(final @NotNull JsonObject node, + final @NotNull String objectKey, + final @NotNull String valueKey, + final double fallbackValue, + final @NotNull Set requiredTrackBindings) { + final var objectValue = node.getValue(objectKey); + if (objectValue instanceof JsonObject objectNode) { + return compileNumericExpression(objectNode, requiredTrackBindings); + } + final var scalarFallback = node.containsKey(valueKey) + ? 
node.getDouble(valueKey, fallbackValue) + : node.getDouble(objectKey, fallbackValue); + return context -> scalarFallback; + } + + private static @NotNull CompiledNumericExpression compileNumericChildExpressionMulti(final @NotNull JsonObject node, + final @NotNull String[] objectKeys, + final @NotNull String[] valueKeys, + final double fallbackValue, + final @NotNull Set requiredTrackBindings) { + for (final var objectKey : objectKeys) { + final var objectValue = node.getValue(objectKey); + if (objectValue instanceof JsonObject objectNode) { + return compileNumericExpression(objectNode, requiredTrackBindings); + } + } + + for (final var valueKey : valueKeys) { + final var scalar = node.getValue(valueKey); + if (scalar instanceof Number number) { + return context -> number.doubleValue(); + } + } + + for (final var objectKey : objectKeys) { + final var scalar = node.getValue(objectKey); + if (scalar instanceof Number number) { + return context -> number.doubleValue(); + } + } + + return context -> fallbackValue; + } + + private static @NotNull CompiledColorExpression compileColorExpression(final @NotNull JsonObject node, + final @NotNull Set requiredTrackBindings) { + final var nodeType = node.getString("type", "colormap").trim().toUpperCase(Locale.ROOT); + return switch (nodeType) { + case "COLORMAP" -> { + final var inputExpression = compileNumericExpression( + node.getJsonObject("input", defaultSourceNode("PRIMARY")), + requiredTrackBindings + ); + final var minExpression = compileNumericChildExpression( + node, + "min", + "minValue", + node.getDouble("minSignal", 0.0d), + requiredTrackBindings + ); + final var maxExpression = compileNumericChildExpression( + node, + "max", + "maxValue", + node.getDouble("maxSignal", 1.0d), + requiredTrackBindings + ); + final var defaultStart = new Color(255, 255, 255, 0); + final var defaultEnd = new Color(0, 96, 0, 255); + final var startColor = parseColor(node.getString("startColor"), defaultStart); + final var endColor = 
parseColor(node.getString("endColor"), defaultEnd); + yield context -> { + final var value = inputExpression.eval(context); + final var minValue = minExpression.eval(context); + final var maxValue = maxExpression.eval(context); + return mapLinearColor(value, minValue, maxValue, startColor, endColor); + }; + } + case "RGB" -> { + final var rExpression = compileNumericChildExpressionMulti( + node, + new String[]{"r", "c1"}, + new String[]{"rValue"}, + 0.0d, + requiredTrackBindings + ); + final var gExpression = compileNumericChildExpressionMulti( + node, + new String[]{"g", "c2"}, + new String[]{"gValue"}, + 0.0d, + requiredTrackBindings + ); + final var bExpression = compileNumericChildExpressionMulti( + node, + new String[]{"b", "c3"}, + new String[]{"bValue"}, + 0.0d, + requiredTrackBindings + ); + final var aExpression = compileNumericChildExpressionMulti( + node, + new String[]{"a", "alpha"}, + new String[]{"aValue", "alphaValue"}, + 255.0d, + requiredTrackBindings + ); + yield context -> toArgb( + clampColorChannel(rExpression.eval(context)), + clampColorChannel(gExpression.eval(context)), + clampColorChannel(bExpression.eval(context)), + clampAlphaChannel(aExpression.eval(context)) + ); + } + case "HSL" -> { + final var hExpression = compileNumericChildExpressionMulti( + node, + new String[]{"h", "c1"}, + new String[]{"hValue"}, + 0.0d, + requiredTrackBindings + ); + final var sExpression = compileNumericChildExpressionMulti( + node, + new String[]{"s", "c2"}, + new String[]{"sValue"}, + 1.0d, + requiredTrackBindings + ); + final var lExpression = compileNumericChildExpressionMulti( + node, + new String[]{"l", "c3"}, + new String[]{"lValue"}, + 0.5d, + requiredTrackBindings + ); + final var aExpression = compileNumericChildExpressionMulti( + node, + new String[]{"a", "alpha"}, + new String[]{"aValue", "alphaValue"}, + 255.0d, + requiredTrackBindings + ); + yield context -> { + final var rgb = hslToRgb(hExpression.eval(context), sExpression.eval(context), 
lExpression.eval(context)); + return toArgb(rgb[0], rgb[1], rgb[2], clampAlphaChannel(aExpression.eval(context))); + }; + } + case "HSV" -> { + final var hExpression = compileNumericChildExpressionMulti( + node, + new String[]{"h", "c1"}, + new String[]{"hValue"}, + 0.0d, + requiredTrackBindings + ); + final var sExpression = compileNumericChildExpressionMulti( + node, + new String[]{"s", "c2"}, + new String[]{"sValue"}, + 1.0d, + requiredTrackBindings + ); + final var vExpression = compileNumericChildExpressionMulti( + node, + new String[]{"v", "c3"}, + new String[]{"vValue"}, + 1.0d, + requiredTrackBindings + ); + final var aExpression = compileNumericChildExpressionMulti( + node, + new String[]{"a", "alpha"}, + new String[]{"aValue", "alphaValue"}, + 255.0d, + requiredTrackBindings + ); + yield context -> { + final var hue = normalizeHue(hExpression.eval(context)); + final var sat = normalizeUnitInterval(sExpression.eval(context)); + final var val = normalizeUnitInterval(vExpression.eval(context)); + final var rgbInt = Color.HSBtoRGB((float) (hue / 360.0d), (float) sat, (float) val); + final var red = (rgbInt >> 16) & 0xFF; + final var green = (rgbInt >> 8) & 0xFF; + final var blue = rgbInt & 0xFF; + return toArgb(red, green, blue, clampAlphaChannel(aExpression.eval(context))); + }; + } + default -> throw new IllegalArgumentException("Unsupported color expression type: " + nodeType); + }; + } + + private static @NotNull TrackAxis parseTrackAxis(final String axisRaw) { + if (axisRaw == null) { + return TrackAxis.ROW; + } + final var normalized = axisRaw.trim().toUpperCase(Locale.ROOT); + return switch (normalized) { + case "COL", "COLUMN" -> TrackAxis.COL; + default -> TrackAxis.ROW; + }; + } + + private static int mapLinearColor(final double value, + final double minValue, + final double maxValue, + final @NotNull Color startColor, + final @NotNull Color endColor) { + final var safeMin = Double.isFinite(minValue) ? 
minValue : 0.0d; + final var safeMaxCandidate = Double.isFinite(maxValue) ? maxValue : 1.0d; + final var safeMax = safeMaxCandidate > safeMin ? safeMaxCandidate : safeMin + 1.0d; + final var standardized = Math.max(0.0d, Math.min((value - safeMin) / (safeMax - safeMin), 1.0d)); + final var red = interpolateColor(startColor.getRed(), endColor.getRed(), standardized); + final var green = interpolateColor(startColor.getGreen(), endColor.getGreen(), standardized); + final var blue = interpolateColor(startColor.getBlue(), endColor.getBlue(), standardized); + final var alpha = interpolateColor(startColor.getAlpha(), endColor.getAlpha(), standardized); + return toArgb(red, green, blue, alpha); + } + + private static int interpolateColor(final int start, + final int end, + final double factor) { + return (int) Math.round(start + (end - start) * factor); + } + + private static int clampColorChannel(final double value) { + final var safeValue = Double.isFinite(value) ? value : 0.0d; + return (int) Math.max(0, Math.min(255, Math.round(safeValue))); + } + + private static int clampAlphaChannel(final double value) { + final var safeValue = Double.isFinite(value) ? 
value : 255.0d; + if (safeValue >= 0.0d && safeValue <= 1.0d) { + return (int) Math.max(0, Math.min(255, Math.round(safeValue * 255.0d))); + } + return (int) Math.max(0, Math.min(255, Math.round(safeValue))); + } + + private static @NotNull Color parseColor(final String rawValue, + final @NotNull Color fallback) { + if (rawValue == null || rawValue.isBlank()) { + return fallback; + } + final var value = rawValue.trim(); + try { + if (value.startsWith("#")) { + final var hex = value.substring(1); + if (hex.length() == 6) { + final var rgb = Integer.parseInt(hex, 16); + return new Color((rgb >> 16) & 0xFF, (rgb >> 8) & 0xFF, rgb & 0xFF, 255); + } + if (hex.length() == 8) { + final var rgba = Long.parseLong(hex, 16); + return new Color( + (int) ((rgba >> 24) & 0xFF), + (int) ((rgba >> 16) & 0xFF), + (int) ((rgba >> 8) & 0xFF), + (int) (rgba & 0xFF) + ); + } + } + final var normalized = value.toLowerCase(Locale.ROOT); + if (normalized.startsWith("rgba(")) { + final var components = normalized.substring(5, normalized.length() - 1).split(","); + if (components.length == 4) { + final var r = clampColorChannel(Double.parseDouble(components[0].trim())); + final var g = clampColorChannel(Double.parseDouble(components[1].trim())); + final var b = clampColorChannel(Double.parseDouble(components[2].trim())); + final var a = clampAlphaChannel(Double.parseDouble(components[3].trim())); + return new Color(r, g, b, a); + } + } + if (normalized.startsWith("rgb(")) { + final var components = normalized.substring(4, normalized.length() - 1).split(","); + if (components.length == 3) { + final var r = clampColorChannel(Double.parseDouble(components[0].trim())); + final var g = clampColorChannel(Double.parseDouble(components[1].trim())); + final var b = clampColorChannel(Double.parseDouble(components[2].trim())); + return new Color(r, g, b, 255); + } + } + } catch (final RuntimeException ignored) { + // Fall through to fallback. 
+ } + return fallback; + } + + private static int[] hslToRgb(final double hRaw, + final double sRaw, + final double lRaw) { + final var h = normalizeHue(hRaw) / 360.0d; + final var s = normalizeUnitInterval(sRaw); + final var l = normalizeUnitInterval(lRaw); + + if (s <= 0.0d) { + final var gray = clampColorChannel(l * 255.0d); + return new int[]{gray, gray, gray}; + } + + final var q = l < 0.5d ? l * (1.0d + s) : (l + s - l * s); + final var p = 2.0d * l - q; + final var r = hueToRgb(p, q, h + 1.0d / 3.0d); + final var g = hueToRgb(p, q, h); + final var b = hueToRgb(p, q, h - 1.0d / 3.0d); + return new int[]{ + clampColorChannel(r * 255.0d), + clampColorChannel(g * 255.0d), + clampColorChannel(b * 255.0d) + }; + } + + private static double hueToRgb(final double p, + final double q, + double t) { + if (t < 0.0d) { + t += 1.0d; + } + if (t > 1.0d) { + t -= 1.0d; + } + if (t < 1.0d / 6.0d) { + return p + (q - p) * 6.0d * t; + } + if (t < 1.0d / 2.0d) { + return q; + } + if (t < 2.0d / 3.0d) { + return p + (q - p) * (2.0d / 3.0d - t) * 6.0d; + } + return p; + } + + private static double normalizeHue(final double rawHue) { + final var safeHue = Double.isFinite(rawHue) ? rawHue : 0.0d; + final var wrapped = safeHue % 360.0d; + return wrapped < 0.0d ? wrapped + 360.0d : wrapped; + } + + private static double normalizeUnitInterval(final double value) { + final var safeValue = Double.isFinite(value) ? value : 0.0d; + final var normalized = safeValue > 1.0d ? 
(safeValue / 100.0d) : safeValue; + return Math.max(0.0d, Math.min(1.0d, normalized)); + } + + private static int toArgb(final int red, + final int green, + final int blue, + final int alpha) { + return ((alpha & 0xFF) << 24) + | ((red & 0xFF) << 16) + | ((green & 0xFF) << 8) + | (blue & 0xFF); + } + + @FunctionalInterface + private interface CompiledNumericExpression { + double eval(@NotNull MutablePixelContext context); + } + + @FunctionalInterface + private interface CompiledColorExpression { + int evalArgb(@NotNull MutablePixelContext context); + } + + private record CompiledRootExpression(boolean color, + @NotNull CompiledNumericExpression signalExpression, + CompiledColorExpression colorExpression) { + double evalSignal(final @NotNull MutablePixelContext context) { + final var value = this.signalExpression.eval(context); + return Double.isFinite(value) ? value : 0.0d; + } + + int evalArgb(final @NotNull MutablePixelContext context, + final @NotNull SimpleVisualizationOptions options) { + if (this.color && this.colorExpression != null) { + return this.colorExpression.evalArgb(context); + } + final var value = evalSignal(context); + final var color = options.getColormap().mapSignal(value); + return toArgb(color.getRed(), color.getGreen(), color.getBlue(), color.getAlpha()); + } + } + + public enum TrackAxis { + ROW, + COL + } + + public record TrackBinding(@NotNull String trackId, + @NotNull TrackAxis axis) { + } + + public static final class MutablePixelContext { + public double primaryValue; + public double secondaryValue; + public double rowWeight; + public double colWeight; + public double resolutionScalingCoeff; + public double resolutionLinearScalingCoeff; + public long rowPx; + public long colPx; + public long rowBin; + public long colBin; + public long rowBp; + public long colBp; + public long bpResolution; + public int rowLocalIndex; + public int colLocalIndex; + public @NotNull Map rowTrackValuesByTrackId = Map.of(); + public @NotNull Map 
colTrackValuesByTrackId = Map.of(); + + public double sampleTrackValue(final @NotNull String trackId, + final @NotNull TrackAxis axis) { + final var source = axis == TrackAxis.ROW ? this.rowTrackValuesByTrackId : this.colTrackValuesByTrackId; + if (source == null || source.isEmpty() || trackId.isBlank()) { + return 0.0d; + } + final var values = source.get(trackId); + if (values == null || values.length == 0) { + return 0.0d; + } + final var index = axis == TrackAxis.ROW ? this.rowLocalIndex : this.colLocalIndex; + if (index < 0 || index >= values.length) { + return 0.0d; + } + final var value = values[index]; + return Double.isFinite(value) ? value : 0.0d; + } + } +} diff --git a/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/TileHandlersHolder.java b/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/TileHandlersHolder.java index 7ab1e77..e9f4846 100644 --- a/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/TileHandlersHolder.java +++ b/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/TileHandlersHolder.java @@ -27,23 +27,34 @@ import io.vertx.core.Vertx; import io.vertx.core.buffer.Buffer; import io.vertx.core.json.Json; +import io.vertx.core.json.JsonObject; import io.vertx.core.shareddata.LocalMap; import io.vertx.ext.web.Router; import lombok.NonNull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import ru.itmo.ctlab.hict.hict_library.chunkedfile.ChunkedFile; +import ru.itmo.ctlab.hict.hict_library.domain.QueryLengthUnit; import ru.itmo.ctlab.hict.hict_library.chunkedfile.resolution.ResolutionDescriptor; import ru.itmo.ctlab.hict.hict_server.HandlersHolder; import ru.itmo.ctlab.hict.hict_server.concurrent.RequestTaskScheduler; import ru.itmo.ctlab.hict.hict_server.dto.symmetric.visualization.VisualizationOptionsDTO; import ru.itmo.ctlab.hict.hict_server.handlers.util.TileStatisticHolder; +import 
ru.itmo.ctlab.hict.hict_server.tracks.Track1DManager; import ru.itmo.ctlab.hict.hict_server.util.shareable.ShareableWrappers; import javax.imageio.ImageIO; +import java.awt.*; +import java.awt.image.BufferedImage; import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.ArrayList; import java.util.Base64; +import java.util.HashMap; import java.util.Collections; import java.util.Map; import java.util.stream.Collectors; @@ -56,6 +67,7 @@ public class TileHandlersHolder extends HandlersHolder { private static final String TRANSPARENT_PNG_BASE64 = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mP8/x8AAwMCAO7Z3ioAAAAASUVORK5CYII="; private static final byte[] TRANSPARENT_PNG_BYTES = Base64.getDecoder().decode(TRANSPARENT_PNG_BASE64); + private static final int MAX_MATRIX_QUERY_ELEMENTS = Integer.getInteger("HICT_MATRIX_QUERY_MAX_ELEMENTS", 16_777_216); @Override public void addHandlersToRouter(final @NotNull Router router) { @@ -75,7 +87,21 @@ public void addHandlersToRouter(final @NotNull Router router) { () -> { final @NotNull @NonNull LocalMap map = vertx.sharedData().getLocalMap("hict_server"); log.debug("Got map"); - map.put("visualizationOptions", new ShareableWrappers.SimpleVisualizationOptionsWrapper(request.toEntity())); + final var optionsEntity = request.toEntity(); + map.put("visualizationOptions", new ShareableWrappers.SimpleVisualizationOptionsWrapper(optionsEntity)); + final var previousPipelineWrapper = + (ShareableWrappers.RenderPipelineConfigWrapper) map.get(RenderPipelineConfig.LOCAL_MAP_KEY); + final var previousPipeline = + previousPipelineWrapper != null ? 
previousPipelineWrapper.getRenderPipelineConfig() : RenderPipelineConfig.disabled(); + final var syncedPipeline = RenderPipelineConfig.fromVisualizationOptions( + optionsEntity, + previousPipeline.enabled(), + previousPipeline.swapUpperLower() + ); + map.put( + RenderPipelineConfig.LOCAL_MAP_KEY, + new ShareableWrappers.RenderPipelineConfigWrapper(syncedPipeline) + ); final var chunkedFileWrapper = ((ShareableWrappers.ChunkedFileWrapper) (map.get("chunkedFile"))); if (chunkedFileWrapper == null) { throw new RuntimeException("Chunked file is not present in the local map, maybe the file is not yet opened?"); @@ -88,6 +114,7 @@ public void addHandlersToRouter(final @NotNull Router router) { throw new RuntimeException("Tile statistics is not present in the local map, maybe the file is not yet opened?"); } map.put("TileStatisticHolder", TileStatisticHolder.resetRangesKeepingVersion(stats, chunkedFile.getResolutions().length)); + scheduler.bumpGeneration(RequestTaskScheduler.CancellationDomain.TILE); final var visualizationOptionsWrapper = ((ShareableWrappers.SimpleVisualizationOptionsWrapper) (map.get("visualizationOptions"))); if (visualizationOptionsWrapper == null) { throw new RuntimeException("Visualization options are not present in the local map, maybe the file is not yet opened?"); @@ -134,6 +161,103 @@ public void addHandlersToRouter(final @NotNull Router router) { ); }); + router.post("/render_pipeline/get").handler(ctx -> { + final var scheduler = getScheduler(ctx); + if (scheduler == null) { + return; + } + scheduler.submit( + ctx, + RequestTaskScheduler.RequestPriority.UI_UX, + null, + () -> { + final @NotNull @NonNull LocalMap map = vertx.sharedData().getLocalMap("hict_server"); + final var wrapper = (ShareableWrappers.RenderPipelineConfigWrapper) map.get(RenderPipelineConfig.LOCAL_MAP_KEY); + final var config = (wrapper != null) ? 
wrapper.getRenderPipelineConfig() : RenderPipelineConfig.disabled(); + return config.toJson(); + }, + response -> ctx.response() + .putHeader("content-type", "application/json") + .setStatusCode(200) + .end(response.encode()) + ); + }); + + router.post("/render_pipeline/set").handler(ctx -> { + final var scheduler = getScheduler(ctx); + if (scheduler == null) { + return; + } + scheduler.submit( + ctx, + RequestTaskScheduler.RequestPriority.ASSEMBLY, + null, + () -> { + final @NotNull @NonNull LocalMap map = vertx.sharedData().getLocalMap("hict_server"); + final var requestBody = ctx.body().asJsonObject(); + final var config = RenderPipelineConfig.fromJson(requestBody); + map.put( + RenderPipelineConfig.LOCAL_MAP_KEY, + new ShareableWrappers.RenderPipelineConfigWrapper(config) + ); + final var chunkedFileWrapper = ((ShareableWrappers.ChunkedFileWrapper) (map.get("chunkedFile"))); + final var stats = (TileStatisticHolder) map.get("TileStatisticHolder"); + if (chunkedFileWrapper != null && stats != null) { + map.put( + "TileStatisticHolder", + TileStatisticHolder.resetRangesWithIncrementedVersion( + stats, + chunkedFileWrapper.getChunkedFile().getResolutions().length + ) + ); + } + scheduler.bumpGeneration(RequestTaskScheduler.CancellationDomain.TILE); + return config.toJson(); + }, + response -> ctx.response() + .putHeader("content-type", "application/json") + .setStatusCode(200) + .end(response.encode()) + ); + }); + + router.post("/render_pipeline/reset").handler(ctx -> { + final var scheduler = getScheduler(ctx); + if (scheduler == null) { + return; + } + scheduler.submit( + ctx, + RequestTaskScheduler.RequestPriority.ASSEMBLY, + null, + () -> { + final @NotNull @NonNull LocalMap map = vertx.sharedData().getLocalMap("hict_server"); + final var config = RenderPipelineConfig.disabled(); + map.put( + RenderPipelineConfig.LOCAL_MAP_KEY, + new ShareableWrappers.RenderPipelineConfigWrapper(config) + ); + final var chunkedFileWrapper = 
((ShareableWrappers.ChunkedFileWrapper) (map.get("chunkedFile"))); + final var stats = (TileStatisticHolder) map.get("TileStatisticHolder"); + if (chunkedFileWrapper != null && stats != null) { + map.put( + "TileStatisticHolder", + TileStatisticHolder.resetRangesWithIncrementedVersion( + stats, + chunkedFileWrapper.getChunkedFile().getResolutions().length + ) + ); + } + scheduler.bumpGeneration(RequestTaskScheduler.CancellationDomain.TILE); + return config.toJson(); + }, + response -> ctx.response() + .putHeader("content-type", "application/json") + .setStatusCode(200) + .end(response.encode()) + ); + }); + router.post("/tiles/reload").handler(ctx -> { final var scheduler = getScheduler(ctx); if (scheduler == null) { @@ -190,6 +314,33 @@ public void addHandlersToRouter(final @NotNull Router router) { () -> respondCancelledTile(ctx, format) ); }); + + router.post("/matrix/query").handler(ctx -> { + final var scheduler = getScheduler(ctx); + if (scheduler == null) { + return; + } + final var requestJson = ctx.body() != null && ctx.body().asJsonObject() != null + ? ctx.body().asJsonObject() + : new JsonObject(); + final var format = MatrixResponseFormat.fromRaw(requestJson.getString("format", MatrixResponseFormat.BINARY_FLOAT32.name())); + scheduler.submit( + ctx, + RequestTaskScheduler.RequestPriority.TILE, + RequestTaskScheduler.CancellationDomain.TILE, + () -> computeMatrixQueryResponse(requestJson), + response -> { + final var httpResponse = ctx.response().putHeader("content-type", response.contentType()); + response.headers().forEach(httpResponse::putHeader); + if (response.jsonBody() != null) { + httpResponse.end(response.jsonBody()); + } else { + httpResponse.end(response.binaryBody()); + } + }, + () -> respondCancelledMatrixQuery(ctx, format) + ); + }); } private TileResponsePayload computeTileResponse(final @NotNull io.vertx.ext.web.RoutingContext ctx) { @@ -201,16 +352,22 @@ private TileResponsePayload computeTileResponse(final @NotNull io.vertx.ext.web. 
final var format = TileFormat.valueOf(ctx.request().getParam("format", "JSON_PNG_WITH_RANGES")); final @NotNull @NonNull LocalMap map = vertx.sharedData().getLocalMap("hict_server"); - final var chunkedFileWrapper = ((ShareableWrappers.ChunkedFileWrapper) (map.get("chunkedFile"))); - if (chunkedFileWrapper == null) { + final var primaryChunkedFileWrapper = ((ShareableWrappers.ChunkedFileWrapper) (map.get("chunkedFile"))); + if (primaryChunkedFileWrapper == null) { throw new RuntimeException("Chunked file is not present in the local map, maybe the file is not yet opened?"); } - final var chunkedFile = chunkedFileWrapper.getChunkedFile(); + final var chunkedFile = primaryChunkedFileWrapper.getChunkedFile(); + final var secondaryChunkedFileWrapper = ((ShareableWrappers.ChunkedFileWrapper) (map.get("chunkedFileSecondary"))); + final var secondaryChunkedFile = secondaryChunkedFileWrapper != null ? secondaryChunkedFileWrapper.getChunkedFile() : null; final var visualizationOptionsWrapper = ((ShareableWrappers.SimpleVisualizationOptionsWrapper) (map.get("visualizationOptions"))); if (visualizationOptionsWrapper == null) { throw new RuntimeException("Visualization options are not present in the local map, maybe the file is not yet opened?"); } final var options = visualizationOptionsWrapper.getSimpleVisualizationOptions(); + final var renderPipelineWrapper = (ShareableWrappers.RenderPipelineConfigWrapper) map.get(RenderPipelineConfig.LOCAL_MAP_KEY); + final var renderPipelineConfig = renderPipelineWrapper != null + ? renderPipelineWrapper.getRenderPipelineConfig() + : RenderPipelineConfig.disabled(); final var requestedBpResolutionParam = ctx.request().getParam("bpResolution"); final int level; @@ -256,8 +413,48 @@ private TileResponsePayload computeTileResponse(final @NotNull io.vertx.ext.web. 
endColPx = (col + 1) * tileWidth; } - final var matrixWithWeights = chunkedFile.matrixQueries().getSubmatrix(ResolutionDescriptor.fromResolutionOrder(level), startRowPx, startColPx, endRowPx, endColPx, true); - final var image = chunkedFile.tileVisualizationProcessor().visualizeTile(matrixWithWeights, options); + final var matrixWithWeights = chunkedFile.matrixQueries().getSubmatrix( + ResolutionDescriptor.fromResolutionOrder(level), + startRowPx, + startColPx, + endRowPx, + endColPx, + true + ); + final var secondaryMatrixWithWeights = querySecondarySubmatrix( + secondaryChunkedFile, + ResolutionDescriptor.fromResolutionOrder(level), + startRowPx, + startColPx, + endRowPx, + endColPx + ); + final var trackManagerWrapper = (ShareableWrappers.Track1DManagerWrapper) map.get("Track1DManager"); + final Track1DManager track1DManager = trackManagerWrapper != null ? trackManagerWrapper.getTrack1DManager() : null; + + final BufferedImage image; + if (renderPipelineConfig.enabled()) { + image = renderPipelineTile( + chunkedFile, + secondaryChunkedFile, + matrixWithWeights, + secondaryMatrixWithWeights, + options, + renderPipelineConfig, + track1DManager + ); + } else if (secondaryChunkedFile != null && secondaryMatrixWithWeights != null) { + image = renderTraditionalDualSourceTile( + chunkedFile, + secondaryChunkedFile, + matrixWithWeights, + secondaryMatrixWithWeights, + options, + renderPipelineConfig.swapUpperLower() + ); + } else { + image = chunkedFile.tileVisualizationProcessor().visualizeTile(matrixWithWeights, options); + } final ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { @@ -299,6 +496,555 @@ private void respondCancelledTile(final @NotNull io.vertx.ext.web.RoutingContext .end(Buffer.buffer(TRANSPARENT_PNG_BYTES)); } + private void respondCancelledMatrixQuery(final @NotNull io.vertx.ext.web.RoutingContext ctx, + final @NotNull MatrixResponseFormat format) { + final var headers = Map.of( + "x-hict-rows", "0", + "x-hict-cols", "0", + 
"x-hict-dtype", format.defaultDtype(), + "x-hict-signal-mode", MatrixSignalMode.TRADITIONAL_NORMALIZED.name() + ); + if (format == MatrixResponseFormat.JSON) { + final var payload = new JsonObject() + .put("rows", 0) + .put("cols", 0) + .put("dtype", format.defaultDtype()) + .put("signalMode", MatrixSignalMode.TRADITIONAL_NORMALIZED.name()) + .put("values", new ArrayList<>()); + final var response = ctx.response().putHeader("content-type", "application/json"); + headers.forEach(response::putHeader); + response.end(payload.encode()); + } else { + final var response = ctx.response().putHeader("content-type", "application/octet-stream"); + headers.forEach(response::putHeader); + response.end(Buffer.buffer()); + } + } + + private MatrixResponsePayload computeMatrixQueryResponse(final @NotNull JsonObject request) { + final @NotNull @NonNull LocalMap map = vertx.sharedData().getLocalMap("hict_server"); + final var primaryChunkedFileWrapper = ((ShareableWrappers.ChunkedFileWrapper) (map.get("chunkedFile"))); + if (primaryChunkedFileWrapper == null) { + throw new RuntimeException("Chunked file is not present in the local map, maybe the file is not yet opened?"); + } + final var primaryChunkedFile = primaryChunkedFileWrapper.getChunkedFile(); + final var secondaryChunkedFileWrapper = ((ShareableWrappers.ChunkedFileWrapper) (map.get("chunkedFileSecondary"))); + final var secondaryChunkedFile = secondaryChunkedFileWrapper != null ? 
secondaryChunkedFileWrapper.getChunkedFile() : null; + final var visualizationOptionsWrapper = ((ShareableWrappers.SimpleVisualizationOptionsWrapper) (map.get("visualizationOptions"))); + if (visualizationOptionsWrapper == null) { + throw new RuntimeException("Visualization options are not present in the local map, maybe the file is not yet opened?"); + } + final var options = visualizationOptionsWrapper.getSimpleVisualizationOptions(); + final var renderPipelineWrapper = (ShareableWrappers.RenderPipelineConfigWrapper) map.get(RenderPipelineConfig.LOCAL_MAP_KEY); + final var renderPipelineConfig = renderPipelineWrapper != null + ? renderPipelineWrapper.getRenderPipelineConfig() + : RenderPipelineConfig.disabled(); + final var trackManagerWrapper = (ShareableWrappers.Track1DManagerWrapper) map.get("Track1DManager"); + final Track1DManager track1DManager = trackManagerWrapper != null ? trackManagerWrapper.getTrack1DManager() : null; + + final var bpResolution = request.getLong("bpResolution"); + if (bpResolution == null || bpResolution <= 0L) { + throw new IllegalArgumentException("Field 'bpResolution' must be a positive integer"); + } + final var resolutionOrder = primaryChunkedFile.getResolutionToIndex().get(bpResolution); + if (resolutionOrder == null) { + throw new IllegalArgumentException("Requested bpResolution is not present in opened file: " + bpResolution); + } + final var resolutionDescriptor = ResolutionDescriptor.fromResolutionOrder(resolutionOrder); + + final var units = parseUnits(request.getString("unit", request.getString("units", "PIXELS"))); + final var startRowInUnits = resolveRangeStart(request, Axis.ROW, units); + final var startColInUnits = resolveRangeStart(request, Axis.COL, units); + final var endRowInUnits = resolveRangeEnd(request, Axis.ROW, units, startRowInUnits); + final var endColInUnits = resolveRangeEnd(request, Axis.COL, units, startColInUnits); + if (endRowInUnits < startRowInUnits || endColInUnits < startColInUnits) { + throw new 
IllegalArgumentException("End coordinates must be greater than or equal to start coordinates"); + } + + final var startRowPx = convertToPixels(primaryChunkedFile, resolutionDescriptor, units, startRowInUnits); + final var startColPx = convertToPixels(primaryChunkedFile, resolutionDescriptor, units, startColInUnits); + final var endRowPx = convertToPixels(primaryChunkedFile, resolutionDescriptor, units, endRowInUnits); + final var endColPx = convertToPixels(primaryChunkedFile, resolutionDescriptor, units, endColInUnits); + + final var matrixWithWeights = primaryChunkedFile.matrixQueries().getSubmatrix( + resolutionDescriptor, + startRowPx, + startColPx, + endRowPx, + endColPx, + true + ); + final var rowCount = matrixWithWeights.matrix().length; + final var columnCount = rowCount > 0 ? matrixWithWeights.matrix()[0].length : 0; + final long elementCount = (long) rowCount * (long) columnCount; + if (elementCount > MAX_MATRIX_QUERY_ELEMENTS) { + throw new IllegalArgumentException( + "Requested matrix window is too large (" + elementCount + " elements); limit is " + MAX_MATRIX_QUERY_ELEMENTS + ); + } + + final var signalMode = MatrixSignalMode.fromRaw(request.getString("signalMode", MatrixSignalMode.TRADITIONAL_NORMALIZED.name())); + final var format = MatrixResponseFormat.fromRaw(request.getString("format", MatrixResponseFormat.BINARY_FLOAT32.name())); + final var includeWeights = request.getBoolean("includeWeights", false); + + final double[][] signalMatrix; + final long[][] rawMatrix = matrixWithWeights.matrix(); + switch (signalMode) { + case RAW_COUNTS -> signalMatrix = null; + case COOLER_WEIGHTED -> signalMatrix = computeCoolerWeightedSignal(rawMatrix, matrixWithWeights.rowWeights(), matrixWithWeights.colWeights()); + case TRADITIONAL_NORMALIZED -> signalMatrix = primaryChunkedFile.tileVisualizationProcessor().processTile(matrixWithWeights, options).values(); + case PIPELINE_SIGNAL -> { + final var secondaryMatrixWithWeights = querySecondarySubmatrix( + 
secondaryChunkedFile, + resolutionDescriptor, + matrixWithWeights.startRowIncl(), + matrixWithWeights.startColIncl(), + matrixWithWeights.startRowIncl() + rowCount, + matrixWithWeights.startColIncl() + columnCount + ); + signalMatrix = computePipelineSignalMatrix( + primaryChunkedFile, + secondaryChunkedFile, + matrixWithWeights, + secondaryMatrixWithWeights, + options, + renderPipelineConfig, + track1DManager + ); + } + default -> throw new IllegalStateException("Unsupported matrix signal mode: " + signalMode); + } + + final var headers = new HashMap(); + headers.put("x-hict-rows", Integer.toString(rowCount)); + headers.put("x-hict-cols", Integer.toString(columnCount)); + headers.put("x-hict-signal-mode", signalMode.name()); + headers.put("x-hict-unit", "PIXELS"); + headers.put("x-hict-start-row-px", Long.toString(matrixWithWeights.startRowIncl())); + headers.put("x-hict-end-row-px", Long.toString(matrixWithWeights.startRowIncl() + rowCount)); + headers.put("x-hict-start-col-px", Long.toString(matrixWithWeights.startColIncl())); + headers.put("x-hict-end-col-px", Long.toString(matrixWithWeights.startColIncl() + columnCount)); + + if (format == MatrixResponseFormat.JSON) { + final var payload = new JsonObject() + .put("rows", rowCount) + .put("cols", columnCount) + .put("signalMode", signalMode.name()) + .put("unit", "PIXELS") + .put("startRowPx", matrixWithWeights.startRowIncl()) + .put("endRowPx", matrixWithWeights.startRowIncl() + rowCount) + .put("startColPx", matrixWithWeights.startColIncl()) + .put("endColPx", matrixWithWeights.startColIncl() + columnCount); + if (signalMode == MatrixSignalMode.RAW_COUNTS) { + payload.put("dtype", "int64"); + payload.put("values", flattenLongMatrix(rawMatrix, rowCount, columnCount)); + } else { + payload.put("dtype", "float64"); + payload.put("values", flattenDoubleMatrix(signalMatrix, rowCount, columnCount)); + } + if (includeWeights) { + payload.put("rowWeights", toJsonArray(matrixWithWeights.rowWeights(), rowCount)); + 
payload.put("colWeights", toJsonArray(matrixWithWeights.colWeights(), columnCount)); + } + headers.put("x-hict-dtype", payload.getString("dtype", "float64")); + return new MatrixResponsePayload("application/json", payload.encode(), null, headers); + } + + switch (format) { + case BINARY_INT64 -> { + headers.put("x-hict-dtype", "int64"); + final var binary = encodeLongMatrixLittleEndian(rawMatrix, rowCount, columnCount); + return new MatrixResponsePayload("application/octet-stream", null, Buffer.buffer(binary), headers); + } + case BINARY_FLOAT64 -> { + headers.put("x-hict-dtype", "float64"); + final var source = signalMode == MatrixSignalMode.RAW_COUNTS + ? asDoubleMatrix(rawMatrix, rowCount, columnCount) + : signalMatrix; + final var binary = encodeDoubleMatrixLittleEndian(source, rowCount, columnCount); + return new MatrixResponsePayload("application/octet-stream", null, Buffer.buffer(binary), headers); + } + case BINARY_FLOAT32 -> { + headers.put("x-hict-dtype", "float32"); + final var source = signalMode == MatrixSignalMode.RAW_COUNTS + ? asDoubleMatrix(rawMatrix, rowCount, columnCount) + : signalMatrix; + final var binary = encodeFloatMatrixLittleEndian(source, rowCount, columnCount); + return new MatrixResponsePayload("application/octet-stream", null, Buffer.buffer(binary), headers); + } + default -> throw new IllegalStateException("Unsupported matrix response format: " + format); + } + } + + private static double[][] computeCoolerWeightedSignal(final long[][] rawMatrix, + final double[] rowWeights, + final double[] colWeights) { + final var rowCount = rawMatrix.length; + final var columnCount = rowCount > 0 ? rawMatrix[0].length : 0; + final var result = new double[rowCount][columnCount]; + for (int row = 0; row < rowCount; row++) { + final var rowWeight = rowWeights != null && row < rowWeights.length ? rowWeights[row] : 1.0d; + for (int col = 0; col < columnCount; col++) { + final var colWeight = colWeights != null && col < colWeights.length ? 
colWeights[col] : 1.0d; + result[row][col] = rawMatrix[row][col] * rowWeight * colWeight; + } + } + return result; + } + + private double[][] computePipelineSignalMatrix(final @NotNull ChunkedFile primaryChunkedFile, + final ChunkedFile secondaryChunkedFile, + final @NotNull ru.itmo.ctlab.hict.hict_library.chunkedfile.MatrixQueries.MatrixWithWeights primaryMatrixWithWeights, + final ru.itmo.ctlab.hict.hict_library.chunkedfile.MatrixQueries.MatrixWithWeights secondaryMatrixWithWeights, + final @NotNull ru.itmo.ctlab.hict.hict_library.visualization.SimpleVisualizationOptions options, + final @NotNull RenderPipelineConfig pipelineConfig, + final Track1DManager track1DManager) { + final var primaryValues = primaryMatrixWithWeights.matrix(); + final var rowCount = primaryValues.length; + final var columnCount = rowCount > 0 ? primaryValues[0].length : 0; + final var result = new double[rowCount][columnCount]; + if (rowCount == 0 || columnCount == 0) { + return result; + } + + final var secondaryValues = new double[rowCount][columnCount]; + if (secondaryMatrixWithWeights != null && secondaryChunkedFile != null) { + final var candidate = secondaryMatrixWithWeights.matrix(); + final var candidateRowCount = candidate.length; + final var candidateColCount = + candidateRowCount > 0 && candidate[0] != null ? 
candidate[0].length : 0; + final var rowOffset = + (int) (secondaryMatrixWithWeights.startRowIncl() - primaryMatrixWithWeights.startRowIncl()); + final var colOffset = + (int) (secondaryMatrixWithWeights.startColIncl() - primaryMatrixWithWeights.startColIncl()); + for (int row = 0; row < candidateRowCount; row++) { + final var dstRow = row + rowOffset; + if (dstRow < 0 || dstRow >= rowCount) { + continue; + } + final var sourceRow = candidate[row]; + if (sourceRow == null) { + continue; + } + final var sourceColCount = Math.min(sourceRow.length, candidateColCount); + for (int col = 0; col < sourceColCount; col++) { + final var dstCol = col + colOffset; + if (dstCol < 0 || dstCol >= columnCount) { + continue; + } + secondaryValues[dstRow][dstCol] = sourceRow[col]; + } + } + } + + final var resolutionDescriptor = primaryMatrixWithWeights.resolutionDescriptor(); + final var bpResolution = primaryChunkedFile.getResolutions()[resolutionDescriptor.getResolutionOrderInArray()]; + final var bpResolutionDescriptor = ResolutionDescriptor.fromResolutionOrder(0); + final var totalVisiblePixels = primaryChunkedFile.getContigTree().getLengthInUnits( + QueryLengthUnit.PIXELS, + resolutionDescriptor + ); + final var resolutionOrder = resolutionDescriptor.getResolutionOrderInArray(); + final var matrixSizeBins = primaryChunkedFile.getMatrixSizeBins(); + final var totalBinsAtResolution = + resolutionOrder >= 0 && resolutionOrder < matrixSizeBins.length + ? 
matrixSizeBins[resolutionOrder] + : Long.MAX_VALUE; + + final var rowPxValues = new long[rowCount]; + final var rowBinValues = new long[rowCount]; + final var rowBpValues = new long[rowCount]; + for (int row = 0; row < rowCount; ++row) { + final var rowPx = primaryMatrixWithWeights.startRowIncl() + row; + rowPxValues[row] = rowPx; + rowBinValues[row] = primaryChunkedFile.convertUnits( + rowPx, + resolutionDescriptor, + QueryLengthUnit.PIXELS, + resolutionDescriptor, + QueryLengthUnit.BINS + ); + rowBpValues[row] = primaryChunkedFile.convertUnits( + rowPx, + resolutionDescriptor, + QueryLengthUnit.PIXELS, + bpResolutionDescriptor, + QueryLengthUnit.BASE_PAIRS + ); + } + + final var colPxValues = new long[columnCount]; + final var colBinValues = new long[columnCount]; + final var colBpValues = new long[columnCount]; + for (int col = 0; col < columnCount; ++col) { + final var colPx = primaryMatrixWithWeights.startColIncl() + col; + colPxValues[col] = colPx; + colBinValues[col] = primaryChunkedFile.convertUnits( + colPx, + resolutionDescriptor, + QueryLengthUnit.PIXELS, + resolutionDescriptor, + QueryLengthUnit.BINS + ); + colBpValues[col] = primaryChunkedFile.convertUnits( + colPx, + resolutionDescriptor, + QueryLengthUnit.PIXELS, + bpResolutionDescriptor, + QueryLengthUnit.BASE_PAIRS + ); + } + + final var context = new RenderPipelineConfig.MutablePixelContext(); + final var rowWeights = primaryMatrixWithWeights.rowWeights(); + final var colWeights = primaryMatrixWithWeights.colWeights(); + final var resolutionScalingCoeffs = primaryChunkedFile.getResolutionScalingCoefficient(); + final var resolutionLinearScalingCoeffs = primaryChunkedFile.getResolutionLinearScalingCoefficient(); + final var resolutionScalingCoeff = + resolutionOrder >= 0 && resolutionOrder < resolutionScalingCoeffs.length + ? 
resolutionScalingCoeffs[resolutionOrder] + : 1.0d; + final var resolutionLinearScalingCoeff = + resolutionOrder >= 0 && resolutionOrder < resolutionLinearScalingCoeffs.length + ? resolutionLinearScalingCoeffs[resolutionOrder] + : 1.0d; + + final var rowTrackValuesByTrackId = new HashMap(); + final var colTrackValuesByTrackId = new HashMap(); + if (track1DManager != null) { + for (final var binding : pipelineConfig.requiredTrackBindings()) { + if (binding.axis() == RenderPipelineConfig.TrackAxis.ROW) { + if (!rowTrackValuesByTrackId.containsKey(binding.trackId())) { + final var sampled = track1DManager.sampleTrackValues( + primaryChunkedFile, + binding.trackId(), + primaryMatrixWithWeights.startRowIncl(), + primaryMatrixWithWeights.startRowIncl() + rowCount, + bpResolution, + QueryLengthUnit.PIXELS + ); + rowTrackValuesByTrackId.put(binding.trackId(), normalizeTrackValues(sampled, rowCount)); + } + } else { + if (!colTrackValuesByTrackId.containsKey(binding.trackId())) { + final var sampled = track1DManager.sampleTrackValues( + primaryChunkedFile, + binding.trackId(), + primaryMatrixWithWeights.startColIncl(), + primaryMatrixWithWeights.startColIncl() + columnCount, + bpResolution, + QueryLengthUnit.PIXELS + ); + colTrackValuesByTrackId.put(binding.trackId(), normalizeTrackValues(sampled, columnCount)); + } + } + } + } + context.rowTrackValuesByTrackId = rowTrackValuesByTrackId; + context.colTrackValuesByTrackId = colTrackValuesByTrackId; + + for (int row = 0; row < rowCount; ++row) { + final var rowWeight = rowWeights != null && row < rowWeights.length ? 
rowWeights[row] : 1.0d; + final var rowPx = rowPxValues[row]; + final var rowBin = rowBinValues[row]; + final var rowBp = rowBpValues[row]; + for (int col = 0; col < columnCount; ++col) { + final var primaryValue = (double) primaryValues[row][col]; + final var secondaryValue = secondaryValues[row][col]; + final var colPx = colPxValues[col]; + final var colBin = colBinValues[col]; + final var rowOutside = + rowPx < 0L || rowPx >= totalVisiblePixels || rowBin < 0L || rowBin >= totalBinsAtResolution; + final var colOutside = + colPx < 0L || colPx >= totalVisiblePixels || colBin < 0L || colBin >= totalBinsAtResolution; + if (rowOutside || colOutside) { + result[row][col] = 0.0d; + continue; + } + final var colWeight = colWeights != null && col < colWeights.length ? colWeights[col] : 1.0d; + context.primaryValue = Double.isFinite(primaryValue) ? primaryValue : 0.0d; + context.secondaryValue = Double.isFinite(secondaryValue) ? secondaryValue : 0.0d; + context.rowWeight = rowWeight; + context.colWeight = colWeight; + context.resolutionScalingCoeff = resolutionScalingCoeff; + context.resolutionLinearScalingCoeff = resolutionLinearScalingCoeff; + context.rowPx = rowPx; + context.colPx = colPx; + context.rowBin = rowBin; + context.colBin = colBin; + context.rowBp = rowBp; + context.colBp = colBpValues[col]; + context.bpResolution = bpResolution; + context.rowLocalIndex = row; + context.colLocalIndex = col; + result[row][col] = pipelineConfig.evaluate(context.rowPx <= context.colPx, context); + } + } + return result; + } + + private static byte[] encodeFloatMatrixLittleEndian(final double[][] matrix, final int rows, final int cols) { + final var bb = ByteBuffer.allocate(rows * cols * Float.BYTES).order(ByteOrder.LITTLE_ENDIAN); + for (int row = 0; row < rows; row++) { + final var sourceRow = matrix[row]; + for (int col = 0; col < cols; col++) { + bb.putFloat((float) sourceRow[col]); + } + } + return bb.array(); + } + + private static byte[] 
encodeDoubleMatrixLittleEndian(final double[][] matrix, final int rows, final int cols) { + final var bb = ByteBuffer.allocate(rows * cols * Double.BYTES).order(ByteOrder.LITTLE_ENDIAN); + for (int row = 0; row < rows; row++) { + final var sourceRow = matrix[row]; + for (int col = 0; col < cols; col++) { + bb.putDouble(sourceRow[col]); + } + } + return bb.array(); + } + + private static byte[] encodeLongMatrixLittleEndian(final long[][] matrix, final int rows, final int cols) { + final var bb = ByteBuffer.allocate(rows * cols * Long.BYTES).order(ByteOrder.LITTLE_ENDIAN); + for (int row = 0; row < rows; row++) { + final var sourceRow = matrix[row]; + for (int col = 0; col < cols; col++) { + bb.putLong(sourceRow[col]); + } + } + return bb.array(); + } + + private static double[][] asDoubleMatrix(final long[][] source, final int rows, final int cols) { + final var result = new double[rows][cols]; + for (int row = 0; row < rows; row++) { + final var src = source[row]; + final var dst = result[row]; + for (int col = 0; col < cols; col++) { + dst[col] = src[col]; + } + } + return result; + } + + private static ArrayList toJsonArray(final double[] values, final int expectedLength) { + final var result = new ArrayList(Math.max(0, expectedLength)); + for (int i = 0; i < expectedLength; i++) { + final var value = values != null && i < values.length ? 
values[i] : 1.0d; + result.add(value); + } + return result; + } + + private static ArrayList flattenLongMatrix(final long[][] matrix, final int rows, final int cols) { + final var result = new ArrayList(rows * cols); + for (int row = 0; row < rows; row++) { + final var sourceRow = matrix[row]; + for (int col = 0; col < cols; col++) { + result.add(sourceRow[col]); + } + } + return result; + } + + private static ArrayList flattenDoubleMatrix(final double[][] matrix, final int rows, final int cols) { + final var result = new ArrayList(rows * cols); + for (int row = 0; row < rows; row++) { + final var sourceRow = matrix[row]; + for (int col = 0; col < cols; col++) { + result.add(sourceRow[col]); + } + } + return result; + } + + private long resolveRangeStart(final @NotNull JsonObject request, + final @NotNull Axis axis, + final @NotNull QueryLengthUnit units) { + final var keySuffix = axis == Axis.ROW ? "Row" : "Col"; + final String[] keys = switch (units) { + case PIXELS -> new String[]{"start" + keySuffix + "Px", "start" + keySuffix, "startPx", "start"}; + case BINS -> new String[]{"start" + keySuffix + "Bin", "start" + keySuffix, "startBin", "start"}; + case BASE_PAIRS -> new String[]{"start" + keySuffix + "BP", "start" + keySuffix, "startBP", "start"}; + }; + return getLong(request, 0L, keys); + } + + private long resolveRangeEnd(final @NotNull JsonObject request, + final @NotNull Axis axis, + final @NotNull QueryLengthUnit units, + final long startValue) { + final var keySuffix = axis == Axis.ROW ? 
"Row" : "Col"; + final String[] endKeys = switch (units) { + case PIXELS -> new String[]{"end" + keySuffix + "Px", "end" + keySuffix, "endPx", "end"}; + case BINS -> new String[]{"end" + keySuffix + "Bin", "end" + keySuffix, "endBin", "end"}; + case BASE_PAIRS -> new String[]{"end" + keySuffix + "BP", "end" + keySuffix, "endBP", "end"}; + }; + final var explicitEnd = getOptionalLong(request, endKeys); + if (explicitEnd != null) { + return explicitEnd; + } + final String[] lengthKeys = axis == Axis.ROW + ? new String[]{"rows", "height", "rowCount"} + : new String[]{"cols", "width", "colCount"}; + final var length = getLong(request, 0L, lengthKeys); + return startValue + Math.max(0L, length); + } + + private static long convertToPixels(final @NotNull ChunkedFile chunkedFile, + final @NotNull ResolutionDescriptor resolutionDescriptor, + final @NotNull QueryLengthUnit units, + final long value) { + return switch (units) { + case PIXELS -> value; + case BINS -> chunkedFile.convertUnits( + value, + resolutionDescriptor, + QueryLengthUnit.BINS, + resolutionDescriptor, + QueryLengthUnit.PIXELS + ); + case BASE_PAIRS -> chunkedFile.convertUnits( + value, + ResolutionDescriptor.fromResolutionOrder(0), + QueryLengthUnit.BASE_PAIRS, + resolutionDescriptor, + QueryLengthUnit.PIXELS + ); + }; + } + + private static QueryLengthUnit parseUnits(final @NotNull String rawValue) { + final var normalized = rawValue.trim().toUpperCase(); + return switch (normalized) { + case "PIXEL", "PIXELS", "PX" -> QueryLengthUnit.PIXELS; + case "BIN", "BINS" -> QueryLengthUnit.BINS; + case "BP", "BASE_PAIRS", "BASEPAIR", "BASEPAIRS" -> QueryLengthUnit.BASE_PAIRS; + default -> throw new IllegalArgumentException( + "Unsupported unit '" + rawValue + "'. Use one of: PIXELS, BINS, BP." + ); + }; + } + + private static long getLong(final @NotNull JsonObject request, + final long fallback, + final @NotNull String... keys) { + final var value = getOptionalLong(request, keys); + return value != null ? 
value : fallback; + } + + private static Long getOptionalLong(final @NotNull JsonObject request, + final @NotNull String... keys) { + for (final var key : keys) { + final var value = request.getValue(key); + if (value instanceof Number number) { + return number.longValue(); + } + } + return null; + } + private TileSignalRanges buildSignalRanges(final @NotNull TileStatisticHolder stats, final @NotNull ru.itmo.ctlab.hict.hict_library.chunkedfile.ChunkedFile chunkedFile) { return new TileSignalRanges( @@ -339,4 +1085,371 @@ private record TileResponsePayload(@NotNull String contentType, String jsonBody, Buffer binaryBody) { } + + private record MatrixResponsePayload(@NotNull String contentType, + String jsonBody, + Buffer binaryBody, + @NotNull Map headers) { + } + + private enum Axis { + ROW, + COL + } + + public enum MatrixSignalMode { + RAW_COUNTS, + COOLER_WEIGHTED, + TRADITIONAL_NORMALIZED, + PIPELINE_SIGNAL; + + static @NotNull MatrixSignalMode fromRaw(final @NotNull String rawValue) { + final var normalized = rawValue.trim().toUpperCase(); + return switch (normalized) { + case "RAW", "RAW_COUNTS", "COUNTS" -> RAW_COUNTS; + case "COOLER_WEIGHTED", "WEIGHTED", "BALANCED" -> COOLER_WEIGHTED; + case "TRADITIONAL_NORMALIZED", "NORMALIZED", "VISUALIZATION_NORMALIZED" -> TRADITIONAL_NORMALIZED; + case "PIPELINE_SIGNAL", "PIPELINE", "RENDER_PIPELINE_SIGNAL" -> PIPELINE_SIGNAL; + default -> throw new IllegalArgumentException( + "Unsupported matrix signal mode '" + rawValue + "'. Use RAW_COUNTS, COOLER_WEIGHTED, TRADITIONAL_NORMALIZED or PIPELINE_SIGNAL." 
+ ); + }; + } + } + + public enum MatrixResponseFormat { + JSON, + BINARY_FLOAT32, + BINARY_FLOAT64, + BINARY_INT64; + + static @NotNull MatrixResponseFormat fromRaw(final @NotNull String rawValue) { + final var normalized = rawValue.trim().toUpperCase(); + return switch (normalized) { + case "JSON", "JSON_FLAT" -> JSON; + case "BINARY_FLOAT32", "FLOAT32", "F32" -> BINARY_FLOAT32; + case "BINARY_FLOAT64", "FLOAT64", "F64" -> BINARY_FLOAT64; + case "BINARY_INT64", "INT64", "I64" -> BINARY_INT64; + default -> throw new IllegalArgumentException( + "Unsupported matrix response format '" + rawValue + "'. Use JSON, BINARY_FLOAT32, BINARY_FLOAT64 or BINARY_INT64." + ); + }; + } + + @NotNull String defaultDtype() { + return switch (this) { + case JSON -> "float64"; + case BINARY_FLOAT32 -> "float32"; + case BINARY_FLOAT64 -> "float64"; + case BINARY_INT64 -> "int64"; + }; + } + } + + private @NotNull BufferedImage renderTraditionalDualSourceTile(final @NotNull ChunkedFile primaryChunkedFile, + final @NotNull ChunkedFile secondaryChunkedFile, + final @NotNull ru.itmo.ctlab.hict.hict_library.chunkedfile.MatrixQueries.MatrixWithWeights primaryMatrixWithWeights, + final @NotNull ru.itmo.ctlab.hict.hict_library.chunkedfile.MatrixQueries.MatrixWithWeights secondaryMatrixWithWeights, + final @NotNull ru.itmo.ctlab.hict.hict_library.visualization.SimpleVisualizationOptions options, + final boolean swapUpperLower) { + final var primaryImage = primaryChunkedFile.tileVisualizationProcessor().visualizeTile(primaryMatrixWithWeights, options); + final var secondaryImage = secondaryChunkedFile.tileVisualizationProcessor().visualizeTile(secondaryMatrixWithWeights, options); + final var rowCount = primaryImage.getHeight(); + final var columnCount = primaryImage.getWidth(); + final var result = new BufferedImage(columnCount, rowCount, BufferedImage.TYPE_INT_ARGB); + if (rowCount <= 0 || columnCount <= 0) { + return result; + } + final var primaryRgba = primaryImage.getRGB(0, 0, columnCount, 
rowCount, null, 0, columnCount); + final var secondaryRgba = new int[rowCount * columnCount]; + final var secondaryRows = secondaryImage.getHeight(); + final var secondaryCols = secondaryImage.getWidth(); + final var secondaryOffsetRow = + Math.max(0, (int) (secondaryMatrixWithWeights.startRowIncl() - primaryMatrixWithWeights.startRowIncl())); + final var secondaryOffsetCol = + Math.max(0, (int) (secondaryMatrixWithWeights.startColIncl() - primaryMatrixWithWeights.startColIncl())); + if (secondaryRows > 0 && secondaryCols > 0) { + final var rawSecondaryRgba = secondaryImage.getRGB(0, 0, secondaryCols, secondaryRows, null, 0, secondaryCols); + for (int srcRow = 0; srcRow < secondaryRows; srcRow++) { + final var dstRow = secondaryOffsetRow + srcRow; + if (dstRow < 0 || dstRow >= rowCount) { + continue; + } + for (int srcCol = 0; srcCol < secondaryCols; srcCol++) { + final var dstCol = secondaryOffsetCol + srcCol; + if (dstCol < 0 || dstCol >= columnCount) { + continue; + } + secondaryRgba[dstRow * columnCount + dstCol] = rawSecondaryRgba[srcRow * secondaryCols + srcCol]; + } + } + } + final var merged = new int[rowCount * columnCount]; + final var rowStartPx = primaryMatrixWithWeights.startRowIncl(); + final var colStartPx = primaryMatrixWithWeights.startColIncl(); + int index = 0; + for (int row = 0; row < rowCount; row++) { + final long rowPx = rowStartPx + row; + for (int col = 0; col < columnCount; col++) { + final long colPx = colStartPx + col; + final boolean upperTriangle = rowPx <= colPx; + final boolean usePrimary = swapUpperLower ? !upperTriangle : upperTriangle; + merged[index] = usePrimary ? 
primaryRgba[index] : secondaryRgba[index]; + index++; + } + } + result.setRGB(0, 0, columnCount, rowCount, merged, 0, columnCount); + return result; + } + + private @NotNull BufferedImage renderPipelineTile(final @NotNull ChunkedFile primaryChunkedFile, + final ChunkedFile secondaryChunkedFile, + final @NotNull ru.itmo.ctlab.hict.hict_library.chunkedfile.MatrixQueries.MatrixWithWeights primaryMatrixWithWeights, + final ru.itmo.ctlab.hict.hict_library.chunkedfile.MatrixQueries.MatrixWithWeights secondaryMatrixWithWeights, + final @NotNull ru.itmo.ctlab.hict.hict_library.visualization.SimpleVisualizationOptions options, + final @NotNull RenderPipelineConfig pipelineConfig, + final Track1DManager track1DManager) { + final var primaryValues = primaryMatrixWithWeights.matrix(); + final var rowCount = primaryValues.length; + final var columnCount = rowCount > 0 ? primaryValues[0].length : 0; + final var image = new BufferedImage(columnCount, rowCount, BufferedImage.TYPE_INT_ARGB); + final var rgba = new int[Math.max(0, rowCount * columnCount)]; + if (rowCount == 0 || columnCount == 0) { + return image; + } + + final var secondaryValues = new double[rowCount][columnCount]; + if (secondaryMatrixWithWeights != null && secondaryChunkedFile != null) { + final var candidate = secondaryMatrixWithWeights.matrix(); + final var candidateRowCount = candidate.length; + final var candidateColCount = + candidateRowCount > 0 && candidate[0] != null ? 
candidate[0].length : 0; + final var rowOffset = + (int) (secondaryMatrixWithWeights.startRowIncl() - primaryMatrixWithWeights.startRowIncl()); + final var colOffset = + (int) (secondaryMatrixWithWeights.startColIncl() - primaryMatrixWithWeights.startColIncl()); + for (int row = 0; row < candidateRowCount; row++) { + final var dstRow = row + rowOffset; + if (dstRow < 0 || dstRow >= rowCount) { + continue; + } + final var sourceRow = candidate[row]; + if (sourceRow == null) { + continue; + } + final var sourceColCount = Math.min(sourceRow.length, candidateColCount); + for (int col = 0; col < sourceColCount; col++) { + final var dstCol = col + colOffset; + if (dstCol < 0 || dstCol >= columnCount) { + continue; + } + secondaryValues[dstRow][dstCol] = sourceRow[col]; + } + } + } + + final var resolutionDescriptor = primaryMatrixWithWeights.resolutionDescriptor(); + final var bpResolution = primaryChunkedFile.getResolutions()[resolutionDescriptor.getResolutionOrderInArray()]; + final var bpResolutionDescriptor = ResolutionDescriptor.fromResolutionOrder(0); + final var totalVisiblePixels = primaryChunkedFile.getContigTree().getLengthInUnits( + QueryLengthUnit.PIXELS, + resolutionDescriptor + ); + final var resolutionOrder = resolutionDescriptor.getResolutionOrderInArray(); + final var matrixSizeBins = primaryChunkedFile.getMatrixSizeBins(); + final var totalBinsAtResolution = + resolutionOrder >= 0 && resolutionOrder < matrixSizeBins.length + ? 
matrixSizeBins[resolutionOrder] + : Long.MAX_VALUE; + + final var rowPxValues = new long[rowCount]; + final var rowBinValues = new long[rowCount]; + final var rowBpValues = new long[rowCount]; + for (int row = 0; row < rowCount; ++row) { + final var rowPx = primaryMatrixWithWeights.startRowIncl() + row; + rowPxValues[row] = rowPx; + rowBinValues[row] = primaryChunkedFile.convertUnits( + rowPx, + resolutionDescriptor, + QueryLengthUnit.PIXELS, + resolutionDescriptor, + QueryLengthUnit.BINS + ); + rowBpValues[row] = primaryChunkedFile.convertUnits( + rowPx, + resolutionDescriptor, + QueryLengthUnit.PIXELS, + bpResolutionDescriptor, + QueryLengthUnit.BASE_PAIRS + ); + } + + final var colPxValues = new long[columnCount]; + final var colBinValues = new long[columnCount]; + final var colBpValues = new long[columnCount]; + for (int col = 0; col < columnCount; ++col) { + final var colPx = primaryMatrixWithWeights.startColIncl() + col; + colPxValues[col] = colPx; + colBinValues[col] = primaryChunkedFile.convertUnits( + colPx, + resolutionDescriptor, + QueryLengthUnit.PIXELS, + resolutionDescriptor, + QueryLengthUnit.BINS + ); + colBpValues[col] = primaryChunkedFile.convertUnits( + colPx, + resolutionDescriptor, + QueryLengthUnit.PIXELS, + bpResolutionDescriptor, + QueryLengthUnit.BASE_PAIRS + ); + } + + final var context = new RenderPipelineConfig.MutablePixelContext(); + final var rowWeights = primaryMatrixWithWeights.rowWeights(); + final var colWeights = primaryMatrixWithWeights.colWeights(); + final var resolutionScalingCoeffs = primaryChunkedFile.getResolutionScalingCoefficient(); + final var resolutionLinearScalingCoeffs = primaryChunkedFile.getResolutionLinearScalingCoefficient(); + final var resolutionScalingCoeff = + resolutionOrder >= 0 && resolutionOrder < resolutionScalingCoeffs.length + ? 
resolutionScalingCoeffs[resolutionOrder] + : 1.0d; + final var resolutionLinearScalingCoeff = + resolutionOrder >= 0 && resolutionOrder < resolutionLinearScalingCoeffs.length + ? resolutionLinearScalingCoeffs[resolutionOrder] + : 1.0d; + + final var rowTrackValuesByTrackId = new HashMap(); + final var colTrackValuesByTrackId = new HashMap(); + if (track1DManager != null) { + for (final var binding : pipelineConfig.requiredTrackBindings()) { + if (binding.axis() == RenderPipelineConfig.TrackAxis.ROW) { + if (!rowTrackValuesByTrackId.containsKey(binding.trackId())) { + final var sampled = track1DManager.sampleTrackValues( + primaryChunkedFile, + binding.trackId(), + primaryMatrixWithWeights.startRowIncl(), + primaryMatrixWithWeights.startRowIncl() + rowCount, + bpResolution, + QueryLengthUnit.PIXELS + ); + rowTrackValuesByTrackId.put(binding.trackId(), normalizeTrackValues(sampled, rowCount)); + } + } else { + if (!colTrackValuesByTrackId.containsKey(binding.trackId())) { + final var sampled = track1DManager.sampleTrackValues( + primaryChunkedFile, + binding.trackId(), + primaryMatrixWithWeights.startColIncl(), + primaryMatrixWithWeights.startColIncl() + columnCount, + bpResolution, + QueryLengthUnit.PIXELS + ); + colTrackValuesByTrackId.put(binding.trackId(), normalizeTrackValues(sampled, columnCount)); + } + } + } + } + context.rowTrackValuesByTrackId = rowTrackValuesByTrackId; + context.colTrackValuesByTrackId = colTrackValuesByTrackId; + + int pixelIndex = 0; + for (int row = 0; row < rowCount; ++row) { + final var rowWeight = rowWeights != null && row < rowWeights.length ? 
rowWeights[row] : 1.0d; + final var rowPx = rowPxValues[row]; + final var rowBin = rowBinValues[row]; + final var rowBp = rowBpValues[row]; + for (int col = 0; col < columnCount; ++col) { + final var primaryValue = (double) primaryValues[row][col]; + final var secondaryValue = secondaryValues[row][col]; + final var colPx = colPxValues[col]; + final var colBin = colBinValues[col]; + final var rowOutside = + rowPx < 0L || rowPx >= totalVisiblePixels || rowBin < 0L || rowBin >= totalBinsAtResolution; + final var colOutside = + colPx < 0L || colPx >= totalVisiblePixels || colBin < 0L || colBin >= totalBinsAtResolution; + if (rowOutside || colOutside) { + rgba[pixelIndex++] = 0x00000000; + continue; + } + final var colWeight = colWeights != null && col < colWeights.length ? colWeights[col] : 1.0d; + context.primaryValue = Double.isFinite(primaryValue) ? primaryValue : 0.0d; + context.secondaryValue = Double.isFinite(secondaryValue) ? secondaryValue : 0.0d; + context.rowWeight = rowWeight; + context.colWeight = colWeight; + context.resolutionScalingCoeff = resolutionScalingCoeff; + context.resolutionLinearScalingCoeff = resolutionLinearScalingCoeff; + context.rowPx = rowPx; + context.colPx = colPx; + context.rowBin = rowBin; + context.colBin = colBin; + context.rowBp = rowBp; + context.colBp = colBpValues[col]; + context.bpResolution = bpResolution; + context.rowLocalIndex = row; + context.colLocalIndex = col; + rgba[pixelIndex++] = pipelineConfig.evaluateArgb(context.rowPx <= context.colPx, context, options); + } + } + image.setRGB(0, 0, columnCount, rowCount, rgba, 0, columnCount); + return image; + } + + private double @NotNull [] normalizeTrackValues(final double[] sampled, + final int expectedLength) { + final var safeLength = Math.max(0, expectedLength); + if (safeLength == 0) { + return new double[0]; + } + if (sampled == null || sampled.length == 0) { + return new double[safeLength]; + } + if (sampled.length == safeLength) { + return sampled; + } + final var 
result = new double[safeLength]; + System.arraycopy(sampled, 0, result, 0, Math.min(sampled.length, safeLength)); + return result; + } + + private @Nullable ru.itmo.ctlab.hict.hict_library.chunkedfile.MatrixQueries.MatrixWithWeights querySecondarySubmatrix( + final @Nullable ChunkedFile secondaryChunkedFile, + final @NotNull ResolutionDescriptor resolutionDescriptor, + final long startRowPx, + final long startColPx, + final long endRowPx, + final long endColPx + ) { + if (secondaryChunkedFile == null) { + return null; + } + final var maxVisiblePixels = secondaryChunkedFile.getContigTree().getLengthInUnits( + QueryLengthUnit.PIXELS, + resolutionDescriptor + ); + if (maxVisiblePixels <= 0L) { + return null; + } + final var clampedStartRow = Math.max(0L, Math.min(startRowPx, maxVisiblePixels)); + final var clampedEndRow = Math.max(clampedStartRow, Math.min(endRowPx, maxVisiblePixels)); + final var clampedStartCol = Math.max(0L, Math.min(startColPx, maxVisiblePixels)); + final var clampedEndCol = Math.max(clampedStartCol, Math.min(endColPx, maxVisiblePixels)); + if (clampedEndRow <= clampedStartRow || clampedEndCol <= clampedStartCol) { + return null; + } + try { + return secondaryChunkedFile.matrixQueries().getSubmatrix( + resolutionDescriptor, + clampedStartRow, + clampedStartCol, + clampedEndRow, + clampedEndCol, + true + ); + } catch (final RuntimeException ex) { + log.debug("Failed to query secondary submatrix for requested window", ex); + return null; + } + } } diff --git a/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/tracks/TrackHandlersHolder.java b/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/tracks/TrackHandlersHolder.java index e7eff0b..d526317 100644 --- a/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/tracks/TrackHandlersHolder.java +++ b/src/main/java/ru/itmo/ctlab/hict/hict_server/handlers/tracks/TrackHandlersHolder.java @@ -106,6 +106,70 @@ public void addHandlersToRouter(final @NotNull Router router) { ); }); + 
router.post("/tracks/open_cooler_weights").handler(ctx -> { + final var scheduler = getScheduler(ctx); + if (scheduler == null) { + return; + } + final var manager = getTrackManager(ctx); + if (manager == null) { + return; + } + final var request = ctx.body() == null ? null : ctx.body().asJsonObject(); + final @NotNull @NonNull LocalMap map = this.vertx.sharedData().getLocalMap("hict_server"); + final var chunkedFile = extractChunkedFile(map, ctx); + if (chunkedFile == null) { + return; + } + scheduler.submit( + ctx, + RequestTaskScheduler.RequestPriority.ASSEMBLY, + null, + () -> { + final var summary = manager.openCoolerWeightsTrack( + request == null ? null : request.getString("name"), + request == null ? null : request.getString("color") + ); + manager.startPrecompute(chunkedFile, summary.getTrackId(), false); + return summary; + }, + summary -> ctx.response() + .putHeader("content-type", "application/json") + .end(Json.encode(summary)) + ); + }); + + router.post("/tracks/probe").handler(ctx -> { + final var scheduler = getScheduler(ctx); + if (scheduler == null) { + return; + } + final var request = ctx.body().asJsonObject(); + final var filename = request.getString("filename"); + if (filename == null || filename.isBlank()) { + ctx.fail(new IllegalArgumentException("Track filename is required")); + return; + } + final var manager = getTrackManager(ctx); + if (manager == null) { + return; + } + final @NotNull @NonNull LocalMap map = this.vertx.sharedData().getLocalMap("hict_server"); + final var chunkedFile = extractChunkedFile(map, ctx); + if (chunkedFile == null) { + return; + } + scheduler.submit( + ctx, + RequestTaskScheduler.RequestPriority.TRACK, + null, + () -> manager.probeTrackCompatibility(chunkedFile, filename), + report -> ctx.response() + .putHeader("content-type", "application/json") + .end(Json.encode(report)) + ); + }); + router.post("/tracks/list").handler(ctx -> { final var scheduler = getScheduler(ctx); if (scheduler == null) { @@ -149,7 
+213,8 @@ public void addHandlersToRouter(final @NotNull Router router) { request.getString("color"), request.getString("name"), request.getString("renderMode"), - request.getString("aggregationMode") + request.getString("aggregationMode"), + request.containsKey("logScale") ? request.getBoolean("logScale") : null ), updated -> ctx.response() .putHeader("content-type", "application/json") @@ -186,6 +251,37 @@ public void addHandlersToRouter(final @NotNull Router router) { ); }); + router.post("/tracks/reorder").handler(ctx -> { + final var scheduler = getScheduler(ctx); + if (scheduler == null) { + return; + } + final var request = ctx.body().asJsonObject(); + final var trackId = request.getString("trackId"); + if (trackId == null || trackId.isBlank()) { + ctx.fail(new IllegalArgumentException("trackId is required")); + return; + } + if (!request.containsKey("targetIndex")) { + ctx.fail(new IllegalArgumentException("targetIndex is required")); + return; + } + final var targetIndex = request.getInteger("targetIndex", 0); + final var manager = getTrackManager(ctx); + if (manager == null) { + return; + } + scheduler.submit( + ctx, + RequestTaskScheduler.RequestPriority.ASSEMBLY, + null, + () -> manager.reorderTrack(trackId, targetIndex), + tracks -> ctx.response() + .putHeader("content-type", "application/json") + .end(Json.encode(tracks)) + ); + }); + router.post("/tracks/precompute/status").handler(ctx -> { final var scheduler = getScheduler(ctx); if (scheduler == null) { @@ -282,6 +378,114 @@ public void addHandlersToRouter(final @NotNull Router router) { ))) ); }); + + router.post("/tracks/search_features").handler(ctx -> { + final var scheduler = getScheduler(ctx); + if (scheduler == null) { + return; + } + final var request = ctx.body().asJsonObject(); + final var query = request.getString("query", request.getString("q", "")); + final var limit = request.getInteger("limit", 50); + final var offset = request.getInteger("offset", 0); + final var trackId = 
request.getString("trackId"); + final var manager = getTrackManager(ctx); + if (manager == null) { + return; + } + final @NotNull @NonNull LocalMap map = this.vertx.sharedData().getLocalMap("hict_server"); + final var chunkedFile = extractChunkedFile(map, ctx); + if (chunkedFile == null) { + return; + } + scheduler.submit( + ctx, + RequestTaskScheduler.RequestPriority.TRACK, + RequestTaskScheduler.CancellationDomain.TRACK, + () -> manager.searchFeatures(chunkedFile, query, limit, offset, trackId), + result -> ctx.response() + .putHeader("content-type", "application/json") + .end(Json.encode(result)), + () -> ctx.response() + .putHeader("content-type", "application/json") + .end(Json.encode(new Track1DManager.FeatureSearchResponse( + query == null ? "" : query.trim(), + Math.max(1, limit), + Math.max(0, offset), + false, + List.of() + ))) + ); + }); + + router.post("/tracks/feature_context").handler(ctx -> { + final var scheduler = getScheduler(ctx); + if (scheduler == null) { + return; + } + final var request = ctx.body().asJsonObject(); + final var widthPx = request.getInteger("widthPx", 1024); + final var bpResolution = request.getLong("bpResolution", 1L); + final var marginScreens = request.getDouble("marginScreens", 1.0d); + final var manager = getTrackManager(ctx); + if (manager == null) { + return; + } + final @NotNull @NonNull LocalMap map = this.vertx.sharedData().getLocalMap("hict_server"); + final var chunkedFile = extractChunkedFile(map, ctx); + if (chunkedFile == null) { + return; + } + final var resolvedUnits = resolveUnits(request); + final var start = resolveStart(request, resolvedUnits); + final var end = resolveEnd(request, resolvedUnits, start + 1L); + scheduler.submit( + ctx, + RequestTaskScheduler.RequestPriority.TRACK, + RequestTaskScheduler.CancellationDomain.TRACK, + () -> { + final var initialQuery = manager.queryVisibleTracks( + chunkedFile, + start, + end, + Math.max(2, widthPx), + bpResolution, + resolvedUnits + ); + return 
manager.queryFeatureContext( + chunkedFile, + initialQuery.getStartBp(), + initialQuery.getEndBp(), + widthPx, + bpResolution, + marginScreens + ); + }, + result -> ctx.response() + .putHeader("content-type", "application/json") + .end(Json.encode(result)), + () -> ctx.response() + .putHeader("content-type", "application/json") + .end(Json.encode(new Track1DManager.FeatureContextResponse( + 0L, + 1L, + 0L, + 1L, + marginScreens, + Math.max(1, widthPx), + bpResolution, + new Track1DManager.QueryResult( + 0L, + 1L, + 0L, + 1L, + Math.max(1, widthPx), + bpResolution, + List.of() + ) + ))) + ); + }); } private static @NotNull QueryLengthUnit resolveUnits(final @NotNull io.vertx.core.json.JsonObject request) { diff --git a/src/main/java/ru/itmo/ctlab/hict/hict_server/tracks/Track1DManager.java b/src/main/java/ru/itmo/ctlab/hict/hict_server/tracks/Track1DManager.java index 41e2e62..f6668e6 100644 --- a/src/main/java/ru/itmo/ctlab/hict/hict_server/tracks/Track1DManager.java +++ b/src/main/java/ru/itmo/ctlab/hict/hict_server/tracks/Track1DManager.java @@ -43,6 +43,7 @@ import org.jetbrains.annotations.Nullable; import ru.itmo.ctlab.hict.hict_library.chunkedfile.ChunkedFile; import ru.itmo.ctlab.hict.hict_library.chunkedfile.resolution.ResolutionDescriptor; +import ru.itmo.ctlab.hict.hict_library.domain.ATUDirection; import ru.itmo.ctlab.hict.hict_library.domain.ContigDirection; import ru.itmo.ctlab.hict.hict_library.domain.ContigHideType; import ru.itmo.ctlab.hict.hict_library.domain.QueryLengthUnit; @@ -69,13 +70,18 @@ @Slf4j public class Track1DManager { private static final Set SUPPORTED_EXTENSIONS = Set.of( - ".bed", ".bed.gz", ".vcf", ".vcf.gz", ".bw", ".bigwig", ".bam" + ".bed", ".bed.gz", + ".vcf", ".vcf.gz", + ".gff", ".gff.gz", ".gff3", ".gff3.gz", ".gtf", ".gtf.gz", + ".bw", ".bigwig", ".bam" ); private static final List COLOR_PALETTE = List.of( "#4e79a7", "#f28e2b", "#e15759", "#76b7b2", "#59a14f", "#edc948", "#b07aa1", "#ff9da7", "#9c755f", "#bab0ab" ); private 
static final int MAX_FEATURES_PER_QUERY = 250_000; + private static final long BED_FEATURE_STYLE_MAX_FEATURES = 50_000L; + private static final String COOLER_WEIGHTS_SOURCE_FILE = "__internal__/cooler_weights"; private static final String PRECOMPUTE_CACHE_VERSION = "1"; private static final long MAX_PRECOMPUTE_VISIBLE_PIXELS = 2_000_000L; private static final int PRECOMPUTE_JOB_THREADS = resolveThreadCount("HICT_TRACK_PRECOMPUTE_JOB_THREADS", 2); @@ -177,7 +183,7 @@ private static int resolveThreadCount(final @NotNull String envKey, final int de final var trackType = TrackType.fromPath(resolvedPath); if (trackType == TrackType.UNSUPPORTED) { throw new IllegalArgumentException( - "Unsupported track format for " + relativeFilename + ". Supported: BED/VCF/BigWig/BAM." + "Unsupported track format for " + relativeFilename + ". Supported: BED/VCF/GFF/GTF/BigWig/BAM." ); } final var trackId = "trk_" + this.trackCounter.incrementAndGet(); @@ -185,13 +191,7 @@ private static int resolveThreadCount(final @NotNull String envKey, final int de ? 
resolvedPath.getFileName().toString() : requestedName.trim(); final var color = normalizeColor(requestedColor, colorForIndex((int) this.trackCounter.get() - 1)); - final TrackDataSource dataSource = switch (trackType) { - case BED -> InMemoryTrackDataSource.fromBed(resolvedPath); - case VCF -> InMemoryTrackDataSource.fromVcf(resolvedPath); - case BIGWIG -> new BigWigTrackDataSource(resolvedPath); - case BAM -> new BamTrackDataSource(resolvedPath); - case UNSUPPORTED -> throw new IllegalStateException("Unexpected unsupported track type"); - }; + final TrackDataSource dataSource = createDataSource(trackType, resolvedPath); final var state = new TrackState( trackId, resolvedName, @@ -201,7 +201,40 @@ private static int resolveThreadCount(final @NotNull String envKey, final int de true, dataSource, BamRenderMode.COVERAGE, - BigWigAggregationMode.MAX + BigWigAggregationMode.MAX, + false + ); + try { + this.lock.writeLock().lock(); + this.tracks.put(trackId, state); + } catch (final RuntimeException ex) { + closeDataSourceQuietly(dataSource); + throw ex; + } finally { + this.lock.writeLock().unlock(); + } + return state.toSummary(); + } + + public @NotNull TrackSummary openCoolerWeightsTrack(final String requestedName, + final String requestedColor) { + final var trackId = "trk_" + this.trackCounter.incrementAndGet(); + final var resolvedName = (requestedName == null || requestedName.isBlank()) + ? 
"Cooler weights" + : requestedName.trim(); + final var color = normalizeColor(requestedColor, colorForIndex((int) this.trackCounter.get() - 1)); + final TrackDataSource dataSource = new CoolerWeightsTrackDataSource(); + final var state = new TrackState( + trackId, + resolvedName, + TrackType.COOLER_WEIGHTS, + COOLER_WEIGHTS_SOURCE_FILE, + color, + true, + dataSource, + BamRenderMode.COVERAGE, + BigWigAggregationMode.MAX, + false ); try { this.lock.writeLock().lock(); @@ -215,6 +248,61 @@ private static int resolveThreadCount(final @NotNull String envKey, final int de return state.toSummary(); } + public @NotNull TrackCompatibilityReport probeTrackCompatibility(final @NotNull ChunkedFile chunkedFile, + final @NotNull String relativeFilename) { + final var resolvedPath = resolveDataPath(relativeFilename); + final var trackType = TrackType.fromPath(resolvedPath); + if (trackType == TrackType.UNSUPPORTED) { + throw new IllegalArgumentException( + "Unsupported track format for " + relativeFilename + ". Supported: BED/VCF/GFF/GTF/BigWig/BAM." 
+ ); + } + final var sourceNameSet = buildSourceNameSet(chunkedFile, this.linkedFastaAliasesBySource); + final var assemblyNameSet = buildAssemblyNameSet(chunkedFile); + final TrackDataSource dataSource = createDataSource(trackType, resolvedPath); + try { + final var trackNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER); + trackNames.addAll(dataSource.sourceNames()); + final int total = trackNames.size(); + int matchedSource = 0; + int matchedAssembly = 0; + int matchedAny = 0; + final var unknownNames = new ArrayList(); + for (final var name : trackNames) { + final var inSource = sourceNameSet.contains(name); + final var inAssembly = assemblyNameSet.contains(name); + if (inSource) { + matchedSource++; + } + if (inAssembly) { + matchedAssembly++; + } + if (inSource || inAssembly) { + matchedAny++; + } else if (unknownNames.size() < 32) { + unknownNames.add(name); + } + } + final var status = resolveCompatibilityStatus(total, matchedAny); + final var recommendation = matchedSource >= matchedAssembly ? "SOURCE" : "ASSEMBLY"; + final var message = buildCompatibilityMessage(trackType, total, matchedAny, unknownNames.size()); + return new TrackCompatibilityReport( + relativeFilename, + trackType.name(), + status, + total, + matchedSource, + matchedAssembly, + matchedAny, + unknownNames, + recommendation, + message + ); + } finally { + closeDataSourceQuietly(dataSource); + } + } + public @NotNull List listTracks() { try { this.lock.readLock().lock(); @@ -229,7 +317,8 @@ private static int resolveThreadCount(final @NotNull String envKey, final int de final String color, final String name, final String renderMode, - final String aggregationMode) { + final String aggregationMode, + final Boolean logScale) { try { this.lock.writeLock().lock(); final var current = this.tracks.get(trackId); @@ -241,7 +330,8 @@ private static int resolveThreadCount(final @NotNull String envKey, final int de (color == null || color.isBlank()) ? 
current.color : normalizeColor(color, current.color), (name == null || name.isBlank()) ? current.name : name.trim(), parseBamRenderMode(renderMode, current.bamRenderMode()), - parseBigWigAggregationMode(aggregationMode, current.bigWigAggregationMode()) + parseBigWigAggregationMode(aggregationMode, current.bigWigAggregationMode()), + logScale == null ? current.logScale() : logScale ); this.tracks.put(trackId, updated); return updated.toSummary(); @@ -264,6 +354,28 @@ public void removeTrack(final @NotNull String trackId) { } } + public @NotNull List reorderTrack(final @NotNull String trackId, + final int targetIndex) { + try { + this.lock.writeLock().lock(); + final var entries = new ArrayList<>(this.tracks.values()); + final var sourceIndex = findTrackIndex(entries, trackId); + if (sourceIndex < 0) { + throw new IllegalArgumentException("Unknown track id " + trackId); + } + final var moved = entries.remove(sourceIndex); + final var clampedTarget = Math.max(0, Math.min(targetIndex, entries.size())); + entries.add(clampedTarget, moved); + this.tracks.clear(); + for (final var entry : entries) { + this.tracks.put(entry.trackId(), entry); + } + return this.tracks.values().stream().map(TrackState::toSummary).toList(); + } finally { + this.lock.writeLock().unlock(); + } + } + public void close() { try { this.lock.writeLock().lock(); @@ -376,6 +488,220 @@ public void invalidateInMemoryCache() { return queryVisibleTracksInternal(chunkedFile, segmentsBuildResult, queryStartPx, queryEndPx, safeWidth, bpResolution); } + public @NotNull FeatureSearchResponse searchFeatures(final @NotNull ChunkedFile chunkedFile, + final @Nullable String query, + final int limit, + final int offset, + final @Nullable String trackId) { + final var normalizedQuery = query == null ? 
"" : query.trim(); + if (normalizedQuery.length() < 2) { + return new FeatureSearchResponse(normalizedQuery, 0, 0, false, List.of()); + } + final var safeLimit = Math.max(1, Math.min(500, limit)); + final var safeOffset = Math.max(0, offset); + final var queryLower = normalizedQuery.toLowerCase(Locale.ROOT); + final var sourceToAssemblySegments = buildSourceToAssemblyBpSegments( + chunkedFile, + this.linkedFastaAliasesBySource + ); + final List snapshot; + try { + this.lock.readLock().lock(); + if (trackId != null && !trackId.isBlank()) { + final var state = this.tracks.get(trackId); + snapshot = state == null ? List.of() : List.of(state); + } else { + snapshot = this.tracks.values().stream().toList(); + } + } finally { + this.lock.readLock().unlock(); + } + + final var hits = new ArrayList(safeLimit); + int skipped = 0; + boolean hasMore = false; + outer: + for (final var track : snapshot) { + if (!(track.dataSource() instanceof InMemoryTrackDataSource inMemoryDataSource)) { + continue; + } + final var trackNameMatches = track.name().toLowerCase(Locale.ROOT).contains(queryLower); + for (final var sourceEntry : inMemoryDataSource.featuresBySource().entrySet()) { + final var sourceName = sourceEntry.getKey(); + final var assemblySegments = sourceToAssemblySegments.get(sourceName); + if (assemblySegments == null || assemblySegments.isEmpty()) { + continue; + } + final var sourceNameMatches = sourceName.toLowerCase(Locale.ROOT).contains(queryLower); + for (final var feature : sourceEntry.getValue()) { + final var featureLabel = resolveFeatureSearchLabel(feature, sourceName); + final var featureType = normalizeBlankToNull(feature.featureType()); + final var strand = normalizeBlankToNull(feature.strand()); + final var featureLabelMatches = featureLabel.toLowerCase(Locale.ROOT).contains(queryLower); + final var featureTypeMatches = featureType != null + && featureType.toLowerCase(Locale.ROOT).contains(queryLower); + if (!(trackNameMatches || sourceNameMatches || 
featureLabelMatches || featureTypeMatches)) { + continue; + } + for (final var segment : assemblySegments) { + final var interval = projectSourceIntervalToAssemblyBp(segment, feature.start(), feature.end()); + if (interval.isEmpty()) { + continue; + } + if (skipped < safeOffset) { + skipped++; + continue; + } + if (hits.size() >= safeLimit) { + hasMore = true; + break outer; + } + final var projected = interval.get(); + hits.add(new FeatureSearchHit( + track.trackId(), + track.name(), + sourceName, + featureLabel, + featureType, + strand, + projected.startBp(), + projected.endBp() + )); + } + } + } + } + return new FeatureSearchResponse(normalizedQuery, safeLimit, safeOffset, hasMore, hits); + } + + public @NotNull FeatureContextResponse queryFeatureContext(final @NotNull ChunkedFile chunkedFile, + final long startBp, + final long endBp, + final int widthPx, + final long bpResolution, + final double marginScreens) { + final var safeStartBp = Math.max(0L, Math.min(startBp, endBp)); + final var safeEndBp = Math.max(safeStartBp + 1L, Math.max(startBp, endBp)); + final var safeMarginScreens = Double.isFinite(marginScreens) + ? 
Math.max(0.0d, Math.min(4.0d, marginScreens)) + : 1.0d; + final var safeWidthPx = Math.max(64, Math.min(4096, widthPx)); + final var featureSpanBp = Math.max(1L, safeEndBp - safeStartBp); + final var screenSpanBp = Math.max(featureSpanBp, safeWidthPx * Math.max(1L, bpResolution)); + final var marginBp = (long) Math.floor(screenSpanBp * safeMarginScreens); + final var centerBp = safeStartBp + ((safeEndBp - safeStartBp) >>> 1); + final var halfSpan = screenSpanBp >>> 1; + final var contextStartBp = Math.max(0L, centerBp - halfSpan - marginBp); + final var contextEndBp = Math.max( + contextStartBp + 1L, + centerBp + halfSpan + marginBp + 1L + ); + final var contextWidthPx = Math.max( + safeWidthPx, + Math.min(8192, (int) Math.ceil(safeWidthPx * (1.0d + 2.0d * safeMarginScreens))) + ); + final var contextQuery = queryVisibleTracks( + chunkedFile, + contextStartBp, + contextEndBp, + contextWidthPx, + bpResolution, + QueryLengthUnit.BASE_PAIRS + ); + return new FeatureContextResponse( + safeStartBp, + safeEndBp, + contextStartBp, + contextEndBp, + safeMarginScreens, + contextWidthPx, + bpResolution, + contextQuery + ); + } + + public double @NotNull [] sampleTrackValues(final @NotNull ChunkedFile chunkedFile, + final @NotNull String trackId, + final long start, + final long end, + final long bpResolution, + final @NotNull QueryLengthUnit units) { + final TrackState track; + try { + this.lock.readLock().lock(); + track = this.tracks.get(trackId); + } finally { + this.lock.readLock().unlock(); + } + if (track == null) { + return new double[0]; + } + + final var segmentsBuildResult = + buildSourceToAssemblySegments(chunkedFile, this.linkedFastaAliasesBySource, bpResolution); + final var totalVisiblePixels = segmentsBuildResult.totalVisiblePixels(); + if (totalVisiblePixels <= 0L) { + return new double[0]; + } + + final var pxRange = resolveQueryPxRange( + chunkedFile, + start, + end, + bpResolution, + units, + segmentsBuildResult.orderedSegments(), + totalVisiblePixels + ); 
+ final var queryStartPx = pxRange.startPx(); + final var queryEndPx = pxRange.endPx(); + final int widthPx = (int) Math.max(1L, Math.min(Integer.MAX_VALUE, queryEndPx - queryStartPx)); + + final var bins = queryBinsForTrack( + track.type(), + chunkedFile, + track.dataSource(), + segmentsBuildResult.sourceToAssemblySegments(), + segmentsBuildResult.orderedSegments(), + queryStartPx, + queryEndPx, + widthPx, + bpResolution, + track.bamRenderMode(), + track.bigWigAggregationMode() + ); + + final var values = new double[widthPx]; + Arrays.fill(values, Double.NaN); + for (final var bin : bins) { + final long rawStartPx = bin.getStartPx() != null + ? bin.getStartPx() + : mapAssemblyBpToVisiblePx(bin.getStartBp(), segmentsBuildResult.orderedSegments(), bpResolution, totalVisiblePixels); + final long rawEndPx = bin.getEndPx() != null + ? bin.getEndPx() + : mapAssemblyBpToVisiblePx(Math.max(bin.getStartBp(), bin.getEndBp() - bpResolution), segmentsBuildResult.orderedSegments(), bpResolution, totalVisiblePixels) + 1L; + final int from = (int) Math.max(0L, Math.min(rawStartPx - queryStartPx, widthPx - 1L)); + final int to = (int) Math.max(from + 1L, Math.min(rawEndPx - queryStartPx, widthPx)); + final var value = bin.getValue(); + for (int idx = from; idx < to; idx++) { + if (!Double.isFinite(value)) { + continue; + } + if (Double.isNaN(values[idx])) { + values[idx] = value; + } else { + values[idx] = Math.max(values[idx], value); + } + } + } + for (int i = 0; i < values.length; i++) { + if (Double.isNaN(values[i]) || !Double.isFinite(values[i])) { + values[i] = 0.0d; + } + } + return values; + } + private @NotNull QueryResult queryVisibleTracksInternal(final @NotNull ChunkedFile chunkedFile, final @NotNull SegmentBuildResult segmentsBuildResult, final long queryStartPx, @@ -407,7 +733,15 @@ public void invalidateInMemoryCache() { if (maybePrecomputed != null) { trackRenders.add(maybePrecomputed); } else { - trackRenders.add(track.query(sourceToAssemblySegments, 
queryStartPx, queryEndPx, safeWidth, bpResolution)); + trackRenders.add(track.query( + chunkedFile, + sourceToAssemblySegments, + segmentsBuildResult.orderedSegments(), + queryStartPx, + queryEndPx, + safeWidth, + bpResolution + )); } } catch (final RuntimeException ex) { final var message = ex.getMessage() != null ? ex.getMessage() : ex.getClass().getSimpleName(); @@ -500,6 +834,9 @@ public void invalidateInMemoryCache() { private void maybeScheduleTrackPrecomputeFromQuery(final @NotNull ChunkedFile chunkedFile, final @NotNull TrackState track) { + if (track.dataSource().renderStyle() == RenderStyle.FEATURE) { + return; + } final var runtime = this.precomputeRuntimeByTrackId.get(track.trackId()); if (runtime != null && runtime.isActive()) { return; @@ -571,6 +908,9 @@ private void runTrackPrecompute(final @NotNull ChunkedFile chunkedFile, final @NotNull TrackState track, final @NotNull Path sidecarPath, final boolean force) { + if (track.dataSource().renderStyle() == RenderStyle.FEATURE) { + return List.of(); + } final var tasks = new ArrayList(); final var resolutions = Arrays.stream(chunkedFile.getResolutions()).boxed().sorted(Comparator.reverseOrder()).toList(); final var modeKeys = modeKeysForTrack(track); @@ -614,6 +954,9 @@ private void runTrackPrecompute(final @NotNull ChunkedFile chunkedFile, final int widthPx, final long bpResolution, final @NotNull String assemblySignature) { + if (track.dataSource().renderStyle() == RenderStyle.FEATURE) { + return null; + } final var totalVisiblePixels = orderedSegments.isEmpty() ? 
0L : orderedSegments.get(orderedSegments.size() - 1).visiblePxEnd(); if (totalVisiblePixels <= 0L || totalVisiblePixels > MAX_PRECOMPUTE_VISIBLE_PIXELS) { return null; @@ -640,7 +983,16 @@ private void runTrackPrecompute(final @NotNull ChunkedFile chunkedFile, final var strategy = aggregationStrategy(track); final var bins = aggregatePrecomputedSeries(series, queryStartPx, queryEndPx, widthPx, strategy); final var maxValue = bins.stream().mapToDouble(TrackBin::getValue).max().orElse(0.0d); - return new TrackRender(track.trackId(), track.name(), track.type().name(), track.color(), bins, maxValue, null); + return new TrackRender( + track.trackId(), + track.name(), + track.type().name(), + track.color(), + track.dataSource().renderStyle().name(), + bins, + maxValue, + null + ); } private @NotNull PrecomputedSeries computePrecomputedSeries(final @NotNull ChunkedFile chunkedFile, @@ -652,8 +1004,10 @@ private void runTrackPrecompute(final @NotNull ChunkedFile chunkedFile, final var bigWigAggregationMode = bigWigModeForKey(track.bigWigAggregationMode(), task.modeKey()); final var bins = queryBinsForTrack( track.type(), + chunkedFile, track.dataSource(), segmentsBuildResult.sourceToAssemblySegments(), + segmentsBuildResult.orderedSegments(), 0L, task.totalVisiblePixels(), totalVisiblePixels, @@ -805,19 +1159,30 @@ private void persistPrecomputedSeries(final @NotNull Path sidecarPath, private @NotNull Path sidecarPathForTrackCache(final @NotNull ChunkedFile chunkedFile, final @NotNull TrackState track) { - final var trackSource = resolveDataPath(track.sourceFile()); final var hictPath = chunkedFile.getHdfFilePath(); final String fingerprint; try { - fingerprint = String.join("|", - PRECOMPUTE_CACHE_VERSION, - trackSource.toString(), - String.valueOf(Files.size(trackSource)), - String.valueOf(Files.getLastModifiedTime(trackSource).toMillis()), - hictPath.toString(), - String.valueOf(Files.size(hictPath)), - String.valueOf(Files.getLastModifiedTime(hictPath).toMillis()) - ); 
+ if (track.type() == TrackType.COOLER_WEIGHTS) { + fingerprint = String.join("|", + PRECOMPUTE_CACHE_VERSION, + track.type().name(), + track.sourceFile(), + hictPath.toString(), + String.valueOf(Files.size(hictPath)), + String.valueOf(Files.getLastModifiedTime(hictPath).toMillis()) + ); + } else { + final var trackSource = resolveDataPath(track.sourceFile()); + fingerprint = String.join("|", + PRECOMPUTE_CACHE_VERSION, + trackSource.toString(), + String.valueOf(Files.size(trackSource)), + String.valueOf(Files.getLastModifiedTime(trackSource).toMillis()), + hictPath.toString(), + String.valueOf(Files.size(hictPath)), + String.valueOf(Files.getLastModifiedTime(hictPath).toMillis()) + ); + } } catch (final IOException e) { throw new RuntimeException("Cannot build precompute fingerprint for " + track.sourceFile(), e); } @@ -887,7 +1252,7 @@ private static long fnv1a(long hash, final long value) { return switch (track.type()) { case BIGWIG -> List.of("MAX", "MEAN", "SUM"); case BAM -> List.of("COVERAGE", "READ_DENSITY"); - case BED, VCF -> List.of("DEFAULT"); + case BED, VCF, GFF_GTF, COOLER_WEIGHTS -> List.of("DEFAULT"); case UNSUPPORTED -> List.of("DEFAULT"); }; } @@ -896,7 +1261,7 @@ private static long fnv1a(long hash, final long value) { return switch (track.type()) { case BIGWIG -> track.bigWigAggregationMode().name(); case BAM -> track.bamRenderMode().name(); - case BED, VCF, UNSUPPORTED -> "DEFAULT"; + case BED, VCF, GFF_GTF, COOLER_WEIGHTS, UNSUPPORTED -> "DEFAULT"; }; } @@ -930,7 +1295,7 @@ private static long fnv1a(long hash, final long value) { case COVERAGE -> PrecomputeAggregationStrategy.MEAN_ALL_PIXELS; case READ_DENSITY -> PrecomputeAggregationStrategy.SUM; }; - case BED, VCF, UNSUPPORTED -> PrecomputeAggregationStrategy.MAX; + case BED, VCF, GFF_GTF, COOLER_WEIGHTS, UNSUPPORTED -> PrecomputeAggregationStrategy.MAX; }; } @@ -950,6 +1315,8 @@ private static long fnv1a(long hash, final long value) { final ContigTree.ContigTuple tuple = 
contigs.get(contigIndex); final var descriptor = tuple.descriptor(); final var sourceName = descriptor.getContigNameInSourceFASTA(); + final var originalName = descriptor.getContigName(); + final var displayName = chunkedFile.getContigDisplayName(descriptor.getContigId()); final var sourceStart = descriptor.getOffsetInSourceFASTA(); final var sourceEnd = sourceStart + descriptor.getLengthBp(); final var assemblyStart = assemblyCursor; @@ -968,6 +1335,14 @@ private static long fnv1a(long hash, final long value) { ); orderedSegments.add(segment); sourceToAssemblySegments.computeIfAbsent(sourceName, key -> new ArrayList<>()).add(segment); + if (originalName != null && !originalName.isBlank() && !originalName.equals(sourceName)) { + sourceToAssemblySegments.computeIfAbsent(originalName, key -> new ArrayList<>()).add(segment); + } + if (displayName != null && !displayName.isBlank() + && !displayName.equals(sourceName) + && !displayName.equals(originalName)) { + sourceToAssemblySegments.computeIfAbsent(displayName, key -> new ArrayList<>()).add(segment); + } final var aliasName = linkedFastaAliasesBySource.get(sourceName); if (aliasName != null && !aliasName.equals(sourceName)) { sourceToAssemblySegments.computeIfAbsent(aliasName, key -> new ArrayList<>()).add(segment); @@ -1007,6 +1382,16 @@ private boolean isSupportedTrackPath(final @NotNull String path) { return fallback; } + private static int findTrackIndex(final @NotNull List entries, + final @NotNull String trackId) { + for (int i = 0; i < entries.size(); i++) { + if (entries.get(i).trackId().equals(trackId)) { + return i; + } + } + return -1; + } + private static @NotNull String colorForIndex(final int index) { if (COLOR_PALETTE.isEmpty()) { return "#4e79a7"; @@ -1014,6 +1399,76 @@ private boolean isSupportedTrackPath(final @NotNull String path) { return COLOR_PALETTE.get(Math.floorMod(index, COLOR_PALETTE.size())); } + private static @NotNull TrackDataSource createDataSource(final @NotNull TrackType trackType, + 
final @NotNull Path resolvedPath) { + return switch (trackType) { + case BED -> InMemoryTrackDataSource.fromBed(resolvedPath); + case VCF -> InMemoryTrackDataSource.fromVcf(resolvedPath); + case GFF_GTF -> InMemoryTrackDataSource.fromGffOrGtf(resolvedPath); + case BIGWIG -> new BigWigTrackDataSource(resolvedPath); + case BAM -> new BamTrackDataSource(resolvedPath); + case COOLER_WEIGHTS -> new CoolerWeightsTrackDataSource(); + case UNSUPPORTED -> throw new IllegalStateException("Unexpected unsupported track type"); + }; + } + + private static @NotNull Set buildSourceNameSet(final @NotNull ChunkedFile chunkedFile, + final @NotNull Map linkedFastaAliasesBySource) { + final var names = new TreeSet(String.CASE_INSENSITIVE_ORDER); + for (final var tuple : chunkedFile.getAssemblyInfo().contigs()) { + final var sourceName = tuple.descriptor().getContigNameInSourceFASTA(); + if (sourceName != null && !sourceName.isBlank()) { + names.add(sourceName); + } + final var alias = linkedFastaAliasesBySource.get(sourceName); + if (alias != null && !alias.isBlank()) { + names.add(alias); + } + } + return names; + } + + private static @NotNull Set buildAssemblyNameSet(final @NotNull ChunkedFile chunkedFile) { + final var names = new TreeSet(String.CASE_INSENSITIVE_ORDER); + for (final var tuple : chunkedFile.getAssemblyInfo().contigs()) { + final var descriptor = tuple.descriptor(); + final var originalName = descriptor.getContigName(); + if (originalName != null && !originalName.isBlank()) { + names.add(originalName); + } + final var displayName = chunkedFile.getContigDisplayName(descriptor.getContigId()); + if (displayName != null && !displayName.isBlank()) { + names.add(displayName); + } + } + return names; + } + + private static @NotNull String resolveCompatibilityStatus(final int totalNames, final int matchedAny) { + if (totalNames <= 0 || matchedAny >= totalNames) { + return "ok"; + } + final var ratio = matchedAny / (double) Math.max(1, totalNames); + if (ratio >= 0.5d) { + 
return "warning"; + } + return "error"; + } + + private static @NotNull String buildCompatibilityMessage(final @NotNull TrackType trackType, + final int totalNames, + final int matchedAny, + final int unknownNamesCount) { + if (totalNames <= 0) { + return "Track has no contig/chromosome names to validate."; + } + if (matchedAny >= totalNames) { + return "Track names are compatible with the current assembly."; + } + return "Track " + trackType.name() + " contains " + unknownNamesCount + + " names that do not match current/source assembly names."; + } + private static @NotNull BamRenderMode parseBamRenderMode(final String mode, final @NotNull BamRenderMode fallback) { if (mode == null || mode.isBlank()) { return fallback; @@ -1083,6 +1538,140 @@ private static boolean isBedStrandToken(final String token) { return "+".equals(trimmed) || "-".equals(trimmed) || ".".equals(trimmed); } + private static @Nullable String normalizeStrand(final String token) { + if (!isBedStrandToken(token)) { + return null; + } + final var trimmed = token.trim(); + return ".".equals(trimmed) ? 
null : trimmed; + } + + private static @NotNull Map parseGffAttributes(final @Nullable String rawAttributes, + final boolean gtfMode) { + if (rawAttributes == null || rawAttributes.isBlank() || ".".equals(rawAttributes.trim())) { + return Map.of(); + } + final var parsed = new LinkedHashMap(); + final var tokens = rawAttributes.split(";"); + for (final var token : tokens) { + if (token == null || token.isBlank()) { + continue; + } + final var trimmed = token.trim(); + if (gtfMode) { + final int firstSpace = trimmed.indexOf(' '); + if (firstSpace <= 0 || firstSpace >= trimmed.length() - 1) { + continue; + } + final var key = trimmed.substring(0, firstSpace).trim(); + var value = trimmed.substring(firstSpace + 1).trim(); + if (value.startsWith("\"") && value.endsWith("\"") && value.length() >= 2) { + value = value.substring(1, value.length() - 1); + } + if (!key.isBlank() && !value.isBlank()) { + parsed.putIfAbsent(key, value); + } + } else { + final int eqIndex = trimmed.indexOf('='); + if (eqIndex <= 0 || eqIndex >= trimmed.length() - 1) { + continue; + } + final var key = trimmed.substring(0, eqIndex).trim(); + final var value = trimmed.substring(eqIndex + 1).trim(); + if (!key.isBlank() && !value.isBlank()) { + parsed.putIfAbsent(key, value); + } + } + } + return parsed; + } + + private static @Nullable String firstNonBlank(final @Nullable String... 
candidates) { + if (candidates == null) { + return null; + } + for (final var candidate : candidates) { + if (candidate != null && !candidate.isBlank()) { + return candidate; + } + } + return null; + } + + private static boolean isGffBlockLikeFeature(final @NotNull String featureTypeLower) { + return switch (featureTypeLower) { + case "exon", "cds", "utr", "five_prime_utr", "three_prime_utr", "start_codon", "stop_codon" -> true; + default -> false; + }; + } + + private static boolean isGffTranscriptLikeFeature(final @NotNull String featureTypeLower) { + return switch (featureTypeLower) { + case "transcript", + "mrna", + "ncrna", + "trna", + "rrna", + "snrna", + "snorna", + "lncrna", + "mirna", + "pirna", + "guide_rna", + "primary_transcript", + "pseudogenic_transcript" -> true; + default -> false; + }; + } + + private static boolean isGffGeneLikeFeature(final @NotNull String featureTypeLower) { + return "gene".equals(featureTypeLower) || "pseudogene".equals(featureTypeLower); + } + + private static boolean isGffCodingFeature(final @NotNull String featureTypeLower) { + return "cds".equals(featureTypeLower) || "start_codon".equals(featureTypeLower) || "stop_codon".equals(featureTypeLower); + } + + private static @Nullable String sanitizeGffGroupToken(final @Nullable String token) { + if (token == null || token.isBlank()) { + return null; + } + final var primary = token.split(",")[0].trim(); + if (primary.isBlank()) { + return null; + } + if (primary.startsWith("\"") && primary.endsWith("\"") && primary.length() > 1) { + return primary.substring(1, primary.length() - 1); + } + return primary; + } + + private static @Nullable String resolveGffGroupKey(final @NotNull Map attributes, + final @NotNull String featureTypeLower) { + final var transcriptLike = sanitizeGffGroupToken(firstNonBlank( + attributes.get("transcript_id"), + attributes.get("transcriptId"), + attributes.get("Parent"), + attributes.get("ID") + )); + if (transcriptLike != null) { + return "tx:" + 
transcriptLike; + } + if (isGffGeneLikeFeature(featureTypeLower) || isGffTranscriptLikeFeature(featureTypeLower)) { + final var geneLike = sanitizeGffGroupToken(firstNonBlank( + attributes.get("gene_id"), + attributes.get("gene"), + attributes.get("gene_name"), + attributes.get("Name"), + attributes.get("ID") + )); + if (geneLike != null) { + return "gene:" + geneLike; + } + } + return null; + } + private static @NotNull BufferedReader openMaybeGzipReader(final @NotNull Path filePath) throws IOException { final InputStream baseStream = Files.newInputStream(filePath); final InputStream dataStream; @@ -1229,17 +1818,122 @@ private static long mapAssemblyBpToVisiblePx(final long assemblyBp, final long queryStartPx, final long queryEndPx, final long bpResolution) { - final var clippedSourceStart = Math.max(sourceStart, segment.sourceStart()); - final var clippedSourceEnd = Math.min(sourceEnd, segment.sourceEnd()); - if (clippedSourceEnd <= clippedSourceStart) { - return Optional.empty(); - } - final var segmentLength = segment.sourceEnd() - segment.sourceStart(); - final var localStart = clippedSourceStart - segment.sourceStart(); - final var localEnd = clippedSourceEnd - segment.sourceStart(); - - final long assemblyStart; - final long assemblyEnd; + return projectSourceIntervalOnSegmentRaw( + segment, + sourceStart, + sourceEnd, + value, + label, + queryStartPx, + queryEndPx, + bpResolution + ); + } + + private static @NotNull Optional projectSourceFeatureOnSegment(final @NotNull AssemblySegment segment, + final @NotNull FeatureRange feature, + final long queryStartPx, + final long queryEndPx, + final long bpResolution) { + final var projectedBase = projectSourceIntervalOnSegmentRaw( + segment, + feature.start(), + feature.end(), + feature.value(), + feature.label(), + queryStartPx, + queryEndPx, + bpResolution + ); + if (projectedBase.isEmpty()) { + return Optional.empty(); + } + Long thickStartBp = null; + Long thickEndBp = null; + Long thickStartPx = null; + Long 
thickEndPx = null; + if (feature.thickStart() != null && feature.thickEnd() != null && feature.thickEnd() > feature.thickStart()) { + final var projectedThick = projectSourceIntervalOnSegmentRaw( + segment, + feature.thickStart(), + feature.thickEnd(), + feature.value(), + null, + queryStartPx, + queryEndPx, + bpResolution + ); + if (projectedThick.isPresent()) { + thickStartBp = projectedThick.get().startBp(); + thickEndBp = projectedThick.get().endBp(); + thickStartPx = projectedThick.get().startPx(); + thickEndPx = projectedThick.get().endPx(); + } + } + final var projectedBlocks = new ArrayList(); + for (final var block : feature.blocks()) { + if (block == null || block.end() <= block.start()) { + continue; + } + final var projectedBlock = projectSourceIntervalOnSegmentRaw( + segment, + block.start(), + block.end(), + feature.value(), + null, + queryStartPx, + queryEndPx, + bpResolution + ); + if (projectedBlock.isPresent()) { + final var interval = projectedBlock.get(); + projectedBlocks.add(new ProjectedBlock( + interval.startBp(), + interval.endBp(), + interval.startPx(), + interval.endPx(), + block.coding() + )); + } + } + projectedBlocks.sort(Comparator.comparingLong(ProjectedBlock::startPx)); + final var projected = projectedBase.get(); + return Optional.of(new ProjectedFeature( + projected.startBp(), + projected.endBp(), + projected.startPx(), + projected.endPx(), + projected.value(), + projected.label(), + feature.strand(), + thickStartBp, + thickEndBp, + thickStartPx, + thickEndPx, + feature.featureType(), + projectedBlocks + )); + } + + private static @NotNull Optional projectSourceIntervalOnSegmentRaw(final @NotNull AssemblySegment segment, + final long sourceStart, + final long sourceEnd, + final double value, + final String label, + final long queryStartPx, + final long queryEndPx, + final long bpResolution) { + final var clippedSourceStart = Math.max(sourceStart, segment.sourceStart()); + final var clippedSourceEnd = Math.min(sourceEnd, 
segment.sourceEnd()); + if (clippedSourceEnd <= clippedSourceStart) { + return Optional.empty(); + } + final var segmentLength = segment.sourceEnd() - segment.sourceStart(); + final var localStart = clippedSourceStart - segment.sourceStart(); + final var localEnd = clippedSourceEnd - segment.sourceStart(); + + final long assemblyStart; + final long assemblyEnd; if (!segment.reversed()) { assemblyStart = segment.assemblyStart() + localStart; assemblyEnd = segment.assemblyStart() + localEnd; @@ -1272,7 +1966,14 @@ private static long mapAssemblyBpToVisiblePx(final long assemblyBp, clippedStartPx, clippedEndPx, Math.max(0.0d, value), - label + label, + null, + null, + null, + null, + null, + null, + List.of() )); } @@ -1295,6 +1996,96 @@ private static long mapAssemblyBpToVisiblePx(final long assemblyBp, return Optional.of(new SourceInterval(sourceStart, sourceEnd)); } + private static @NotNull Optional projectSourceIntervalToAssemblyBp(final @NotNull AssemblyBpSegment segment, + final long sourceStart, + final long sourceEnd) { + final var clippedSourceStart = Math.max(sourceStart, segment.sourceStart()); + final var clippedSourceEnd = Math.min(sourceEnd, segment.sourceEnd()); + if (clippedSourceEnd <= clippedSourceStart) { + return Optional.empty(); + } + final var segmentLength = segment.sourceEnd() - segment.sourceStart(); + final var localStart = clippedSourceStart - segment.sourceStart(); + final var localEnd = clippedSourceEnd - segment.sourceStart(); + final long assemblyStart; + final long assemblyEnd; + if (!segment.reversed()) { + assemblyStart = segment.assemblyStart() + localStart; + assemblyEnd = segment.assemblyStart() + localEnd; + } else { + assemblyStart = segment.assemblyStart() + (segmentLength - localEnd); + assemblyEnd = segment.assemblyStart() + (segmentLength - localStart); + } + final var safeStart = Math.min(assemblyStart, assemblyEnd); + final var safeEnd = Math.max(assemblyStart, assemblyEnd); + if (safeEnd <= safeStart) { + return 
Optional.empty(); + } + return Optional.of(new AssemblyBpInterval(safeStart, safeEnd)); + } + + private @NotNull Map> buildSourceToAssemblyBpSegments(final @NotNull ChunkedFile chunkedFile, + final @NotNull Map linkedFastaAliasesBySource) { + final var sourceToAssemblySegments = new HashMap>(); + final var contigs = chunkedFile.getAssemblyInfo().contigs(); + long assemblyCursor = 0L; + for (int contigIndex = 0; contigIndex < contigs.size(); ++contigIndex) { + final ContigTree.ContigTuple tuple = contigs.get(contigIndex); + final var descriptor = tuple.descriptor(); + final var sourceName = descriptor.getContigNameInSourceFASTA(); + final var originalName = descriptor.getContigName(); + final var displayName = chunkedFile.getContigDisplayName(descriptor.getContigId()); + final var sourceStart = descriptor.getOffsetInSourceFASTA(); + final var sourceEnd = sourceStart + descriptor.getLengthBp(); + final var assemblyStart = assemblyCursor; + final var assemblyEnd = assemblyCursor + descriptor.getLengthBp(); + final var segment = new AssemblyBpSegment( + sourceStart, + sourceEnd, + assemblyStart, + assemblyEnd, + tuple.direction() == ContigDirection.REVERSED + ); + sourceToAssemblySegments.computeIfAbsent(sourceName, key -> new ArrayList<>()).add(segment); + if (originalName != null && !originalName.isBlank() && !originalName.equals(sourceName)) { + sourceToAssemblySegments.computeIfAbsent(originalName, key -> new ArrayList<>()).add(segment); + } + if (displayName != null && !displayName.isBlank() + && !displayName.equals(sourceName) + && !displayName.equals(originalName)) { + sourceToAssemblySegments.computeIfAbsent(displayName, key -> new ArrayList<>()).add(segment); + } + final var aliasName = linkedFastaAliasesBySource.get(sourceName); + if (aliasName != null && !aliasName.equals(sourceName)) { + sourceToAssemblySegments.computeIfAbsent(aliasName, key -> new ArrayList<>()).add(segment); + } + assemblyCursor = assemblyEnd; + } + 
sourceToAssemblySegments.values().forEach(list -> list.sort(Comparator.comparingLong(AssemblyBpSegment::sourceStart))); + return sourceToAssemblySegments; + } + + private static @NotNull String resolveFeatureSearchLabel(final @NotNull FeatureRange feature, + final @NotNull String sourceName) { + final var preferredLabel = normalizeBlankToNull(feature.label()); + if (preferredLabel != null) { + return preferredLabel; + } + final var featureType = normalizeBlankToNull(feature.featureType()); + if (featureType != null) { + return featureType; + } + return sourceName + ":" + feature.start() + "-" + feature.end(); + } + + private static @Nullable String normalizeBlankToNull(final @Nullable String value) { + if (value == null) { + return null; + } + final var trimmed = value.trim(); + return trimmed.isEmpty() ? null : trimmed; + } + private static @NotNull List aggregateFeatures(final @NotNull List projectedFeatures, final long queryStartPx, final long queryEndPx, @@ -1559,13 +2350,38 @@ private static void accumulateReadDensityValue(final long featureStart, private static @NotNull List toBins(final @NotNull List projectedFeatures) { return projectedFeatures.stream() - .map(f -> new TrackBin(f.startBp(), f.endBp(), f.value(), 1L, f.label(), f.startPx(), f.endPx())) + .map(f -> new TrackBin( + f.startBp(), + f.endBp(), + f.value(), + 1L, + f.label(), + f.startPx(), + f.endPx(), + f.strand(), + f.thickStartBp(), + f.thickEndBp(), + f.thickStartPx(), + f.thickEndPx(), + f.featureType(), + f.blocks().stream() + .map(block -> new TrackBin.TrackBinBlock( + block.startBp(), + block.endBp(), + block.startPx(), + block.endPx(), + block.coding() + )) + .toList() + )) .toList(); } private static @NotNull List queryBinsForTrack(final @NotNull TrackType type, + final @NotNull ChunkedFile chunkedFile, final @NotNull TrackDataSource dataSource, final @NotNull Map> sourceToAssemblySegments, + final @NotNull List orderedSegments, final long queryStartPx, final long queryEndPx, final int 
widthPx, @@ -1592,7 +2408,17 @@ private static void accumulateReadDensityValue(final long featureStart, bigWigAggregationMode ); } - if (type == TrackType.BED) { + if (type == TrackType.COOLER_WEIGHTS && dataSource instanceof CoolerWeightsTrackDataSource coolerWeightsTrackDataSource) { + return coolerWeightsTrackDataSource.queryBins( + chunkedFile, + orderedSegments, + queryStartPx, + queryEndPx, + widthPx, + bpResolution + ); + } + if (type == TrackType.BED || type == TrackType.GFF_GTF) { if (dataSource instanceof InMemoryTrackDataSource inMemoryTrackDataSource) { return inMemoryTrackDataSource.queryBins( sourceToAssemblySegments, @@ -1626,8 +2452,10 @@ private static void accumulateReadDensityValue(final long featureStart, private enum TrackType { BED, VCF, + GFF_GTF, BIGWIG, BAM, + COOLER_WEIGHTS, UNSUPPORTED; private static @NotNull TrackType fromPath(final @NotNull Path path) { @@ -1638,6 +2466,14 @@ private enum TrackType { if (lowered.endsWith(".vcf") || lowered.endsWith(".vcf.gz")) { return VCF; } + if (lowered.endsWith(".gff") + || lowered.endsWith(".gff.gz") + || lowered.endsWith(".gff3") + || lowered.endsWith(".gff3.gz") + || lowered.endsWith(".gtf") + || lowered.endsWith(".gtf.gz")) { + return GFF_GTF; + } if (lowered.endsWith(".bw") || lowered.endsWith(".bigwig")) { return BIGWIG; } @@ -1659,9 +2495,22 @@ private enum BigWigAggregationMode { SUM } + private enum RenderStyle { + SIGNAL, + FEATURE + } + private interface TrackDataSource extends AutoCloseable { long featureCountHint(); + @NotNull + Set sourceNames(); + + @NotNull + default RenderStyle renderStyle() { + return RenderStyle.SIGNAL; + } + @NotNull List projectFeatures(@NotNull Map> sourceToAssemblySegments, long queryStartPx, @@ -1676,11 +2525,17 @@ default void close() throws Exception { private record InMemoryTrackDataSource(@NotNull Map> featuresBySource, long featureCount, - boolean hasSignalValues) implements TrackDataSource { + boolean hasSignalValues, + boolean hasStructuredFeatures, + 
@NotNull RenderStyle preferredRenderStyle) implements TrackDataSource { static @NotNull InMemoryTrackDataSource fromBed(final @NotNull Path filePath) { final var features = new HashMap>(); long total = 0L; boolean hasSignalValues = false; + boolean hasStructuredFeatures = false; + boolean hasStrandFeatures = false; + boolean hasThickFeatures = false; + boolean hasBed12Rows = false; try (final BufferedReader reader = openMaybeGzipReader(filePath)) { String line; long lineNo = 0L; @@ -1702,32 +2557,63 @@ private record InMemoryTrackDataSource(@NotNull Map> final var col4Numeric = fields.length >= 4 ? parseNullableDouble(fields[3]) : null; final var col5Numeric = fields.length >= 5 ? parseNullableDouble(fields[4]) : null; final var hasStrand = fields.length >= 6 && isBedStrandToken(fields[5]); - final String label; + final String strand = hasStrand ? normalizeStrand(fields[5]) : null; + final String label = (fields.length >= 4 && !fields[3].isBlank()) ? fields[3] : null; + Long thickStart = null; + Long thickEnd = null; + if (fields.length >= 8) { + try { + final var parsedThickStart = Long.parseLong(fields[6]); + final var parsedThickEnd = Long.parseLong(fields[7]); + final var clampedThickStart = Math.max(start, Math.min(parsedThickStart, end)); + final var clampedThickEnd = Math.max(clampedThickStart, Math.min(parsedThickEnd, end)); + if (clampedThickEnd > clampedThickStart) { + thickStart = clampedThickStart; + thickEnd = clampedThickEnd; + hasThickFeatures = true; + } + } catch (final NumberFormatException ignored) { + // Optional BED fields, ignore malformed thickStart/thickEnd. + } + } final double value; if (fields.length == 4 && col4Numeric != null) { - // BEDGraph-style row: chrom start end value - label = null; + // BEDGraph row: chrom start end value value = Math.max(0.0d, col4Numeric); } else if (hasStrand) { - // BED6 alignments: score is typically MAPQ-like, not quantitative signal; use unit coverage. 
- label = (fields.length >= 4 && !fields[3].isBlank()) ? fields[3] : null; + // BED6/BED12 annotation-like rows are rendered as features. value = 1.0d; } else { - label = (fields.length >= 4 && !fields[3].isBlank()) ? fields[3] : null; value = Math.max(0.0d, col5Numeric != null ? col5Numeric : 1.0d); } + final var featureType = (fields.length >= 12) ? "BED12" : (hasStrand ? "BED6" : "BED"); + if (fields.length >= 12) { + hasBed12Rows = true; + } features.computeIfAbsent(sourceName, ignored -> new ArrayList<>()) - .add(new FeatureRange(start, end, value, label)); + .add(new FeatureRange(start, end, value, label, strand, thickStart, thickEnd, featureType, List.of())); if (Math.abs(value - 1.0d) > 1e-9) { hasSignalValues = true; } + if (strand != null || (thickStart != null && thickEnd != null)) { + hasStructuredFeatures = true; + if (strand != null) { + hasStrandFeatures = true; + } + } total++; } } catch (final IOException e) { throw new RuntimeException("Failed to parse BED track " + filePath, e); } features.values().forEach(list -> list.sort(Comparator.comparingLong(FeatureRange::start))); - return new InMemoryTrackDataSource(features, total, hasSignalValues); + return new InMemoryTrackDataSource( + features, + total, + hasSignalValues, + hasStructuredFeatures, + resolveBedRenderStyle(total, hasStrandFeatures, hasThickFeatures, hasBed12Rows) + ); } static @NotNull InMemoryTrackDataSource fromVcf(final @NotNull Path filePath) { @@ -1754,14 +2640,100 @@ private record InMemoryTrackDataSource(@NotNull Map> final var alt = fields[4]; final var label = (id != null) ? 
id : (ref + ">" + alt); features.computeIfAbsent(sourceName, ignored -> new ArrayList<>()) - .add(new FeatureRange(start, end, 1.0d, label)); + .add(new FeatureRange(start, end, 1.0d, label, null, null, null, "VCF", List.of())); total++; } } catch (final IOException e) { throw new RuntimeException("Failed to parse VCF track " + filePath, e); } features.values().forEach(list -> list.sort(Comparator.comparingLong(FeatureRange::start))); - return new InMemoryTrackDataSource(features, total, false); + return new InMemoryTrackDataSource(features, total, false, true, RenderStyle.FEATURE); + } + + static @NotNull InMemoryTrackDataSource fromGffOrGtf(final @NotNull Path filePath) { + final var features = new HashMap>(); + final var groupedFeatures = new HashMap>(); + long total = 0L; + boolean hasSignalValues = false; + final var lowered = filePath.getFileName().toString().toLowerCase(Locale.ROOT); + final var gtfMode = lowered.endsWith(".gtf") || lowered.endsWith(".gtf.gz"); + try (final BufferedReader reader = openMaybeGzipReader(filePath)) { + String line; + long lineNo = 0L; + while ((line = reader.readLine()) != null) { + lineNo++; + if (line.isBlank() || line.startsWith("#")) { + continue; + } + final var fields = line.split("\t"); + if (fields.length < 9) { + continue; + } + final var sourceName = fields[0]; + final var featureType = (fields[2] == null || fields[2].isBlank()) ? "feature" : fields[2]; + final var featureTypeLower = featureType.trim().toLowerCase(Locale.ROOT); + final long start1Based = parseLongOrThrow(fields[3], "GFF/GTF start", lineNo); + final long end1Based = parseLongOrThrow(fields[4], "GFF/GTF end", lineNo); + final long start = Math.max(0L, start1Based - 1L); + final long end = Math.max(start + 1L, end1Based); + final var score = parseNullableDouble(fields[5]); + final double value = Math.max(0.0d, score != null ? 
score : 1.0d); + final var strand = normalizeStrand(fields[6]); + final var attributes = parseGffAttributes(fields[8], gtfMode); + final var label = firstNonBlank( + attributes.get("Name"), + attributes.get("gene_name"), + attributes.get("gene_id"), + attributes.get("transcript_id"), + attributes.get("ID"), + attributes.get("Parent"), + featureType + ); + final var groupKey = resolveGffGroupKey(attributes, featureTypeLower); + final boolean groupable = + groupKey != null && + (isGffBlockLikeFeature(featureTypeLower) || + isGffTranscriptLikeFeature(featureTypeLower)); + if (groupable) { + final var bySource = groupedFeatures.computeIfAbsent(sourceName, ignored -> new LinkedHashMap<>()); + final var builder = bySource.computeIfAbsent( + groupKey, + ignored -> new GffTranscriptFeatureBuilder(groupKey) + ); + builder.accept( + start, + end, + value, + label, + strand, + featureType, + featureTypeLower, + attributes + ); + } else { + features.computeIfAbsent(sourceName, ignored -> new ArrayList<>()) + .add(new FeatureRange(start, end, value, label, strand, null, null, featureType, List.of())); + } + if (Math.abs(value - 1.0d) > 1e-9) { + hasSignalValues = true; + } + total++; + } + } catch (final IOException e) { + throw new RuntimeException("Failed to parse GFF/GTF track " + filePath, e); + } + for (final var entry : groupedFeatures.entrySet()) { + final var sourceName = entry.getKey(); + final var destination = features.computeIfAbsent(sourceName, ignored -> new ArrayList<>()); + entry.getValue().values().forEach(builder -> { + final var aggregated = builder.toFeatureRange(); + if (aggregated != null) { + destination.add(aggregated); + } + }); + } + features.values().forEach(list -> list.sort(Comparator.comparingLong(FeatureRange::start))); + return new InMemoryTrackDataSource(features, total, hasSignalValues, true, RenderStyle.FEATURE); } @Override @@ -1769,63 +2741,34 @@ public long featureCountHint() { return this.featureCount; } + @Override + public @NotNull Set 
sourceNames() { + return this.featuresBySource.keySet(); + } + + @Override + public @NotNull RenderStyle renderStyle() { + return this.preferredRenderStyle; + } + @Override public @NotNull List projectFeatures(final @NotNull Map> sourceToAssemblySegments, final long queryStartPx, final long queryEndPx, final long bpResolution) { final var projected = new ArrayList(); - for (final var entry : this.featuresBySource.entrySet()) { - final var sourceName = entry.getKey(); - final var sourceFeatures = entry.getValue(); - if (sourceFeatures.isEmpty()) { - continue; - } - final var assemblySegments = sourceToAssemblySegments.get(sourceName); - if (assemblySegments == null || assemblySegments.isEmpty()) { - continue; - } - for (final var segment : assemblySegments) { - final var sourceIntervalOptional = mapVisiblePxIntervalToSegmentSource( - segment, - queryStartPx, - queryEndPx, - bpResolution - ); - if (sourceIntervalOptional.isEmpty()) { - continue; - } - final var sourceInterval = sourceIntervalOptional.get(); - int index = lowerBoundByStart(sourceFeatures, sourceInterval.start()); - if (index > 0) { - index--; - } - for (int i = index; i < sourceFeatures.size(); i++) { - final var feature = sourceFeatures.get(i); - if (feature.start() >= sourceInterval.end()) { - break; - } - if (feature.end() <= sourceInterval.start()) { - continue; - } - projectSourceIntervalOnSegment( - segment, - feature.start(), - feature.end(), - feature.value(), - feature.label(), - queryStartPx, - queryEndPx, - bpResolution - ).ifPresent(projected::add); - if (projected.size() > MAX_FEATURES_PER_QUERY) { - projected.sort(Comparator.comparingLong(ProjectedFeature::startPx)); - return projected; - } - } - } - } + forEachProjectedFeature( + sourceToAssemblySegments, + queryStartPx, + queryEndPx, + bpResolution, + feature -> projected.add(feature), + MAX_FEATURES_PER_QUERY + 1 + ); projected.sort(Comparator.comparingLong(ProjectedFeature::startPx)); + if (projected.size() > MAX_FEATURES_PER_QUERY) 
{ + return projected.subList(0, MAX_FEATURES_PER_QUERY); + } return projected; } @@ -1837,6 +2780,17 @@ public long featureCountHint() { final var bucketCount = Math.max(1, widthPx); final var span = Math.max(1L, queryEndPx - queryStartPx); final var bucketSpan = Math.max(1.0d, span / (double) bucketCount); + + if (this.renderStyle() == RenderStyle.FEATURE) { + return queryFeatureBins( + sourceToAssemblySegments, + queryStartPx, + queryEndPx, + widthPx, + bpResolution + ); + } + if (this.hasSignalValues()) { final double[] maxValues = new double[bucketCount]; final double[] weightedSums = new double[bucketCount]; @@ -1858,7 +2812,8 @@ public long featureCountHint() { weightedSums, overlapSums, counts - ) + ), + Integer.MAX_VALUE ); return finalizeBigWigBins( queryStartPx, @@ -1871,6 +2826,7 @@ public long featureCountHint() { BigWigAggregationMode.MAX ); } + final double[] values = new double[bucketCount]; final long[] counts = new long[bucketCount]; forEachProjectedFeature( @@ -1886,16 +2842,154 @@ public long featureCountHint() { bucketSpan, values, counts - ) + ), + Integer.MAX_VALUE ); return finalizeBins(queryStartPx, queryEndPx, bucketSpan, values, counts); } + private @NotNull List queryFeatureBins(final @NotNull Map> sourceToAssemblySegments, + final long queryStartPx, + final long queryEndPx, + final int widthPx, + final long bpResolution) { + final int safeWidth = Math.max(1, widthPx); + final int maxDirectFeatures = Math.max( + 8192, + Math.min(MAX_FEATURES_PER_QUERY, safeWidth * 48) + ); + final var projected = new ArrayList(Math.min(maxDirectFeatures, 8192)); + forEachProjectedFeature( + sourceToAssemblySegments, + queryStartPx, + queryEndPx, + bpResolution, + projected::add, + maxDirectFeatures + 1 + ); + projected.sort( + Comparator.comparingLong(ProjectedFeature::startPx) + .thenComparingLong(ProjectedFeature::endPx) + ); + if (projected.size() <= maxDirectFeatures) { + return toBins(projected); + } + return downsampleFeatureBins(projected, 
queryStartPx, queryEndPx, safeWidth); + } + + private static @NotNull List downsampleFeatureBins(final @NotNull List projected, + final long queryStartPx, + final long queryEndPx, + final int widthPx) { + if (projected.isEmpty()) { + return List.of(); + } + final int bucketCount = Math.max(1, widthPx); + final double bucketSpan = Math.max(1.0d, Math.max(1L, queryEndPx - queryStartPx) / (double) bucketCount); + final int maxFeaturesPerBucket = 3; + @SuppressWarnings("unchecked") + final ArrayList[] byBucket = new ArrayList[bucketCount]; + for (final var feature : projected) { + if (feature.endPx() <= queryStartPx || feature.startPx() >= queryEndPx) { + continue; + } + final var centerPx = feature.startPx() + Math.max(0L, (feature.endPx() - feature.startPx()) / 2L); + final int bucket = Math.max( + 0, + Math.min( + bucketCount - 1, + (int) Math.floor((centerPx - queryStartPx) / bucketSpan) + ) + ); + var list = byBucket[bucket]; + if (list == null) { + list = new ArrayList<>(maxFeaturesPerBucket + 1); + byBucket[bucket] = list; + } + list.add(feature); + } + + final var selected = new ArrayList(bucketCount * maxFeaturesPerBucket); + for (final var bucketFeatures : byBucket) { + if (bucketFeatures == null || bucketFeatures.isEmpty()) { + continue; + } + bucketFeatures.sort(InMemoryTrackDataSource::compareProjectedFeaturesForDisplay); + for (int idx = 0; idx < Math.min(maxFeaturesPerBucket, bucketFeatures.size()); idx++) { + selected.add(bucketFeatures.get(idx)); + } + } + selected.sort( + Comparator.comparingLong(ProjectedFeature::startPx) + .thenComparingInt(feature -> featureHierarchyDepth(feature.featureType())) + .thenComparingLong(ProjectedFeature::endPx) + ); + return toBins(selected); + } + + private static int compareProjectedFeaturesForDisplay(final @NotNull ProjectedFeature left, + final @NotNull ProjectedFeature right) { + final int depthCmp = Integer.compare( + featureHierarchyDepth(right.featureType()), + featureHierarchyDepth(left.featureType()) + ); + 
if (depthCmp != 0) { + return depthCmp; + } + final int blockCmp = Integer.compare(right.blocks().size(), left.blocks().size()); + if (blockCmp != 0) { + return blockCmp; + } + final long leftSpan = Math.max(1L, left.endPx() - left.startPx()); + final long rightSpan = Math.max(1L, right.endPx() - right.startPx()); + final int spanCmp = Long.compare(rightSpan, leftSpan); + if (spanCmp != 0) { + return spanCmp; + } + final int startCmp = Long.compare(left.startPx(), right.startPx()); + if (startCmp != 0) { + return startCmp; + } + return Long.compare(left.endPx(), right.endPx()); + } + + private static int featureHierarchyDepth(final @Nullable String featureType) { + if (featureType == null || featureType.isBlank()) { + return 1; + } + final var normalized = featureType.trim().toLowerCase(Locale.ROOT); + if (isGffGeneLikeFeature(normalized)) { + return 0; + } + if (isGffTranscriptLikeFeature(normalized)) { + return 1; + } + if (isGffBlockLikeFeature(normalized)) { + return 2; + } + return 1; + } + + private static @NotNull RenderStyle resolveBedRenderStyle(final long featureCount, + final boolean hasStrandFeatures, + final boolean hasThickFeatures, + final boolean hasBed12Rows) { + if (hasThickFeatures || hasBed12Rows) { + return RenderStyle.FEATURE; + } + if (!hasStrandFeatures) { + return RenderStyle.SIGNAL; + } + return featureCount <= BED_FEATURE_STYLE_MAX_FEATURES ? 
RenderStyle.FEATURE : RenderStyle.SIGNAL; + } + private void forEachProjectedFeature(final @NotNull Map> sourceToAssemblySegments, final long queryStartPx, final long queryEndPx, final long bpResolution, - final @NotNull java.util.function.Consumer consumer) { + final @NotNull java.util.function.Consumer consumer, + final int maxFeatures) { + int emitted = 0; for (final var entry : this.featuresBySource.entrySet()) { final var sourceName = entry.getKey(); final var sourceFeatures = entry.getValue(); @@ -1929,16 +3023,21 @@ private void forEachProjectedFeature(final @NotNull Map= maxFeatures) { + return; + } } } } @@ -1959,6 +3058,136 @@ private static int lowerBoundByStart(final @NotNull List features, } } + private static final class CoolerWeightsTrackDataSource implements TrackDataSource { + @Override + public long featureCountHint() { + return -1L; + } + + @Override + public @NotNull Set sourceNames() { + return Set.of(); + } + + @Override + public @NotNull List projectFeatures(final @NotNull Map> sourceToAssemblySegments, + final long queryStartPx, + final long queryEndPx, + final long bpResolution) { + return List.of(); + } + + public @NotNull List queryBins(final @NotNull ChunkedFile chunkedFile, + final @NotNull List orderedSegments, + final long queryStartPx, + final long queryEndPx, + final int widthPx, + final long bpResolution) { + final var resolutionOrder = chunkedFile.getResolutionToIndex().get(bpResolution); + if (resolutionOrder == null || resolutionOrder <= 0) { + return List.of(); + } + final var resolutionDescriptor = ResolutionDescriptor.fromResolutionOrder(resolutionOrder); + final var atus = chunkedFile.matrixQueries().getATUsForRange( + resolutionDescriptor, + queryStartPx, + queryEndPx, + true + ); + if (atus.isEmpty()) { + return List.of(); + } + + final int bucketCount = Math.max(1, widthPx); + final long span = Math.max(1L, queryEndPx - queryStartPx); + final double bucketSpan = Math.max(1.0d, span / (double) bucketCount); + final 
double[] maxValues = new double[bucketCount]; + final long[] counts = new long[bucketCount]; + + long pxCursor = queryStartPx; + for (final var atu : atus) { + final var weights = atu.getStripeDescriptor().bin_weights(); + final var start = atu.getStartIndexInStripeIncl(); + final var end = atu.getEndIndexInStripeExcl(); + if (start < 0 || end <= start || end > weights.length) { + continue; + } + if (atu.getDirection() == ATUDirection.FORWARD) { + for (int i = start; i < end; i++) { + accumulateCoolerWeightValue( + pxCursor, + weights[i], + queryStartPx, + bucketSpan, + maxValues, + counts + ); + pxCursor++; + } + } else { + for (int i = end - 1; i >= start; i--) { + accumulateCoolerWeightValue( + pxCursor, + weights[i], + queryStartPx, + bucketSpan, + maxValues, + counts + ); + pxCursor++; + } + } + } + return finalizeCoolerWeightBins( + queryStartPx, + queryEndPx, + bucketSpan, + orderedSegments, + bpResolution, + maxValues, + counts + ); + } + + private static void accumulateCoolerWeightValue(final long valuePx, + final double value, + final long queryStartPx, + final double bucketSpan, + final double[] maxValues, + final long[] counts) { + if (!Double.isFinite(value)) { + return; + } + final var safeValue = Math.max(0.0d, value); + int idx = (int) Math.floor((valuePx - queryStartPx) / bucketSpan); + idx = Math.max(0, Math.min(idx, maxValues.length - 1)); + maxValues[idx] = Math.max(maxValues[idx], safeValue); + counts[idx]++; + } + + private static @NotNull List finalizeCoolerWeightBins(final long queryStartPx, + final long queryEndPx, + final double bucketSpan, + final @NotNull List orderedSegments, + final long bpResolution, + final double[] maxValues, + final long[] counts) { + final var bins = new ArrayList(counts.length); + for (int i = 0; i < counts.length; i++) { + if (counts[i] <= 0L) { + continue; + } + final var startPx = queryStartPx + (long) Math.floor(i * bucketSpan); + final var endPx = Math.min(queryEndPx, queryStartPx + (long) Math.ceil((i + 1) 
* bucketSpan)); + final var safeEndPx = Math.max(startPx + 1L, endPx); + final var startBp = mapVisiblePxToAssemblyBp(startPx, orderedSegments, bpResolution); + final var endBp = mapVisiblePxToAssemblyBp(Math.max(startPx, safeEndPx - 1L), orderedSegments, bpResolution) + bpResolution; + bins.add(new TrackBin(startBp, endBp, maxValues[i], counts[i], null, startPx, safeEndPx)); + } + return bins; + } + } + private static final class BigWigTrackDataSource implements TrackDataSource { private final @NotNull Path path; private final @NotNull BBFileReader reader; @@ -1983,6 +3212,11 @@ public long featureCountHint() { return -1L; } + @Override + public @NotNull Set sourceNames() { + return this.sourceNames; + } + @Override public synchronized @NotNull List projectFeatures(final @NotNull Map> sourceToAssemblySegments, final long queryStartPx, @@ -2189,6 +3423,11 @@ public long featureCountHint() { return -1L; } + @Override + public @NotNull Set sourceNames() { + return this.sequenceNames; + } + @Override public synchronized @NotNull List projectFeatures(final @NotNull Map> sourceToAssemblySegments, final long queryStartPx, @@ -2338,6 +3577,9 @@ private static int clampToInt(final long value) { private record SourceInterval(long start, long end) { } + private record AssemblyBpInterval(long startBp, long endBp) { + } + private record QueryPxRange(long startPx, long endPx) { } @@ -2373,6 +3615,13 @@ private record SegmentBuildResult(@NotNull Map> so long totalVisiblePixels) { } + private record AssemblyBpSegment(long sourceStart, + long sourceEnd, + long assemblyStart, + long assemblyEnd, + boolean reversed) { + } + private record AssemblySegment(long sourceStart, long sourceEnd, long assemblyStart, @@ -2382,7 +3631,196 @@ private record AssemblySegment(long sourceStart, long visiblePxEnd) { } - private record FeatureRange(long start, long end, double value, String label) { + private static final class GffTranscriptFeatureBuilder { + private final String groupKey; + private 
final List exonIntervals = new ArrayList<>(); + private final List codingIntervals = new ArrayList<>(); + private long start = Long.MAX_VALUE; + private long end = Long.MIN_VALUE; + private String label; + private String strand; + private String featureType; + private double maxValue = 1.0d; + private boolean hasCustomValue = false; + + private GffTranscriptFeatureBuilder(final @NotNull String groupKey) { + this.groupKey = groupKey; + } + + private void accept(final long start, + final long end, + final double value, + final String fallbackLabel, + final String strand, + final String featureType, + final String featureTypeLower, + final @NotNull Map attributes) { + this.start = Math.min(this.start, start); + this.end = Math.max(this.end, end); + if (strand != null && this.strand == null) { + this.strand = strand; + } + final var resolvedLabel = firstNonBlank( + attributes.get("Name"), + attributes.get("transcript_name"), + attributes.get("gene_name"), + attributes.get("gene_id"), + attributes.get("transcript_id"), + attributes.get("ID"), + attributes.get("Parent"), + fallbackLabel + ); + if (shouldPreferLabel(this.label, resolvedLabel, this.featureType)) { + this.label = resolvedLabel; + } + if (this.featureType == null || isGffTranscriptLikeFeature(featureTypeLower) || isGffGeneLikeFeature(featureTypeLower)) { + this.featureType = featureType; + } + if (Math.abs(value - 1.0d) > 1e-9) { + this.maxValue = Math.max(this.maxValue, value); + this.hasCustomValue = true; + } + if (isGffBlockLikeFeature(featureTypeLower)) { + final var interval = new LongInterval(start, end); + this.exonIntervals.add(interval); + if (isGffCodingFeature(featureTypeLower)) { + this.codingIntervals.add(interval); + } + } + } + + private @Nullable FeatureRange toFeatureRange() { + final var mergedExons = mergeIntervals(this.exonIntervals); + final var mergedCoding = mergeIntervals(this.codingIntervals); + final var blockIntervals = mergedExons.isEmpty() ? 
mergedCoding : mergedExons; + final var blocks = new ArrayList(blockIntervals.size()); + for (final var interval : blockIntervals) { + blocks.add(new FeatureBlock( + interval.start(), + interval.end(), + overlapsAny(mergedCoding, interval) + )); + } + long effectiveStart = this.start; + long effectiveEnd = this.end; + if (effectiveStart == Long.MAX_VALUE || effectiveEnd <= effectiveStart) { + if (!blockIntervals.isEmpty()) { + effectiveStart = blockIntervals.get(0).start(); + effectiveEnd = blockIntervals.get(blockIntervals.size() - 1).end(); + } else { + return null; + } + } + Long thickStart = null; + Long thickEnd = null; + if (!mergedCoding.isEmpty()) { + thickStart = mergedCoding.get(0).start(); + thickEnd = mergedCoding.get(mergedCoding.size() - 1).end(); + } + final var resolvedFeatureType = firstNonBlank(this.featureType, "transcript"); + final var resolvedLabel = firstNonBlank(this.label, this.groupKey, resolvedFeatureType); + final var resolvedValue = this.hasCustomValue ? this.maxValue : 1.0d; + return new FeatureRange( + effectiveStart, + Math.max(effectiveStart + 1L, effectiveEnd), + resolvedValue, + resolvedLabel, + this.strand, + thickStart, + thickEnd, + resolvedFeatureType, + blocks + ); + } + + private static @NotNull List mergeIntervals(final @NotNull List intervals) { + if (intervals.isEmpty()) { + return List.of(); + } + final var sorted = new ArrayList<>(intervals); + sorted.sort(Comparator.comparingLong(LongInterval::start).thenComparingLong(LongInterval::end)); + final var merged = new ArrayList(sorted.size()); + long currentStart = sorted.get(0).start(); + long currentEnd = sorted.get(0).end(); + for (int i = 1; i < sorted.size(); i++) { + final var interval = sorted.get(i); + if (interval.start() <= currentEnd) { + currentEnd = Math.max(currentEnd, interval.end()); + continue; + } + merged.add(new LongInterval(currentStart, currentEnd)); + currentStart = interval.start(); + currentEnd = interval.end(); + } + merged.add(new 
LongInterval(currentStart, currentEnd)); + return merged; + } + + private static boolean overlapsAny(final @NotNull List codingIntervals, + final @NotNull LongInterval target) { + for (final var coding : codingIntervals) { + if (coding.end() <= target.start()) { + continue; + } + if (coding.start() >= target.end()) { + break; + } + return true; + } + return false; + } + + private static boolean shouldPreferLabel(final @Nullable String current, + final @Nullable String candidate, + final @Nullable String featureType) { + if (candidate == null || candidate.isBlank()) { + return false; + } + if (current == null || current.isBlank()) { + return true; + } + final var currentLower = current.trim().toLowerCase(Locale.ROOT); + final var candidateLower = candidate.trim().toLowerCase(Locale.ROOT); + if (currentLower.equals(candidateLower)) { + return false; + } + final var featureTypeLower = featureType == null ? "" : featureType.trim().toLowerCase(Locale.ROOT); + final boolean currentGeneric = + currentLower.equals(featureTypeLower) || + currentLower.startsWith("tx:") || + currentLower.startsWith("gene:"); + final boolean candidateGeneric = + candidateLower.equals(featureTypeLower) || + candidateLower.startsWith("tx:") || + candidateLower.startsWith("gene:"); + return currentGeneric && !candidateGeneric; + } + } + + private record LongInterval(long start, long end) { + } + + private record FeatureBlock(long start, + long end, + boolean coding) { + } + + private record FeatureRange(long start, + long end, + double value, + String label, + String strand, + Long thickStart, + Long thickEnd, + String featureType, + @NotNull List blocks) { + } + + private record ProjectedBlock(long startBp, + long endBp, + long startPx, + long endPx, + boolean coding) { } private record ProjectedFeature(long startBp, @@ -2390,7 +3828,14 @@ private record ProjectedFeature(long startBp, long startPx, long endPx, double value, - String label) { + String label, + String strand, + Long thickStartBp, 
+ Long thickEndBp, + Long thickStartPx, + Long thickEndPx, + String featureType, + @NotNull List blocks) { } @Getter @@ -2415,6 +3860,57 @@ public static final class TracksPrecomputeStatus { private final @NotNull String processedDirectory; } + @Getter + @RequiredArgsConstructor + public static final class TrackCompatibilityReport { + private final @NotNull String filename; + private final @NotNull String trackType; + private final @NotNull String status; + private final int totalNames; + private final int matchedSourceNames; + private final int matchedAssemblyNames; + private final int matchedAnyNames; + private final @NotNull List unknownNames; + private final @NotNull String recommendation; + private final @NotNull String message; + } + + @Getter + @RequiredArgsConstructor + public static final class FeatureSearchResponse { + private final @NotNull String query; + private final int limit; + private final int offset; + private final boolean hasMore; + private final @NotNull List hits; + } + + @Getter + @RequiredArgsConstructor + public static final class FeatureSearchHit { + private final @NotNull String trackId; + private final @NotNull String trackName; + private final @NotNull String sourceName; + private final @NotNull String label; + private final String featureType; + private final String strand; + private final long startBp; + private final long endBp; + } + + @Getter + @RequiredArgsConstructor + public static final class FeatureContextResponse { + private final long startBp; + private final long endBp; + private final long contextStartBp; + private final long contextEndBp; + private final double marginScreens; + private final int contextWidthPx; + private final long bpResolution; + private final @NotNull QueryResult query; + } + private static final class TrackPrecomputeRuntime { private final String trackId; private volatile String trackName; @@ -2527,8 +4023,10 @@ public static final class TrackSummary { private final @NotNull String color; private 
final boolean visible; private final long featureCount; + private final @NotNull String renderStyle; private final @NotNull String renderMode; private final @NotNull String aggregationMode; + private final boolean logScale; } @Getter @@ -2538,6 +4036,7 @@ public static final class TrackRender { private final @NotNull String name; private final @NotNull String type; private final @NotNull String color; + private final @NotNull String renderStyle; private final @NotNull List bins; private final double maxValue; private final String error; @@ -2552,13 +4051,20 @@ public static final class TrackBin { private final String label; private final Long startPx; private final Long endPx; + private final String strand; + private final Long thickStartBp; + private final Long thickEndBp; + private final Long thickStartPx; + private final Long thickEndPx; + private final String featureType; + private final List blocks; public TrackBin(final long startBp, final long endBp, final double value, final long count, final String label) { - this(startBp, endBp, value, count, label, null, null); + this(startBp, endBp, value, count, label, null, null, null, null, null, null, null, null, List.of()); } public TrackBin(final long startBp, @@ -2568,6 +4074,54 @@ public TrackBin(final long startBp, final String label, final Long startPx, final Long endPx) { + this(startBp, endBp, value, count, label, startPx, endPx, null, null, null, null, null, null, List.of()); + } + + public TrackBin(final long startBp, + final long endBp, + final double value, + final long count, + final String label, + final Long startPx, + final Long endPx, + final String strand, + final Long thickStartBp, + final Long thickEndBp, + final Long thickStartPx, + final Long thickEndPx, + final String featureType) { + this( + startBp, + endBp, + value, + count, + label, + startPx, + endPx, + strand, + thickStartBp, + thickEndBp, + thickStartPx, + thickEndPx, + featureType, + List.of() + ); + } + + public TrackBin(final long 
startBp, + final long endBp, + final double value, + final long count, + final String label, + final Long startPx, + final Long endPx, + final String strand, + final Long thickStartBp, + final Long thickEndBp, + final Long thickStartPx, + final Long thickEndPx, + final String featureType, + final List blocks) { this.startBp = startBp; this.endBp = endBp; this.value = value; @@ -2575,6 +4129,23 @@ public TrackBin(final long startBp, this.label = label; this.startPx = startPx; this.endPx = endPx; + this.strand = strand; + this.thickStartBp = thickStartBp; + this.thickEndBp = thickEndBp; + this.thickStartPx = thickStartPx; + this.thickEndPx = thickEndPx; + this.featureType = featureType; + this.blocks = blocks == null ? List.of() : blocks; + } + + @Getter + @RequiredArgsConstructor + public static final class TrackBinBlock { + private final long startBp; + private final long endBp; + private final long startPx; + private final long endPx; + private final boolean coding; } } @@ -2586,7 +4157,8 @@ private record TrackState(@NotNull String trackId, boolean visible, @NotNull TrackDataSource dataSource, @NotNull BamRenderMode bamRenderMode, - @NotNull BigWigAggregationMode bigWigAggregationMode) { + @NotNull BigWigAggregationMode bigWigAggregationMode, + boolean logScale) { private TrackSummary toSummary() { return new TrackSummary( trackId, @@ -2596,8 +4168,10 @@ private TrackSummary toSummary() { color, visible, dataSource.featureCountHint(), + dataSource.renderStyle().name(), bamRenderMode.name(), - bigWigAggregationMode.name() + bigWigAggregationMode.name(), + logScale ); } @@ -2607,6 +4181,7 @@ private TrackRender toErrorRender(final @NotNull String message) { name, type.name(), color, + dataSource.renderStyle().name(), List.of(), 0.0d, message @@ -2617,7 +4192,8 @@ private TrackState withUpdated(final boolean newVisible, final @NotNull String newColor, final @NotNull String newName, final @NotNull BamRenderMode newBamRenderMode, - final @NotNull BigWigAggregationMode 
newBigWigAggregationMode) { + final @NotNull BigWigAggregationMode newBigWigAggregationMode, + final boolean newLogScale) { return new TrackState( trackId, newName, @@ -2627,19 +4203,24 @@ private TrackState withUpdated(final boolean newVisible, newVisible, dataSource, newBamRenderMode, - newBigWigAggregationMode + newBigWigAggregationMode, + newLogScale ); } - private TrackRender query(final @NotNull Map> sourceToAssemblySegments, + private TrackRender query(final @NotNull ChunkedFile chunkedFile, + final @NotNull Map> sourceToAssemblySegments, + final @NotNull List orderedSegments, final long queryStartPx, final long queryEndPx, final int widthPx, final long bpResolution) { final var bins = queryBinsForTrack( type, + chunkedFile, dataSource, sourceToAssemblySegments, + orderedSegments, queryStartPx, queryEndPx, widthPx, @@ -2648,7 +4229,16 @@ private TrackRender query(final @NotNull Map> sour bigWigAggregationMode ); final var maxValue = bins.stream().mapToDouble(TrackBin::getValue).max().orElse(0.0d); - return new TrackRender(trackId, name, type.name(), color, bins, maxValue, null); + return new TrackRender( + trackId, + name, + type.name(), + color, + dataSource.renderStyle().name(), + bins, + maxValue, + null + ); } } } diff --git a/src/main/java/ru/itmo/ctlab/hict/hict_server/util/shareable/ShareableWrappers.java b/src/main/java/ru/itmo/ctlab/hict/hict_server/util/shareable/ShareableWrappers.java index a6f6d9a..1c055c5 100644 --- a/src/main/java/ru/itmo/ctlab/hict/hict_server/util/shareable/ShareableWrappers.java +++ b/src/main/java/ru/itmo/ctlab/hict/hict_server/util/shareable/ShareableWrappers.java @@ -31,6 +31,7 @@ import ru.itmo.ctlab.hict.hict_library.chunkedfile.ChunkedFile; import ru.itmo.ctlab.hict.hict_library.visualization.SimpleVisualizationOptions; import ru.itmo.ctlab.hict.hict_server.concurrent.RequestTaskScheduler; +import ru.itmo.ctlab.hict.hict_server.handlers.tiles.RenderPipelineConfig; import 
ru.itmo.ctlab.hict.hict_server.tracks.Track1DManager; import java.nio.file.Path; @@ -65,4 +66,10 @@ public static class Track1DManagerWrapper implements Shareable { public static class RequestTaskSchedulerWrapper implements Shareable { private final @NotNull RequestTaskScheduler requestTaskScheduler; } + + @Getter + @RequiredArgsConstructor + public static class RenderPipelineConfigWrapper implements Shareable { + private final @NotNull RenderPipelineConfig renderPipelineConfig; + } } diff --git a/src/main/resources/openapi/hict-api-v1.yaml b/src/main/resources/openapi/hict-api-v1.yaml new file mode 100644 index 0000000..b391aff --- /dev/null +++ b/src/main/resources/openapi/hict-api-v1.yaml @@ -0,0 +1,1552 @@ +openapi: 3.0.3 +info: + title: HiCT JVM API + version: v1 + description: | + HTTP API for HiCT_JVM server. + + This specification is intended for WebUI integration, Python `hict`/`hict_jvm_api` + clients and programmatic data-processing pipelines. +servers: + - url: / +tags: + - name: Info + - name: Files + - name: Secondary source + - name: Tiles + - name: Rendering + - name: Scaffolding + - name: Names + - name: Tracks + - name: Conversion +paths: + /version: + get: + tags: [Info] + summary: Get backend/server version metadata + operationId: getVersion + responses: + '200': + description: Version metadata + content: + application/json: + schema: + $ref: '#/components/schemas/VersionResponse' + /diagnostics/workers: + post: + tags: [Info] + summary: Get worker scheduler diagnostics snapshot + operationId: getWorkerDiagnostics + requestBody: + required: false + content: + application/json: + schema: + $ref: '#/components/schemas/EmptyObject' + responses: + '200': + description: Scheduler diagnostics + content: + application/json: + schema: + $ref: '#/components/schemas/GenericMap' + /api/v1: + get: + tags: [Info] + summary: Redirect to interactive API docs + operationId: apiDocsRedirect + responses: + '307': + description: Temporary redirect to `/api/v1/` + 
/api/v1/: + get: + tags: [Info] + summary: Interactive API docs (Swagger UI HTML) + operationId: apiDocsHtml + responses: + '200': + description: HTML page + content: + text/html: + schema: + type: string + /api/v1/openapi: + get: + tags: [Info] + summary: Redirect to OpenAPI source document + operationId: apiSpecRedirect + responses: + '307': + description: Temporary redirect to `/api/v1/openapi.yaml` + /api/v1/openapi.yaml: + get: + tags: [Info] + summary: OpenAPI v1 source specification + operationId: apiSpecYaml + responses: + '200': + description: OpenAPI YAML + content: + application/yaml: + schema: + type: string + + /list_files: + post: + tags: [Files] + summary: List openable HiCT files in DATA_DIR + operationId: listFiles + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: Relative paths + content: + application/json: + schema: + type: array + items: { type: string } + /list_files_detailed: + post: + tags: [Files] + summary: List DATA_DIR files with metadata + operationId: listFilesDetailed + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: File entries + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/FileEntry' + /list_fasta_files: + post: + tags: [Files] + summary: List FASTA files in DATA_DIR + operationId: listFastaFiles + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: FASTA paths + content: + application/json: + schema: + type: array + items: { type: string } + /list_agp_files: + post: + tags: [Files] + summary: List AGP files in DATA_DIR + operationId: listAgpFiles + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: AGP paths + content: + application/json: + schema: + type: array + items: { type: string } + /list_coolers: + post: + tags: [Files] + summary: List cooler/mcool files in DATA_DIR + operationId: 
listCoolers + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: Cooler paths + content: + application/json: + schema: + type: array + items: { type: string } + + /open: + post: + tags: [Files] + summary: Open primary HiCT source + operationId: openPrimarySource + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/OpenFileRequest' + example: + filename: build/quad/combined_ind2_4DN.hict.hdf5 + fastaFilename: build/quad/quad_combined_ind2.fasta + responses: + '200': + description: Opened file metadata + content: + application/json: + schema: + $ref: '#/components/schemas/OpenFileResponse' + /open_progress: + post: + tags: [Files] + summary: Get progress of current open operation + operationId: getOpenProgress + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: Open progress + content: + application/json: + schema: + $ref: '#/components/schemas/OpenProgressResponse' + /attach: + post: + tags: [Files] + summary: Attach to currently opened session + operationId: attachSession + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: Attached session info + content: + application/json: + schema: + $ref: '#/components/schemas/AttachResponse' + /close: + post: + tags: [Files] + summary: Close current session + operationId: closeSession + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: Session close status + content: + application/json: + schema: + $ref: '#/components/schemas/GenericMap' + + /secondary/status: + post: + tags: [Secondary source] + summary: Get secondary source status + operationId: getSecondaryStatus + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: Secondary source status + content: + application/json: + schema: + $ref: '#/components/schemas/SecondarySourceStatus' + 
/secondary/open: + post: + tags: [Secondary source] + summary: Open secondary source + operationId: openSecondarySource + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/OpenSecondaryRequest' + example: + filename: build/quad/combined_ind1_4DN.hict.hdf5 + allowMismatch: false + responses: + '200': + description: Secondary source status + content: + application/json: + schema: + $ref: '#/components/schemas/SecondarySourceStatus' + /secondary/close: + post: + tags: [Secondary source] + summary: Close secondary source + operationId: closeSecondarySource + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: Secondary source status + content: + application/json: + schema: + $ref: '#/components/schemas/SecondarySourceStatus' + /secondary/set_assembly_source: + post: + tags: [Secondary source] + summary: Choose source of assembly descriptors (PRIMARY/SECONDARY) + operationId: setAssemblySource + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/SetAssemblySourceRequest' + responses: + '200': + description: New assembly source and assembly payload + content: + application/json: + schema: + $ref: '#/components/schemas/GenericMap' + + /set_visualization_options: + post: + tags: [Rendering] + summary: Set visualization options and synchronize default rendering pipeline + operationId: setVisualizationOptions + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/VisualizationOptionsRequest' + responses: + '200': + description: Effective visualization options + content: + application/json: + schema: + $ref: '#/components/schemas/GenericMap' + /get_visualization_options: + post: + tags: [Rendering] + summary: Get current visualization options + operationId: getVisualizationOptions + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: 
Current visualization options + content: + application/json: + schema: + $ref: '#/components/schemas/GenericMap' + /render_pipeline/get: + post: + tags: [Rendering] + summary: Get current rendering pipeline graph configuration + operationId: getRenderPipeline + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: Render pipeline config + content: + application/json: + schema: + $ref: '#/components/schemas/RenderPipelineConfig' + /render_pipeline/set: + post: + tags: [Rendering] + summary: Replace rendering pipeline graph configuration + operationId: setRenderPipeline + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/RenderPipelineConfig' + responses: + '200': + description: Render pipeline config + content: + application/json: + schema: + $ref: '#/components/schemas/RenderPipelineConfig' + /render_pipeline/reset: + post: + tags: [Rendering] + summary: Reset rendering pipeline to disabled/default state + operationId: resetRenderPipeline + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: Render pipeline config + content: + application/json: + schema: + $ref: '#/components/schemas/RenderPipelineConfig' + + /tiles/reload: + post: + tags: [Tiles] + summary: Drop tile/track caches and increment generation + operationId: reloadTiles + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: New generation version + content: + application/json: + schema: + type: object + properties: + version: + type: integer + format: int64 + required: [version] + /get_tile: + get: + tags: [Tiles] + summary: Fetch map tile or arbitrary pixel rectangle + operationId: getTile + parameters: + - in: query + name: row + required: true + schema: { type: integer, format: int64 } + description: Tile row index, or start row in pixels for PNG_BY_PIXELS. 
+ - in: query + name: col + required: true + schema: { type: integer, format: int64 } + description: Tile column index, or start col in pixels for PNG_BY_PIXELS. + - in: query + name: version + required: false + schema: { type: integer, format: int64, default: 0 } + - in: query + name: format + required: false + schema: + type: string + enum: [JSON_PNG_WITH_RANGES, PNG, PNG_BY_PIXELS] + default: JSON_PNG_WITH_RANGES + - in: query + name: bpResolution + required: false + schema: { type: integer, format: int64 } + description: Exact bp resolution. Optional alternative to `level`. + - in: query + name: level + required: false + schema: { type: integer } + description: Resolution level in reversed order. Optional alternative to `bpResolution`. + - in: query + name: tile_size + required: false + schema: { type: integer, default: 256 } + description: Tile edge size for tile-based queries. + - in: query + name: rows + required: false + schema: { type: integer } + description: Height in pixels for PNG_BY_PIXELS. + - in: query + name: cols + required: false + schema: { type: integer } + description: Width in pixels for PNG_BY_PIXELS. 
+ responses: + '200': + description: Tile payload + content: + application/json: + schema: + $ref: '#/components/schemas/TileWithRangesResponse' + image/png: + schema: + type: string + format: binary + /matrix/query: + post: + tags: [Tiles] + summary: Fetch rectangular submatrix as numeric tensor-ready payload + operationId: queryMatrix + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/MatrixQueryRequest' } + examples: + normalizedPixels: + summary: Traditional normalized signal in visible pixels + value: + bpResolution: 50000 + unit: PIXELS + startRowPx: 0 + endRowPx: 256 + startColPx: 0 + endColPx: 256 + signalMode: TRADITIONAL_NORMALIZED + format: BINARY_FLOAT32 + rawBins: + summary: Raw counts in bins as int64 JSON + value: + bpResolution: 50000 + unit: BINS + startRowBin: 0 + endRowBin: 32 + startColBin: 0 + endColBin: 32 + signalMode: RAW_COUNTS + format: JSON + responses: + '200': + description: Matrix payload + headers: + x-hict-rows: + schema: { type: integer } + x-hict-cols: + schema: { type: integer } + x-hict-dtype: + schema: { type: string, enum: [float32, float64, int64] } + x-hict-signal-mode: + schema: { type: string } + content: + application/json: + schema: + $ref: '#/components/schemas/MatrixQueryJsonResponse' + application/octet-stream: + schema: + type: string + format: binary + + /reverse_selection_range: + post: + tags: [Scaffolding] + summary: Reverse orientation of selected assembly range + operationId: reverseSelectionRange + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/RangeRequest' } + responses: + '200': + description: Updated assembly info with version + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /move_selection_range: + post: + tags: [Scaffolding] + summary: Move selected range to a new target position + operationId: moveSelectionRange + requestBody: + required: true + content: + 
application/json: + schema: { $ref: '#/components/schemas/MoveSelectionRequest' } + responses: + '200': + description: Updated assembly info with version + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /split_contig_at_bin: + post: + tags: [Scaffolding] + summary: Split contig at a given position + operationId: splitContigAtBin + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/SplitContigRequest' } + responses: + '200': + description: Updated assembly info with version + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /group_contigs_into_scaffold: + post: + tags: [Scaffolding] + summary: Group selected range into scaffold + operationId: groupIntoScaffold + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/GroupContigsRequest' } + responses: + '200': + description: Updated assembly info with version + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /ungroup_contigs_from_scaffold: + post: + tags: [Scaffolding] + summary: Ungroup selected scaffold segment + operationId: ungroupFromScaffold + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/RangeRequest' } + responses: + '200': + description: Updated assembly info with version + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /move_selection_to_debris: + post: + tags: [Scaffolding] + summary: Move selected range to debris + operationId: moveSelectionToDebris + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/RangeRequest' } + responses: + '200': + description: Updated assembly info with version + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + + /link_fasta: + post: + tags: [Files] + summary: Link FASTA file to opened assembly + 
operationId: linkFasta + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/LinkFastaRequest' } + responses: + '200': + description: FASTA link validation report + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /get_fasta_for_assembly: + post: + tags: [Files] + summary: Export FASTA for current full assembly + operationId: exportFastaAssembly + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: FASTA text + content: + text/plain: + schema: + type: string + /get_fasta_for_selection: + post: + tags: [Files] + summary: Export FASTA for current selection + operationId: exportFastaSelection + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/FastaSelectionRequest' } + responses: + '200': + description: FASTA text + content: + text/plain: + schema: + type: string + /get_agp_for_assembly: + post: + tags: [Files] + summary: Export AGP for current assembly + operationId: exportAgpAssembly + requestBody: + required: false + content: + application/json: + schema: { $ref: '#/components/schemas/GetAgpRequest' } + responses: + '200': + description: AGP text + content: + text/plain: + schema: + type: string + /load_agp: + post: + tags: [Files] + summary: Apply AGP to opened assembly + operationId: loadAgp + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/LoadAgpRequest' } + responses: + '200': + description: Updated assembly info + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + + /names/contig: + post: + tags: [Names] + summary: Rename contig display name + operationId: renameContig + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/RenameContigRequest' } + responses: + '200': + description: Updated assembly info with version + content: + application/json: 
+ schema: { $ref: '#/components/schemas/GenericMap' } + /names/scaffold: + post: + tags: [Names] + summary: Rename scaffold display name + operationId: renameScaffold + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/RenameScaffoldRequest' } + responses: + '200': + description: Updated assembly info with version + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /names/export: + get: + tags: [Names] + summary: Export name mappings + operationId: exportNameMappings + responses: + '200': + description: Name mappings payload + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /names/import: + post: + tags: [Names] + summary: Import contig/scaffold name mappings + operationId: importNameMappings + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/ImportNameMappingRequest' } + responses: + '200': + description: Updated assembly info with version + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + + /tracks/list_files: + post: + tags: [Tracks] + summary: List available 1D track files in DATA_DIR + operationId: listTrackFiles + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: Track filenames + content: + application/json: + schema: + type: array + items: { type: string } + /tracks/open: + post: + tags: [Tracks] + summary: Open 1D track (BigWig/BED/BAM/GFF/GTF) + operationId: openTrack + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/OpenTrackRequest' } + responses: + '200': + description: Track summary + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /tracks/open_cooler_weights: + post: + tags: [Tracks] + summary: Open Cooler weights virtual 1D track + operationId: openCoolerWeightsTrack + requestBody: + required: false + 
content: + application/json: + schema: { $ref: '#/components/schemas/OpenCoolerWeightsTrackRequest' } + responses: + '200': + description: Track summary + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /tracks/probe: + post: + tags: [Tracks] + summary: Probe track compatibility with opened assembly + operationId: probeTrackCompatibility + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/ProbeTrackRequest' } + responses: + '200': + description: Compatibility report + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /tracks/list: + post: + tags: [Tracks] + summary: List currently opened tracks + operationId: listTracks + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: Track summaries + content: + application/json: + schema: + type: array + items: { $ref: '#/components/schemas/GenericMap' } + /tracks/update: + post: + tags: [Tracks] + summary: Update track configuration + operationId: updateTrack + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/UpdateTrackRequest' } + responses: + '200': + description: Updated track summary + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /tracks/remove: + post: + tags: [Tracks] + summary: Remove opened track + operationId: removeTrack + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/TrackIdRequest' } + responses: + '200': + description: Remove status + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /tracks/reorder: + post: + tags: [Tracks] + summary: Reorder loaded 1D tracks + operationId: reorderTracks + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/TrackReorderRequest' } + responses: + '200': + description: Updated track 
list in render order + content: + application/json: + schema: + type: array + items: { $ref: '#/components/schemas/GenericMap' } + /tracks/precompute/status: + post: + tags: [Tracks] + summary: Get 1D track precompute status + operationId: getTrackPrecomputeStatus + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: Precompute status + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /tracks/precompute/start: + post: + tags: [Tracks] + summary: Start 1D track precompute job + operationId: startTrackPrecompute + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/TrackPrecomputeStartRequest' } + responses: + '200': + description: Precompute status + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /tracks/query_1d: + post: + tags: [Tracks] + summary: Query visible 1D track segments in given unit/range + operationId: queryTracks1D + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/TrackQueryRequest' } + responses: + '200': + description: Queried tracks values and ranges + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /tracks/search_features: + post: + tags: [Tracks] + summary: Whole-genome feature search across loaded 1D feature tracks + operationId: searchTrackFeatures + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/TrackFeatureSearchRequest' } + responses: + '200': + description: Search results with pagination hint + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /tracks/feature_context: + post: + tags: [Tracks] + summary: Query 1D tracks around selected feature with screen-margin prefetch window + operationId: getTrackFeatureContext + requestBody: + required: true + content: + application/json: + schema: { $ref: 
'#/components/schemas/TrackFeatureContextRequest' } + responses: + '200': + description: Feature context window with queried tracks payload + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + + /convert/upload: + post: + tags: [Conversion] + summary: Upload and convert one file (multipart) + operationId: uploadAndConvert + requestBody: + required: true + content: + multipart/form-data: + schema: + type: object + properties: + file: + type: string + format: binary + responses: + '200': + description: Conversion submission status + content: + application/json: + schema: { $ref: '#/components/schemas/ConversionSubmitResponse' } + /convert/jobs: + post: + tags: [Conversion] + summary: Start conversion job from DATA_DIR source + operationId: startConversionJob + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/StartConversionJobRequest' } + responses: + '200': + description: Conversion submission status + content: + application/json: + schema: { $ref: '#/components/schemas/ConversionSubmitResponse' } + get: + tags: [Conversion] + summary: List conversion jobs + operationId: listConversionJobsGet + responses: + '200': + description: Job list + content: + application/json: + schema: + type: array + items: { $ref: '#/components/schemas/GenericMap' } + /convert/jobs/list: + post: + tags: [Conversion] + summary: List conversion jobs (POST variant) + operationId: listConversionJobsPost + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: Job list + content: + application/json: + schema: + type: array + items: { $ref: '#/components/schemas/GenericMap' } + /convert/jobs/batch: + post: + tags: [Conversion] + summary: Start batch conversion jobs + operationId: startBatchConversionJobs + requestBody: + required: true + content: + application/json: + schema: { $ref: '#/components/schemas/StartBatchConversionRequest' } + responses: + '200': + 
description: Batch submission status + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /convert/jobs/{jobId}: + get: + tags: [Conversion] + summary: Get conversion job details + operationId: getConversionJob + parameters: + - $ref: '#/components/parameters/jobId' + responses: + '200': + description: Job status + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + post: + tags: [Conversion] + summary: Get conversion job details (POST variant) + operationId: getConversionJobPost + parameters: + - $ref: '#/components/parameters/jobId' + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: Job status + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /convert/jobs/{jobId}/stop: + post: + tags: [Conversion] + summary: Request conversion job cancellation + operationId: stopConversionJob + parameters: + - $ref: '#/components/parameters/jobId' + requestBody: { $ref: '#/components/requestBodies/EmptyBody' } + responses: + '200': + description: Stop status + content: + application/json: + schema: { $ref: '#/components/schemas/GenericMap' } + /convert/download/{jobId}: + get: + tags: [Conversion] + summary: Download conversion result file for completed job + operationId: downloadConversionResult + parameters: + - $ref: '#/components/parameters/jobId' + responses: + '200': + description: Converted artifact stream + content: + application/octet-stream: + schema: + type: string + format: binary + +components: + parameters: + jobId: + name: jobId + in: path + required: true + schema: { type: string } + requestBodies: + EmptyBody: + required: false + content: + application/json: + schema: + $ref: '#/components/schemas/EmptyObject' + example: {} + schemas: + EmptyObject: + type: object + additionalProperties: false + GenericMap: + type: object + additionalProperties: true + ErrorResponse: + type: object + properties: + error: { 
type: string } + required: [error] + + VersionResponse: + type: object + properties: + version: { type: string } + webuiVersion: { type: string } + required: [version, webuiVersion] + FileEntry: + type: object + properties: + path: { type: string } + name: { type: string } + sizeBytes: { type: integer, format: int64 } + modifiedAtMs: { type: integer, format: int64 } + extension: { type: string } + + OpenFileRequest: + type: object + properties: + filename: { type: string } + fastaFilename: { type: string } + required: [filename] + OpenFileResponse: + type: object + properties: + status: { type: string } + dtype: { type: string } + resolutions: { type: array, items: { type: integer, format: int64 } } + resolutionScales: { type: array, items: { type: number } } + denseBlockSize: { type: integer } + assemblyInfo: { type: object, additionalProperties: true } + matrixSizeBins: { type: array, items: { type: integer } } + required: [status, resolutions, assemblyInfo] + OpenProgressResponse: + type: object + properties: + stage: { type: string } + progress: { type: number } + required: [stage, progress] + AttachResponse: + type: object + properties: + filename: { type: string } + fastaFilename: { type: string } + openFileResponse: { $ref: '#/components/schemas/OpenFileResponse' } + + OpenSecondaryRequest: + type: object + properties: + filename: { type: string } + allowMismatch: { type: boolean, default: false } + required: [filename] + SetAssemblySourceRequest: + type: object + properties: + assemblySource: + type: string + enum: [PRIMARY, SECONDARY] + required: [assemblySource] + SecondarySourceStatus: + type: object + properties: + attached: { type: boolean } + filename: { type: string } + assemblySource: { type: string, enum: [PRIMARY, SECONDARY] } + requiresConfirmation: { type: boolean } + requestedFilename: { type: string } + warnings: + type: array + items: { type: string } + compatibility: + $ref: '#/components/schemas/SecondaryCompatibility' + required: 
[attached, filename, assemblySource] + SecondaryCompatibility: + type: object + properties: + sameResolutions: { type: boolean } + sameMatrixSizes: { type: boolean } + exactMatch: { type: boolean } + primaryMaxBins: { type: integer, format: int64 } + secondaryMaxBins: { type: integer, format: int64 } + primaryBinsByResolution: + type: array + items: { type: integer, format: int64 } + secondaryBinsByResolution: + type: array + items: { type: integer, format: int64 } + mismatchedResolutionOrders: + type: array + items: { type: integer } + + VisualizationOptionsRequest: + type: object + additionalProperties: true + RenderPipelineConfig: + type: object + additionalProperties: true + + TileWithRangesResponse: + type: object + properties: + image: + type: string + description: data:image/png;base64,... + ranges: + type: object + properties: + lowerBounds: + type: object + additionalProperties: { type: number } + upperBounds: + type: object + additionalProperties: { type: number } + + MatrixQueryRequest: + type: object + properties: + bpResolution: + type: integer + format: int64 + description: Resolution in base pairs for matrix extraction. + unit: + type: string + enum: [PIXELS, BINS, BP] + default: PIXELS + units: + type: string + enum: [PIXELS, BINS, BP] + description: Alias for `unit`. 
+ startRow: + type: integer + format: int64 + endRow: + type: integer + format: int64 + startCol: + type: integer + format: int64 + endCol: + type: integer + format: int64 + startRowPx: + type: integer + format: int64 + endRowPx: + type: integer + format: int64 + startColPx: + type: integer + format: int64 + endColPx: + type: integer + format: int64 + startRowBin: + type: integer + format: int64 + endRowBin: + type: integer + format: int64 + startColBin: + type: integer + format: int64 + endColBin: + type: integer + format: int64 + startRowBP: + type: integer + format: int64 + endRowBP: + type: integer + format: int64 + startColBP: + type: integer + format: int64 + endColBP: + type: integer + format: int64 + rows: + type: integer + description: Optional height if end is not provided. + cols: + type: integer + description: Optional width if end is not provided. + signalMode: + type: string + enum: [RAW_COUNTS, COOLER_WEIGHTED, TRADITIONAL_NORMALIZED, PIPELINE_SIGNAL] + default: TRADITIONAL_NORMALIZED + format: + type: string + enum: [JSON, BINARY_FLOAT32, BINARY_FLOAT64, BINARY_INT64] + default: BINARY_FLOAT32 + includeWeights: + type: boolean + default: false + required: [bpResolution] + + MatrixQueryJsonResponse: + type: object + properties: + rows: { type: integer } + cols: { type: integer } + dtype: { type: string, enum: [float64, int64] } + signalMode: { type: string } + unit: { type: string, enum: [PIXELS] } + startRowPx: { type: integer, format: int64 } + endRowPx: { type: integer, format: int64 } + startColPx: { type: integer, format: int64 } + endColPx: { type: integer, format: int64 } + values: + type: array + items: + oneOf: + - type: number + - type: integer + rowWeights: + type: array + items: { type: number } + colWeights: + type: array + items: { type: number } + + RangeRequest: + type: object + properties: + startBP: { type: integer, format: int64 } + endBP: { type: integer, format: int64 } + required: [startBP, endBP] + MoveSelectionRequest: + type: 
object + properties: + startBP: { type: integer, format: int64 } + endBP: { type: integer, format: int64 } + targetStartBP: { type: integer, format: int64 } + required: [startBP, endBP, targetStartBP] + SplitContigRequest: + type: object + properties: + splitPx: { type: integer, format: int64 } + bpResolution: { type: integer, format: int64 } + required: [splitPx, bpResolution] + GroupContigsRequest: + type: object + properties: + startBP: { type: integer, format: int64 } + endBP: { type: integer, format: int64 } + newScaffoldName: { type: string } + spacerLength: { type: integer, format: int64 } + required: [startBP, endBP] + + LinkFastaRequest: + type: object + properties: + fastaFilename: { type: string } + allowMismatch: { type: boolean, default: false } + required: [fastaFilename] + FastaSelectionRequest: + type: object + properties: + fromBpX: { type: integer, format: int64 } + fromBpY: { type: integer, format: int64 } + toBpX: { type: integer, format: int64 } + toBpY: { type: integer, format: int64 } + required: [fromBpX, fromBpY, toBpX, toBpY] + GetAgpRequest: + type: object + properties: + defaultSpacerLength: { type: integer, format: int64, default: 1000 } + LoadAgpRequest: + type: object + properties: + agpFilename: { type: string } + required: [agpFilename] + + RenameContigRequest: + type: object + properties: + contigId: { type: integer } + newName: + type: string + nullable: true + required: [contigId] + RenameScaffoldRequest: + type: object + properties: + scaffoldId: { type: integer, format: int64 } + newName: + type: string + nullable: true + required: [scaffoldId] + ImportNameMappingRequest: + type: object + properties: + contigs: + type: array + items: + type: object + properties: + contigId: { type: integer } + name: { type: string } + required: [contigId, name] + scaffolds: + type: array + items: + type: object + properties: + scaffoldId: { type: integer, format: int64 } + name: { type: string } + required: [scaffoldId, name] + required: 
[contigs, scaffolds] + + OpenTrackRequest: + type: object + properties: + filename: { type: string } + name: { type: string } + color: { type: string, description: CSS color string } + required: [filename] + OpenCoolerWeightsTrackRequest: + type: object + properties: + name: { type: string } + color: { type: string } + ProbeTrackRequest: + type: object + properties: + filename: { type: string } + required: [filename] + UpdateTrackRequest: + type: object + properties: + trackId: { type: string } + visible: { type: boolean } + color: { type: string } + name: { type: string } + renderMode: { type: string } + aggregationMode: { type: string } + logScale: { type: boolean } + required: [trackId] + TrackIdRequest: + type: object + properties: + trackId: { type: string } + required: [trackId] + TrackReorderRequest: + type: object + properties: + trackId: { type: string } + targetIndex: { type: integer, minimum: 0 } + required: [trackId, targetIndex] + TrackPrecomputeStartRequest: + type: object + properties: + trackId: { type: string, description: Optional; all tracks if omitted } + force: { type: boolean, default: false } + TrackQueryRequest: + type: object + properties: + widthPx: { type: integer, default: 512 } + bpResolution: { type: integer, format: int64 } + unit: + type: string + enum: [PIXELS, BINS, BP] + units: + type: string + enum: [PIXELS, BINS, BP] + start: { type: integer, format: int64 } + end: { type: integer, format: int64 } + startPx: { type: integer, format: int64 } + endPx: { type: integer, format: int64 } + startBin: { type: integer, format: int64 } + endBin: { type: integer, format: int64 } + startBP: { type: integer, format: int64 } + endBP: { type: integer, format: int64 } + required: [bpResolution] + TrackFeatureSearchRequest: + type: object + properties: + query: { type: string, minLength: 1 } + limit: { type: integer, minimum: 1, default: 50 } + offset: { type: integer, minimum: 0, default: 0 } + trackId: { type: string, description: Optional 
track filter } + required: [query] + TrackFeatureContextRequest: + type: object + properties: + widthPx: { type: integer, minimum: 1, default: 1024 } + bpResolution: { type: integer, format: int64 } + marginScreens: { type: number, format: double, default: 1.0 } + unit: + type: string + enum: [PIXELS, BINS, BP] + units: + type: string + enum: [PIXELS, BINS, BP] + start: { type: integer, format: int64 } + end: { type: integer, format: int64 } + startPx: { type: integer, format: int64 } + endPx: { type: integer, format: int64 } + startBin: { type: integer, format: int64 } + endBin: { type: integer, format: int64 } + startBP: { type: integer, format: int64 } + endBP: { type: integer, format: int64 } + required: [bpResolution, widthPx] + + StartConversionJobRequest: + type: object + properties: + filename: { type: string } + direction: + type: string + enum: [mcool-to-hict, hict-to-mcool] + default: mcool-to-hict + overwrite: { type: boolean, default: false } + resolutions: + type: string + description: Comma-separated resolution list, e.g. 
"1000,5000,10000" + compression: { type: integer, default: 6 } + compressionAlgorithm: + type: string + enum: [deflate, lzf, zstd, none] + default: deflate + chunkSize: { type: integer, default: 8192 } + parallelism: { type: integer } + required: [filename] + StartBatchConversionRequest: + type: object + properties: + files: + type: array + items: { type: string } + parallelJobs: { type: integer, default: 1 } + parallelism: { type: integer } + overwrite: { type: boolean, default: false } + resolutions: { type: string } + compression: { type: integer, default: 6 } + compressionAlgorithm: + type: string + enum: [deflate, lzf, zstd, none] + default: deflate + chunkSize: { type: integer, default: 8192 } + required: [files] + ConversionSubmitResponse: + type: object + properties: + status: { type: string } + jobId: { type: string } + required: [status] diff --git a/src/test/java/ru/itmo/ctlab/hict/hict_server/ApiHttpIntegrationTest.java b/src/test/java/ru/itmo/ctlab/hict/hict_server/ApiHttpIntegrationTest.java new file mode 100644 index 0000000..2b0933d --- /dev/null +++ b/src/test/java/ru/itmo/ctlab/hict/hict_server/ApiHttpIntegrationTest.java @@ -0,0 +1,247 @@ +/* + * MIT License + * + * Copyright (c) 2021-2026. Aleksandr Serdiukov, Anton Zamyatin, Aleksandr Sinitsyn, Vitalii Dravgelis and Computer Technologies Laboratory ITMO University team. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package ru.itmo.ctlab.hict.hict_server; + +import io.vertx.core.Vertx; +import io.vertx.core.shareddata.LocalMap; +import io.vertx.ext.web.Router; +import io.vertx.ext.web.handler.BodyHandler; +import org.jetbrains.annotations.NotNull; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import ru.itmo.ctlab.hict.hict_server.concurrent.RequestTaskScheduler; +import ru.itmo.ctlab.hict.hict_server.handlers.files.FSHandlersHolder; +import ru.itmo.ctlab.hict.hict_server.handlers.info.ApiDocsHandlersHolder; +import ru.itmo.ctlab.hict.hict_server.handlers.info.InfoHandlersHolder; +import ru.itmo.ctlab.hict.hict_server.util.shareable.ShareableWrappers; + +import java.io.IOException; +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.EnumMap; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class ApiHttpIntegrationTest { + @TempDir + Path tempDataDir; + + private Vertx vertx; + private RequestTaskScheduler scheduler; + private io.vertx.core.http.HttpServer server; + private int port; + private final HttpClient httpClient = 
HttpClient.newHttpClient();
+
+  @AfterEach
+  void tearDown() throws Exception {
+    if (server != null) {
+      final var closeFuture = new CompletableFuture<Void>();
+      server.close(ar -> {
+        if (ar.succeeded()) {
+          closeFuture.complete(null);
+        } else {
+          closeFuture.completeExceptionally(ar.cause());
+        }
+      });
+      closeFuture.get(10, TimeUnit.SECONDS);
+      server = null;
+    }
+    if (scheduler != null) {
+      scheduler.close();
+      scheduler = null;
+    }
+    if (vertx != null) {
+      final var closeFuture = new CompletableFuture<Void>();
+      vertx.close(ar -> {
+        if (ar.succeeded()) {
+          closeFuture.complete(null);
+        } else {
+          closeFuture.completeExceptionally(ar.cause());
+        }
+      });
+      closeFuture.get(10, TimeUnit.SECONDS);
+      vertx = null;
+    }
+  }
+
+  @Test
+  void apiDocsEndpointsServeSpecAndSupportCaching() throws Exception {
+    startServerWithInfoAndFileHandlers();
+
+    final var docsResponse = get("/api/v1/");
+    assertEquals(200, docsResponse.statusCode());
+    assertTrue(docsResponse.body().contains("SwaggerUIBundle"));
+    assertTrue(docsResponse.body().contains("/api/v1/openapi.yaml"));
+    final var docsEtag = docsResponse.headers().firstValue("etag").orElse("");
+    assertTrue(!docsEtag.isBlank());
+
+    final var docsCached = get("/api/v1/", Map.of("If-None-Match", docsEtag));
+    assertEquals(304, docsCached.statusCode());
+
+    final var redirect = get("/api/v1");
+    assertEquals(307, redirect.statusCode());
+    assertEquals("/api/v1/", redirect.headers().firstValue("location").orElse(""));
+
+    final var specResponse = get("/api/v1/openapi.yaml");
+    assertEquals(200, specResponse.statusCode());
+    assertTrue(specResponse.body().contains("openapi: 3.0.3"));
+    assertTrue(specResponse.body().contains("/tracks/query_1d:"));
+    assertTrue(specResponse.body().contains("/convert/jobs/{jobId}:"));
+    final var specEtag = specResponse.headers().firstValue("etag").orElse("");
+    assertTrue(!specEtag.isBlank());
+
+    final var specCached = get("/api/v1/openapi.yaml", Map.of("If-None-Match", specEtag));
+    assertEquals(304,
specCached.statusCode()); + } + + @Test + void infoAndFileEndpointsRespondOverHttpWithScheduler() throws Exception { + Files.createDirectories(tempDataDir.resolve("build/quad")); + Files.writeString(tempDataDir.resolve("build/quad/a.hict.hdf5"), "x", StandardCharsets.UTF_8); + Files.writeString(tempDataDir.resolve("build/quad/b.cool"), "x", StandardCharsets.UTF_8); + Files.writeString(tempDataDir.resolve("build/quad/c.mcool"), "x", StandardCharsets.UTF_8); + Files.writeString(tempDataDir.resolve("build/quad/genome.fasta"), ">chr1\nACGT\n", StandardCharsets.UTF_8); + Files.writeString(tempDataDir.resolve("build/quad/example.agp"), "##agp\n", StandardCharsets.UTF_8); + + startServerWithInfoAndFileHandlers(); + + final var version = get("/version"); + assertEquals(200, version.statusCode()); + assertTrue(version.body().contains("\"version\"")); + assertTrue(version.body().contains("\"webuiVersion\"")); + + final var diagnostics = post("/diagnostics/workers", "{}"); + assertEquals(200, diagnostics.statusCode()); + assertTrue(diagnostics.body().contains("\"pools\"")); + assertTrue(diagnostics.body().contains("\"cancellationDomains\"")); + + final var files = post("/list_files", "{}"); + assertEquals(200, files.statusCode()); + assertTrue(files.body().contains("a.hict.hdf5")); + + final var detailed = post("/list_files_detailed", "{}"); + assertEquals(200, detailed.statusCode()); + assertTrue(detailed.body().contains("\"sizeBytes\"")); + assertTrue(detailed.body().contains("\"modifiedAtMs\"")); + assertTrue(detailed.body().contains("\"extension\"")); + + final var coolers = post("/list_coolers", "{}"); + assertEquals(200, coolers.statusCode()); + assertTrue(coolers.body().contains("b.cool")); + assertTrue(coolers.body().contains("c.mcool")); + + final var fasta = post("/list_fasta_files", "{}"); + assertEquals(200, fasta.statusCode()); + assertTrue(fasta.body().contains("genome.fasta")); + + final var agp = post("/list_agp_files", "{}"); + assertEquals(200, 
agp.statusCode());
+    assertTrue(agp.body().contains("example.agp"));
+  }
+
+  private void startServerWithInfoAndFileHandlers() throws Exception {
+    vertx = Vertx.vertx();
+    scheduler = new RequestTaskScheduler(vertx, schedulerConfig());
+
+    final @NotNull LocalMap<String, io.vertx.core.shareddata.Shareable> map = vertx.sharedData().getLocalMap("hict_server");
+    map.put("dataDirectory", new ShareableWrappers.PathWrapper(tempDataDir.toAbsolutePath().normalize()));
+    map.put(
+      RequestTaskScheduler.LOCAL_MAP_KEY,
+      new ShareableWrappers.RequestTaskSchedulerWrapper(scheduler)
+    );
+
+    final var router = Router.router(vertx);
+    router.route().handler(BodyHandler.create());
+    router.route().failureHandler(ctx -> {
+      final var message = ctx.failure() != null && ctx.failure().getMessage() != null
+        ? ctx.failure().getMessage()
+        : "Request failed";
+      ctx.response()
+        .putHeader("content-type", "application/json")
+        .setStatusCode(500)
+        .end("{\"error\":\"" + message.replace("\"", "\\\"") + "\"}");
+    });
+
+    new InfoHandlersHolder(vertx).addHandlersToRouter(router);
+    new FSHandlersHolder(vertx).addHandlersToRouter(router);
+    new ApiDocsHandlersHolder().addHandlersToRouter(router);
+
+    server = vertx.createHttpServer();
+    final var listenFuture = new CompletableFuture<Integer>();
+    server.requestHandler(router).listen(0, "127.0.0.1", ar -> {
+      if (ar.succeeded()) {
+        listenFuture.complete(ar.result().actualPort());
+      } else {
+        listenFuture.completeExceptionally(ar.cause());
+      }
+    });
+    port = listenFuture.get(10, TimeUnit.SECONDS);
+  }
+
+  private RequestTaskScheduler.SchedulerConfig schedulerConfig() {
+    final var sizing = new EnumMap<RequestTaskScheduler.RequestPriority, RequestTaskScheduler.PoolSizing>(
+      RequestTaskScheduler.RequestPriority.class
+    );
+    for (final var priority : RequestTaskScheduler.RequestPriority.values()) {
+      sizing.put(priority, new RequestTaskScheduler.PoolSizing(2, 2));
+    }
+    return new RequestTaskScheduler.SchedulerConfig(10, 16, 10, sizing);
+  }
+
+  private HttpResponse<String> get(final @NotNull String path) throws IOException, InterruptedException {
+    return get(path, Map.of());
+  }
+
+  private HttpResponse<String> get(final @NotNull String path,
+                                   final @NotNull Map<String, String> headers) throws IOException, InterruptedException {
+    var builder = HttpRequest.newBuilder()
+      .uri(URI.create("http://127.0.0.1:" + port + path))
+      .GET();
+    for (final var entry : headers.entrySet()) {
+      builder = builder.header(entry.getKey(), entry.getValue());
+    }
+    return httpClient.send(builder.build(), HttpResponse.BodyHandlers.ofString(StandardCharsets.UTF_8));
+  }
+
+  private HttpResponse<String> post(final @NotNull String path,
+                                    final @NotNull String body) throws IOException, InterruptedException {
+    final var request = HttpRequest.newBuilder()
+      .uri(URI.create("http://127.0.0.1:" + port + path))
+      .header("content-type", "application/json")
+      .POST(HttpRequest.BodyPublishers.ofString(body, StandardCharsets.UTF_8))
+      .build();
+    return httpClient.send(request, HttpResponse.BodyHandlers.ofString(StandardCharsets.UTF_8));
+  }
+}
diff --git a/src/test/java/ru/itmo/ctlab/hict/hict_server/OpenApiCoverageTest.java b/src/test/java/ru/itmo/ctlab/hict/hict_server/OpenApiCoverageTest.java
new file mode 100644
index 0000000..6359fd9
--- /dev/null
+++ b/src/test/java/ru/itmo/ctlab/hict/hict_server/OpenApiCoverageTest.java
@@ -0,0 +1,89 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2021-2026. Aleksandr Serdiukov, Anton Zamyatin, Aleksandr Sinitsyn, Vitalii Dravgelis and Computer Technologies Laboratory ITMO University team.
+ * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */
+
+package ru.itmo.ctlab.hict.hict_server;
+
+import org.junit.jupiter.api.Test;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.LinkedHashSet;
+import java.util.Set;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+class OpenApiCoverageTest {
+  private static final Pattern ROUTE_PATTERN = Pattern.compile("router\\.(?:get|post)\\(\"([^\"]+)\"\\)");
+  private static final Pattern DOCS_PATH_PATTERN = Pattern.compile("^  (/[^:]+):\\s*$", Pattern.MULTILINE);
+  private static final Pattern COLON_PARAM_PATTERN = Pattern.compile(":([A-Za-z_][A-Za-z0-9_]*)");
+
+  @Test
+  void allHandlerRoutesAreDocumentedInOpenApiSpec() throws IOException {
+    final var handlerPaths = extractHandlerPaths();
+    final var documentedPaths = extractDocumentedPaths();
+    final var missing = handlerPaths.stream()
+      .filter(path -> !documentedPaths.contains(path))
+      .collect(Collectors.toCollection(LinkedHashSet::new));
+
+    assertTrue(
+      missing.isEmpty(),
+      () -> "OpenAPI spec is missing handler paths: " + missing
+    );
+  }
+
+  private static Set<String> extractHandlerPaths() throws IOException {
+    final var result = new LinkedHashSet<String>();
+    final var handlersRoot = Path.of("src/main/java/ru/itmo/ctlab/hict/hict_server/handlers");
+    try (final var stream = Files.walk(handlersRoot)) {
+      final var handlerFiles = stream
+        .filter(path -> Files.isRegularFile(path) && path.getFileName().toString().endsWith("HandlersHolder.java"))
+        .toList();
+      for (final var path : handlerFiles) {
+        final var text = Files.readString(path, StandardCharsets.UTF_8);
+        final var matcher = ROUTE_PATTERN.matcher(text);
+        while (matcher.find()) {
+          final var rawPath = matcher.group(1);
+          final var normalized = COLON_PARAM_PATTERN.matcher(rawPath).replaceAll("{$1}");
+          result.add(normalized);
+        }
+      }
+    }
+    return result;
+  }
+
+  private static Set<String> extractDocumentedPaths() throws IOException {
+    final var openApiPath = Path.of("src/main/resources/openapi/hict-api-v1.yaml");
+    final var openApiText = Files.readString(openApiPath, StandardCharsets.UTF_8);
+    final var result = new LinkedHashSet<String>();
+    final var matcher = DOCS_PATH_PATTERN.matcher(openApiText);
+    while (matcher.find()) {
+      result.add(matcher.group(1));
+    }
+    return result;
+  }
+}
diff --git a/src/test/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/RenderPipelineConfigBuiltinCoolerTrackTest.java b/src/test/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/RenderPipelineConfigBuiltinCoolerTrackTest.java
new file mode 100644
index 0000000..762751e
--- /dev/null
+++ b/src/test/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/RenderPipelineConfigBuiltinCoolerTrackTest.java
@@ -0,0 +1,100 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2021-2026. Aleksandr Serdiukov, Anton Zamyatin, Aleksandr Sinitsyn, Vitalii Dravgelis and Computer Technologies Laboratory ITMO University team.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package ru.itmo.ctlab.hict.hict_server.handlers.tiles; + +import io.vertx.core.json.JsonObject; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.*; + +class RenderPipelineConfigBuiltinCoolerTrackTest { + @Test + void builtinCoolerWeightsTrackUsesContextWeightsAndDoesNotRequireTrackSampling() { + final var upper = new JsonObject() + .put("type", "colormap") + .put("input", new JsonObject() + .put("type", "track1d") + .put("trackId", RenderPipelineConfig.BUILTIN_COOLER_WEIGHTS_TRACK_ID) + .put("axis", "ROW")) + .put("minSignal", 0.0d) + .put("maxSignal", 10.0d) + .put("startColor", "#00000000") + .put("endColor", "#ffffffff"); + + final var lower = new JsonObject() + .put("type", "colormap") + .put("input", new JsonObject() + .put("type", "track1d") + .put("trackId", RenderPipelineConfig.BUILTIN_COOLER_WEIGHTS_TRACK_ID) + .put("axis", "COL")) + .put("minSignal", 0.0d) + .put("maxSignal", 10.0d) + .put("startColor", "#00000000") + .put("endColor", "#ffffffff"); + + final var config = RenderPipelineConfig.fromJson(new JsonObject() + .put("enabled", true) + .put("swapUpperLower", false) + .put("upperExpression", upper) + .put("lowerExpression", lower)); + + final var context = new RenderPipelineConfig.MutablePixelContext(); + context.rowWeight = 2.5d; + context.colWeight = 7.0d; + + assertEquals(2.5d, config.evaluate(true, context), 1e-12); + assertEquals(7.0d, config.evaluate(false, context), 1e-12); + assertTrue(config.requiredTrackBindings().isEmpty()); + } + + @Test + void logInputNode_usesDynamicBaseExpression() { + final var expression = new JsonObject() + .put("type", "colormap") + .put( + "input", + new JsonObject() + .put("type", "log_input") + 
.put("input", new JsonObject().put("type", "source").put("source", "PRIMARY")) + .put("base", new JsonObject().put("type", "dynamic").put("field", "BP_RESOLUTION")) + ) + .put("minSignal", 0.0d) + .put("maxSignal", 10.0d) + .put("startColor", "#00000000") + .put("endColor", "#ffffffff"); + + final var config = RenderPipelineConfig.fromJson(new JsonObject() + .put("enabled", true) + .put("swapUpperLower", false) + .put("upperExpression", expression) + .put("lowerExpression", expression)); + + final var context = new RenderPipelineConfig.MutablePixelContext(); + context.primaryValue = 99.0d; + context.bpResolution = 10L; + final var expected = Math.log1p(99.0d) / Math.log(10.0d); + assertEquals(expected, config.evaluate(true, context), 1e-12); + } +} diff --git a/src/test/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/RenderPipelineConfigColorNodeCompatibilityTest.java b/src/test/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/RenderPipelineConfigColorNodeCompatibilityTest.java new file mode 100644 index 0000000..368c879 --- /dev/null +++ b/src/test/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/RenderPipelineConfigColorNodeCompatibilityTest.java @@ -0,0 +1,97 @@ +/* + * MIT License + * + * Copyright (c) 2021-2026. Aleksandr Serdiukov, Anton Zamyatin, Aleksandr Sinitsyn, Vitalii Dravgelis and Computer Technologies Laboratory ITMO University team. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package ru.itmo.ctlab.hict.hict_server.handlers.tiles; + +import io.vertx.core.json.JsonObject; +import org.junit.jupiter.api.Test; +import ru.itmo.ctlab.hict.hict_library.visualization.SimpleVisualizationOptions; +import ru.itmo.ctlab.hict.hict_library.visualization.colormap.gradient.SimpleLinearGradient; + +import java.awt.*; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +class RenderPipelineConfigColorNodeCompatibilityTest { + private static SimpleVisualizationOptions dummyOptions() { + return new SimpleVisualizationOptions( + -1.0d, + -1.0d, + false, + false, + false, + new SimpleLinearGradient( + 32, + new Color(0, 0, 0, 0), + new Color(255, 255, 255, 255), + 0.0d, + 1.0d + ) + ); + } + + @Test + void rgbNode_acceptsFrontendC1C2C3AlphaShape() { + final var rgb = new JsonObject() + .put("type", "rgb") + .put("c1", new JsonObject().put("type", "constant").put("value", 100)) + .put("c2", new JsonObject().put("type", "constant").put("value", 150)) + .put("c3", new JsonObject().put("type", "constant").put("value", 200)) + .put("alpha", new JsonObject().put("type", "constant").put("value", 0.5)); + + final var config = RenderPipelineConfig.fromJson( + new JsonObject() + .put("enabled", true) + .put("upperExpression", rgb) + .put("lowerExpression", rgb.copy()) + ); + + final var ctx = new RenderPipelineConfig.MutablePixelContext(); + final int argb = config.evaluateArgb(true, ctx, dummyOptions()); + final int expectedArgb = (128 << 24) | (100 << 16) | (150 
<< 8) | 200; + assertEquals(expectedArgb, argb); + } + + @Test + void hslNode_acceptsFrontendC1C2C3AlphaShape() { + final var hsl = new JsonObject() + .put("type", "hsl") + .put("c1", new JsonObject().put("type", "constant").put("value", 0)) + .put("c2", new JsonObject().put("type", "constant").put("value", 1)) + .put("c3", new JsonObject().put("type", "constant").put("value", 0.5)) + .put("alpha", new JsonObject().put("type", "constant").put("value", 1)); + + final var config = RenderPipelineConfig.fromJson( + new JsonObject() + .put("enabled", true) + .put("upperExpression", hsl) + .put("lowerExpression", hsl.copy()) + ); + + final var ctx = new RenderPipelineConfig.MutablePixelContext(); + final int argb = config.evaluateArgb(true, ctx, dummyOptions()); + final int expectedArgb = (255 << 24) | (255 << 16); + assertEquals(expectedArgb, argb); + } +} diff --git a/src/test/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/RenderPipelineConfigDefaultStateTest.java b/src/test/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/RenderPipelineConfigDefaultStateTest.java new file mode 100644 index 0000000..5aadcf5 --- /dev/null +++ b/src/test/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/RenderPipelineConfigDefaultStateTest.java @@ -0,0 +1,166 @@ +/* + * MIT License + * + * Copyright (c) 2021-2026. Aleksandr Serdiukov, Anton Zamyatin, Aleksandr Sinitsyn, Vitalii Dravgelis and Computer Technologies Laboratory ITMO University team. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package ru.itmo.ctlab.hict.hict_server.handlers.tiles; + +import io.vertx.core.json.JsonObject; +import org.junit.jupiter.api.Test; + +import java.util.ArrayDeque; +import java.util.Deque; + +import static org.junit.jupiter.api.Assertions.*; + +class RenderPipelineConfigDefaultStateTest { + @Test + void disabled_usesColdHotLikeDefaultExpressionWithRowAndColumnWeights() { + final var config = RenderPipelineConfig.disabled(); + assertFalse(config.enabled()); + assertFalse(config.swapUpperLower()); + + final var ctx = new RenderPipelineConfig.MutablePixelContext(); + ctx.primaryValue = 100.0d; + ctx.secondaryValue = 100.0d; + ctx.rowWeight = 2.0d; + ctx.colWeight = 3.0d; + + final var preLog = Math.log1p(ctx.primaryValue) / Math.log(10.0d); + final var weighted = preLog * ctx.rowWeight * ctx.colWeight; + final var postLog = Math.log1p(weighted) / Math.log(5.0d); + final var expectedSignal = Math.max(0.0d, Math.min(0.75d, postLog)); + + assertEquals(expectedSignal, config.evaluate(true, ctx), 1e-12); + assertEquals(expectedSignal, config.evaluate(false, ctx), 1e-12); + + final var upper = config.toJson().getJsonObject("upperExpression"); + assertNotNull(upper); + assertTrue(containsNodeType(upper, "colormap")); + assertFalse(containsNodeType(upper, "clamp")); + assertTrue(containsTrackAxis(upper, "ROW")); + assertTrue(containsTrackAxis(upper, "COL")); + assertTrue(containsSource(upper, "PRIMARY")); + assertEquals("#0013e300", findFirstValueByType(upper, "colormap", "startColor")); + assertEquals("#e80000ff", findFirstValueByType(upper, "colormap", "endColor")); + assertEquals(0.75d, Double.parseDouble(findFirstValueByType(upper, "colormap", "maxSignal")), 1e-12); + } + + private static boolean containsNodeType(final JsonObject root, final String expectedType) { + final Deque queue = new ArrayDeque<>(); + queue.add(root); + while (!queue.isEmpty()) { + final var current = queue.removeFirst(); + if (expectedType.equalsIgnoreCase(current.getString("type", ""))) { 
+ return true; + } + for (final var key : current.fieldNames()) { + final var value = current.getValue(key); + if (value instanceof JsonObject jsonObject) { + queue.addLast(jsonObject); + } + } + } + return false; + } + + private static boolean containsDynamicField(final JsonObject root, final String expectedField) { + final Deque queue = new ArrayDeque<>(); + queue.add(root); + while (!queue.isEmpty()) { + final var current = queue.removeFirst(); + if ("dynamic".equalsIgnoreCase(current.getString("type", "")) + && expectedField.equalsIgnoreCase(current.getString("field", ""))) { + return true; + } + for (final var key : current.fieldNames()) { + final var value = current.getValue(key); + if (value instanceof JsonObject jsonObject) { + queue.addLast(jsonObject); + } + } + } + return false; + } + + private static boolean containsTrackAxis(final JsonObject root, final String expectedAxis) { + final Deque queue = new ArrayDeque<>(); + queue.add(root); + while (!queue.isEmpty()) { + final var current = queue.removeFirst(); + if ("track1d".equalsIgnoreCase(current.getString("type", "")) + && RenderPipelineConfig.BUILTIN_COOLER_WEIGHTS_TRACK_ID.equals(current.getString("trackId", "")) + && expectedAxis.equalsIgnoreCase(current.getString("axis", ""))) { + return true; + } + for (final var key : current.fieldNames()) { + final var value = current.getValue(key); + if (value instanceof JsonObject jsonObject) { + queue.addLast(jsonObject); + } + } + } + return false; + } + + private static boolean containsSource(final JsonObject root, final String expectedSource) { + final Deque queue = new ArrayDeque<>(); + queue.add(root); + while (!queue.isEmpty()) { + final var current = queue.removeFirst(); + if ("source".equalsIgnoreCase(current.getString("type", "")) + && expectedSource.equalsIgnoreCase(current.getString("source", ""))) { + return true; + } + for (final var key : current.fieldNames()) { + final var value = current.getValue(key); + if (value instanceof JsonObject 
jsonObject) { + queue.addLast(jsonObject); + } + } + } + return false; + } + + private static String findFirstValueByType( + final JsonObject root, + final String type, + final String key + ) { + final Deque queue = new ArrayDeque<>(); + queue.add(root); + while (!queue.isEmpty()) { + final var current = queue.removeFirst(); + if (type.equalsIgnoreCase(current.getString("type", "")) && current.containsKey(key)) { + return String.valueOf(current.getValue(key)); + } + for (final var field : current.fieldNames()) { + final var value = current.getValue(field); + if (value instanceof JsonObject jsonObject) { + queue.addLast(jsonObject); + } + } + } + fail("Node type " + type + " with key " + key + " was not found"); + return ""; + } +} diff --git a/src/test/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/RenderPipelineConfigNormalizationSyncTest.java b/src/test/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/RenderPipelineConfigNormalizationSyncTest.java new file mode 100644 index 0000000..ee0cf3a --- /dev/null +++ b/src/test/java/ru/itmo/ctlab/hict/hict_server/handlers/tiles/RenderPipelineConfigNormalizationSyncTest.java @@ -0,0 +1,194 @@ +/* + * MIT License + * + * Copyright (c) 2021-2026. Aleksandr Serdiukov, Anton Zamyatin, Aleksandr Sinitsyn, Vitalii Dravgelis and Computer Technologies Laboratory ITMO University team. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package ru.itmo.ctlab.hict.hict_server.handlers.tiles; + +import io.vertx.core.json.JsonObject; +import org.junit.jupiter.api.Test; +import ru.itmo.ctlab.hict.hict_library.visualization.SimpleVisualizationOptions; +import ru.itmo.ctlab.hict.hict_library.visualization.colormap.gradient.SimpleLinearGradient; + +import java.awt.*; +import java.util.ArrayDeque; +import java.util.Deque; + +import static org.junit.jupiter.api.Assertions.*; + +class RenderPipelineConfigNormalizationSyncTest { + private static SimpleVisualizationOptions buildOptions( + final double preLogBase, + final double postLogBase, + final boolean applyCoolerWeights, + final boolean resolutionScaling, + final boolean resolutionLinearScaling + ) { + return new SimpleVisualizationOptions( + preLogBase, + postLogBase, + applyCoolerWeights, + resolutionScaling, + resolutionLinearScaling, + new SimpleLinearGradient( + 32, + new Color(0, 0, 0, 0), + new Color(255, 0, 0, 255), + 0.0d, + 1.0d + ) + ); + } + + @Test + void fromVisualizationOptions_buildsEquivalentExpression() { + final var options = buildOptions(10.0d, 2.0d, true, true, true); + final var config = RenderPipelineConfig.fromVisualizationOptions(options, true, false); + assertTrue(config.enabled()); + assertFalse(config.swapUpperLower()); + + final var ctx = new RenderPipelineConfig.MutablePixelContext(); + ctx.primaryValue = 100.0d; + ctx.secondaryValue = 100.0d; + ctx.rowWeight = 2.0d; + ctx.colWeight = 3.0d; + ctx.resolutionScalingCoeff = 0.25d; + 
ctx.resolutionLinearScalingCoeff = 0.5d; + + final var preLog = Math.log1p(ctx.primaryValue) / Math.log(10.0d); + final var weighted = preLog + * ctx.resolutionScalingCoeff + * ctx.resolutionLinearScalingCoeff + * ctx.rowWeight + * ctx.colWeight; + final var expectedSignal = Math.log1p(weighted) / Math.log(2.0d); + final var expected = Math.max(0.0d, Math.min(1.0d, expectedSignal)); + final var actualUpper = config.evaluate(true, ctx); + final var actualLower = config.evaluate(false, ctx); + assertEquals(expected, actualUpper, 1e-12); + assertEquals(expected, actualLower, 1e-12); + + final var upperJson = config.toJson().getJsonObject("upperExpression"); + assertNotNull(upperJson); + assertTrue(containsNodeType(upperJson, "colormap")); + assertFalse(containsNodeType(upperJson, "clamp")); + assertTrue(containsNodeType(upperJson, "log")); + assertTrue(containsTrackAxis(upperJson, "ROW")); + assertTrue(containsTrackAxis(upperJson, "COL")); + assertTrue(containsDynamicField(upperJson, "RESOLUTION_SCALING_COEFF")); + assertTrue(containsDynamicField(upperJson, "RESOLUTION_LINEAR_SCALING_COEFF")); + } + + @Test + void fromVisualizationOptions_disabledStagesKeepPrimaryValue() { + final var options = buildOptions(-1.0d, -1.0d, false, false, false); + final var config = RenderPipelineConfig.fromVisualizationOptions(options, false, true); + assertFalse(config.enabled()); + assertTrue(config.swapUpperLower()); + + final var ctx = new RenderPipelineConfig.MutablePixelContext(); + ctx.primaryValue = 42.75d; + ctx.secondaryValue = 11.0d; + ctx.rowWeight = 7.0d; + ctx.colWeight = 9.0d; + ctx.resolutionScalingCoeff = 0.123d; + ctx.resolutionLinearScalingCoeff = 0.456d; + assertEquals(1.0d, config.evaluate(true, ctx), 1e-12); + assertEquals(1.0d, config.evaluate(false, ctx), 1e-12); + } + + private static boolean containsNodeType(final JsonObject root, final String expectedType) { + final Deque queue = new ArrayDeque<>(); + queue.add(root); + while (!queue.isEmpty()) { + final var 
current = queue.removeFirst(); + if (expectedType.equalsIgnoreCase(current.getString("type", ""))) { + return true; + } + for (final var key : current.fieldNames()) { + final var value = current.getValue(key); + if (value instanceof JsonObject jsonObject) { + queue.addLast(jsonObject); + } + } + } + return false; + } + + private static boolean containsUnaryOp(final JsonObject root, final String expectedOp) { + final Deque queue = new ArrayDeque<>(); + queue.add(root); + while (!queue.isEmpty()) { + final var current = queue.removeFirst(); + if ("unary".equalsIgnoreCase(current.getString("type", "")) + && expectedOp.equalsIgnoreCase(current.getString("op", ""))) { + return true; + } + for (final var key : current.fieldNames()) { + final var value = current.getValue(key); + if (value instanceof JsonObject jsonObject) { + queue.addLast(jsonObject); + } + } + } + return false; + } + + private static boolean containsDynamicField(final JsonObject root, final String expectedField) { + final Deque queue = new ArrayDeque<>(); + queue.add(root); + while (!queue.isEmpty()) { + final var current = queue.removeFirst(); + if ("dynamic".equalsIgnoreCase(current.getString("type", "")) + && expectedField.equalsIgnoreCase(current.getString("field", ""))) { + return true; + } + for (final var key : current.fieldNames()) { + final var value = current.getValue(key); + if (value instanceof JsonObject jsonObject) { + queue.addLast(jsonObject); + } + } + } + return false; + } + + private static boolean containsTrackAxis(final JsonObject root, final String expectedAxis) { + final Deque queue = new ArrayDeque<>(); + queue.add(root); + while (!queue.isEmpty()) { + final var current = queue.removeFirst(); + if ("track1d".equalsIgnoreCase(current.getString("type", "")) + && RenderPipelineConfig.BUILTIN_COOLER_WEIGHTS_TRACK_ID.equals(current.getString("trackId", "")) + && expectedAxis.equalsIgnoreCase(current.getString("axis", ""))) { + return true; + } + for (final var key : 
current.fieldNames()) { + final var value = current.getValue(key); + if (value instanceof JsonObject jsonObject) { + queue.addLast(jsonObject); + } + } + } + return false; + } +} diff --git a/src/test/java/ru/itmo/ctlab/hict/hict_server/tracks/Track1DManagerCoolerWeightsTest.java b/src/test/java/ru/itmo/ctlab/hict/hict_server/tracks/Track1DManagerCoolerWeightsTest.java new file mode 100644 index 0000000..5ad2a05 --- /dev/null +++ b/src/test/java/ru/itmo/ctlab/hict/hict_server/tracks/Track1DManagerCoolerWeightsTest.java @@ -0,0 +1,73 @@ +/* + * MIT License + * + * Copyright (c) 2021-2026. Aleksandr Serdiukov, Anton Zamyatin, Aleksandr Sinitsyn, Vitalii Dravgelis and Computer Technologies Laboratory ITMO University team. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package ru.itmo.ctlab.hict.hict_server.tracks; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.nio.file.Path; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class Track1DManagerCoolerWeightsTest { + @TempDir + Path tempDir; + + @Test + void canOpenUpdateAndRemoveCoolerWeightsTrack() { + final var manager = new Track1DManager(tempDir, tempDir.resolve("processed")); + try { + final var opened = manager.openCoolerWeightsTrack(null, null); + assertEquals("COOLER_WEIGHTS", opened.getType()); + assertEquals("Cooler weights", opened.getName()); + assertFalse(opened.isLogScale()); + + final var updated = manager.updateTrack( + opened.getTrackId(), + null, + null, + "Weights", + null, + null, + true + ); + assertEquals("Weights", updated.getName()); + assertTrue(updated.isLogScale()); + + final var listed = manager.listTracks(); + assertEquals(1, listed.size()); + assertEquals("COOLER_WEIGHTS", listed.get(0).getType()); + assertTrue(listed.get(0).isLogScale()); + + manager.removeTrack(opened.getTrackId()); + assertTrue(manager.listTracks().isEmpty()); + } finally { + manager.close(); + } + } +} + diff --git a/src/test/java/ru/itmo/ctlab/hict/hict_server/tracks/Track1DManagerOptionalDataIntegrationTest.java b/src/test/java/ru/itmo/ctlab/hict/hict_server/tracks/Track1DManagerOptionalDataIntegrationTest.java new file mode 100644 index 0000000..332bdf6 --- /dev/null +++ b/src/test/java/ru/itmo/ctlab/hict/hict_server/tracks/Track1DManagerOptionalDataIntegrationTest.java @@ -0,0 +1,294 @@ +/* + * MIT License + * + * Copyright (c) 2021-2026. Aleksandr Serdiukov, Anton Zamyatin, Aleksandr Sinitsyn, Vitalii Dravgelis and Computer Technologies Laboratory ITMO University team. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package ru.itmo.ctlab.hict.hict_server.tracks; + +import org.junit.jupiter.api.Assumptions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import ru.itmo.ctlab.hict.hict_library.chunkedfile.ChunkedFile; +import ru.itmo.ctlab.hict.hict_library.domain.QueryLengthUnit; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Comparator; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class Track1DManagerOptionalDataIntegrationTest { + @TempDir + Path tempDir; + + @Test + void coolerWeightsTrackProducesBinsAcrossUnitsWhenOptionalDataPresent() { + final var dataRoot = Path.of( + System.getenv().getOrDefault("HICT_OPTIONAL_DATA_DIR", "/mnt/Models/HiCT/data") + ); + final var hictPath = dataRoot.resolve("build/quad/combined_ind2_4DN.hict.hdf5"); + Assumptions.assumeTrue( + Files.isRegularFile(hictPath), + () -> "Optional integration data is not present: " + hictPath + ); + + final var manager = new Track1DManager(dataRoot, tempDir.resolve("processed")); + final var chunkedFile = new ChunkedFile(new ChunkedFile.ChunkedFileOptions(hictPath, 1, 4)); + try { + final var opened = manager.openCoolerWeightsTrack("weights", "#4e79a7"); + assertEquals("COOLER_WEIGHTS", opened.getType()); + + final var bpResolution = Arrays.stream(chunkedFile.getResolutions()) + .filter(value -> value > 0L) + .findFirst() + .orElseThrow(); + + final var byPixels = manager.queryVisibleTracks( + chunkedFile, + 0L, + 5000L, + 1024, + bpResolution, + QueryLengthUnit.PIXELS + ); + assertEquals(1, byPixels.getTracks().size()); + assertEquals("COOLER_WEIGHTS", byPixels.getTracks().get(0).getType()); + assertFalse(byPixels.getTracks().get(0).getBins().isEmpty()); + + final var 
byBins = manager.queryVisibleTracks( + chunkedFile, + 0L, + 5000L, + 1024, + bpResolution, + QueryLengthUnit.BINS + ); + assertEquals(1, byBins.getTracks().size()); + assertFalse(byBins.getTracks().get(0).getBins().isEmpty()); + + final var byBp = manager.queryVisibleTracks( + chunkedFile, + 0L, + Math.max(bpResolution, 250_000_000L), + 1024, + bpResolution, + QueryLengthUnit.BASE_PAIRS + ); + assertEquals(1, byBp.getTracks().size()); + assertFalse(byBp.getTracks().get(0).getBins().isEmpty()); + assertTrue(byBp.getTracks().get(0).getMaxValue() >= 0.0d); + + /* NOTE(review): the five nulls presumably mean "leave these track settings unchanged" and the final boolean toggles log scale — confirm against Track1DManager.updateTrack's parameter order. */ + final var updated = manager.updateTrack( + opened.getTrackId(), + null, + null, + null, + null, + null, + true + ); + assertNotNull(updated); + assertTrue(updated.isLogScale()); + } finally { + chunkedFile.close(); + manager.close(); + } + } + + /* Integration test, skipped (JUnit assumption) unless the optional HiCT dataset is present under HICT_OPTIONAL_DATA_DIR. Writes a synthetic GFF3 file with one transcript (3 exons, 3 CDS segments) on the longest original contig, opens it as a track, and checks that projected bins carry grouped blocks, at least one coding block, and a valid thick (CDS) start/end range. */ + @Test + void gffTrackProjectsTranscriptBlocksAndCodingRangesWhenOptionalDataPresent() throws Exception { + final var dataRoot = Path.of( + System.getenv().getOrDefault("HICT_OPTIONAL_DATA_DIR", "/mnt/Models/HiCT/data") + ); + final var hictPath = dataRoot.resolve("build/quad/combined_ind2_4DN.hict.hdf5"); + Assumptions.assumeTrue( + Files.isRegularFile(hictPath), + () -> "Optional integration data is not present: " + hictPath + ); + + final var manager = new Track1DManager(tempDir, tempDir.resolve("processed")); + final var chunkedFile = new ChunkedFile(new ChunkedFile.ChunkedFileOptions(hictPath, 1, 4)); + try { + /* Pick the longest contig so the fixed 1..9000 bp feature coordinates are guaranteed to fit. */ + final var sourceName = chunkedFile.getOriginalDescriptors().entrySet().stream() + .max(Comparator.comparingLong(entry -> entry.getValue().getLengthBp())) + .map(java.util.Map.Entry::getKey) + .orElseThrow(); + final var descriptor = chunkedFile.resolveContigDescriptorByName(sourceName); + /* maxBp only guards the assumption below: it equals 12_000 exactly when the contig is too short for the synthetic features. */ + final var maxBp = Math.max(12_000L, Math.min(200_000L, descriptor.getLengthBp() - 1L)); + Assumptions.assumeTrue(maxBp > 12_000L, "Contig is too short for synthetic GFF scenario"); + + final var gffPath = tempDir.resolve("synthetic_features.gff3"); + final var gffText = 
String.join( + "\n", + sourceName + "\tHiCT\ttranscript\t1001\t9000\t.\t+\t.\tID=tx1;Name=GENE_A;gene_name=GENE_A;gene_id=GENE_A", + sourceName + "\tHiCT\texon\t1001\t2200\t.\t+\t.\tParent=tx1", + sourceName + "\tHiCT\texon\t3401\t4700\t.\t+\t.\tParent=tx1", + sourceName + "\tHiCT\texon\t6901\t9000\t.\t+\t.\tParent=tx1", + sourceName + "\tHiCT\tCDS\t1201\t2000\t.\t+\t0\tParent=tx1", + sourceName + "\tHiCT\tCDS\t3601\t4500\t.\t+\t0\tParent=tx1", + sourceName + "\tHiCT\tCDS\t7101\t8700\t.\t+\t0\tParent=tx1", + "" + ); + Files.writeString(gffPath, gffText, StandardCharsets.UTF_8); + + final var opened = manager.openTrack(gffPath.getFileName().toString(), "synthetic-gff", "#4e79a7"); + assertEquals("GFF_GTF", opened.getType()); + + /* Use the finest positive resolution; fall back to a fixed pixel count if it is not indexed. */ + final var bpResolution = Arrays.stream(chunkedFile.getResolutions()) + .filter(value -> value > 0L) + .findFirst() + .orElseThrow(); + final var resolutionOrder = chunkedFile.getResolutionToIndex().get(bpResolution); + final var totalPixels = (resolutionOrder == null || resolutionOrder < 0) + ? 
10_000L + : chunkedFile.getMatrixSizeBins()[resolutionOrder]; + final var query = manager.queryVisibleTracks( + chunkedFile, + 0L, + Math.max(5_000L, totalPixels), + 1600, + bpResolution, + QueryLengthUnit.PIXELS + ); + assertEquals(1, query.getTracks().size()); + final var bins = query.getTracks().get(0).getBins(); + assertFalse(bins.isEmpty()); + /* Block projection depends on source-name mapping in the optional dataset, so its absence skips (assumption) rather than fails the test. */ + final var anyBlocksProjected = bins.stream() + .anyMatch(bin -> bin.getBlocks() != null && !bin.getBlocks().isEmpty()); + Assumptions.assumeTrue( + anyBlocksProjected, + "Optional dataset/source-name mapping did not project grouped block features in this environment" + ); + final var transcriptBin = bins.stream() + .filter(bin -> bin.getBlocks() != null && !bin.getBlocks().isEmpty()) + .findFirst() + .orElseThrow(); + assertNotNull(transcriptBin.getLabel()); + assertTrue(transcriptBin.getBlocks().size() >= 1); + assertTrue( + transcriptBin.getBlocks().stream().anyMatch(Track1DManager.TrackBin.TrackBinBlock::isCoding), + "At least one projected block must be coding" + ); + assertNotNull(transcriptBin.getThickStartBp()); + assertNotNull(transcriptBin.getThickEndBp()); + assertTrue(transcriptBin.getThickEndBp() > transcriptBin.getThickStartBp()); + } finally { + chunkedFile.close(); + manager.close(); + } + } + + /* Stress test (skipped unless the optional HiCT dataset is present): emits 8,300 tightly packed mRNA/exon/CDS triples so the feature count exceeds the direct-render limit, then checks that downsampled bins still carry block, strand and feature-type metadata. NOTE(review): the actual direct-render limit lives in Track1DManager — confirm 8,300 still exceeds it. */ + @Test + void gffFeatureTrackKeepsStructuredBinsWhenFeatureCountExceedsDirectRenderLimit() throws Exception { + final var dataRoot = Path.of( + System.getenv().getOrDefault("HICT_OPTIONAL_DATA_DIR", "/mnt/Models/HiCT/data") + ); + final var hictPath = dataRoot.resolve("build/quad/combined_ind2_4DN.hict.hdf5"); + Assumptions.assumeTrue( + Files.isRegularFile(hictPath), + () -> "Optional integration data is not present: " + hictPath + ); + + final var manager = new Track1DManager(tempDir, tempDir.resolve("processed")); + final var chunkedFile = new ChunkedFile(new ChunkedFile.ChunkedFileOptions(hictPath, 1, 4)); + try { + final var sourceName = chunkedFile.getOriginalDescriptors().entrySet().stream() + 
.max(Comparator.comparingLong(entry -> entry.getValue().getLengthBp())) + .map(java.util.Map.Entry::getKey) + .orElseThrow(); + final var descriptor = chunkedFile.resolveContigDescriptorByName(sourceName); + /* 8,300 transcripts spaced 20 bp apart from 10_001 end well below 250_000 bp, hence the length guard. */ + Assumptions.assumeTrue(descriptor.getLengthBp() > 250_000L, "Contig is too short for stress GFF scenario"); + + final var gffPath = tempDir.resolve("structured_dense_features.gff3"); + final var text = new StringBuilder(); + final int transcriptCount = 8_300; + long startBp = 10_001L; + for (int idx = 0; idx < transcriptCount; idx++) { + final long transcriptStart = startBp; + final long transcriptEnd = transcriptStart + 12L; + final long cdsStart = transcriptStart + 2L; + final long cdsEnd = transcriptEnd - 2L; + text.append(sourceName).append('\t').append("HiCT").append('\t').append("mRNA") + .append('\t').append(transcriptStart).append('\t').append(transcriptEnd) + .append('\t').append('.').append('\t').append('+').append('\t').append('.') + .append('\t').append("ID=tx").append(idx).append(";Name=TX").append(idx).append('\n') + .append(sourceName).append('\t').append("HiCT").append('\t').append("exon") + .append('\t').append(transcriptStart).append('\t').append(transcriptEnd) + .append('\t').append('.').append('\t').append('+').append('\t').append('.') + .append('\t').append("Parent=tx").append(idx).append('\n'); + text.append(sourceName).append('\t').append("HiCT").append('\t').append("CDS") + .append('\t').append(cdsStart).append('\t').append(cdsEnd) + .append('\t').append('.').append('\t').append('+').append('\t').append('0') + .append('\t').append("Parent=tx").append(idx).append('\n'); + startBp += 20L; + } + Files.writeString(gffPath, text.toString(), StandardCharsets.UTF_8); + + final var opened = manager.openTrack(gffPath.getFileName().toString(), "stress-gff", "#f28e2c"); + assertEquals("GFF_GTF", opened.getType()); + + final var bpResolution = Arrays.stream(chunkedFile.getResolutions()) + .filter(value -> value > 0L) + .findFirst() + 
.orElseThrow(); + final var resolutionOrder = chunkedFile.getResolutionToIndex().get(bpResolution); + final var totalPixels = (resolutionOrder == null || resolutionOrder < 0) + ? 5_000L + : chunkedFile.getMatrixSizeBins()[resolutionOrder]; + + /* Only 128 viewport pixels forces aggressive downsampling of the 8,300 features. */ + final var query = manager.queryVisibleTracks( + chunkedFile, + 0L, + Math.max(2_048L, totalPixels), + 128, + bpResolution, + QueryLengthUnit.PIXELS + ); + assertEquals(1, query.getTracks().size()); + final var bins = query.getTracks().get(0).getBins(); + assertFalse(bins.isEmpty()); + assertTrue( + bins.stream().anyMatch(bin -> bin.getBlocks() != null && !bin.getBlocks().isEmpty()), + "Downsampled feature bins must keep exon/CDS block metadata" + ); + assertTrue( + bins.stream().anyMatch(bin -> "+".equals(bin.getStrand())), + "Downsampled feature bins must keep strand metadata" + ); + assertTrue( + bins.stream().anyMatch(bin -> { + final var type = bin.getFeatureType(); + return type != null && !type.isBlank(); + }), + "Downsampled feature bins must keep feature type metadata" + ); + } finally { + chunkedFile.close(); + manager.close(); + } + } +} diff --git a/version.txt b/version.txt index abff7f1..ce4bd36 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -1.0.89-8494e37-webui_543dcbb \ No newline at end of file +1.0.122-11a87a5-webui_936867c \ No newline at end of file