diff --git a/packages/dicomweb-serve/app.js b/packages/dicomweb-serve/app.js new file mode 100644 index 00000000..662bcc92 --- /dev/null +++ b/packages/dicomweb-serve/app.js @@ -0,0 +1,41 @@ +var createError = require('http-errors'); +var express = require('express'); +var path = require('path'); +var cookieParser = require('cookie-parser'); +var logger = require('morgan'); + +var indexRouter = require('./routes/index'); +var usersRouter = require('./routes/users'); + +var app = express(); + +// view engine setup +app.set('views', path.join(__dirname, 'views')); +app.set('view engine', 'jade'); + +app.use(logger('dev')); +app.use(express.json()); +app.use(express.urlencoded({ extended: false })); +app.use(cookieParser()); +app.use(express.static(path.join(__dirname, 'public'))); + +app.use('/', indexRouter); +app.use('/users', usersRouter); + +// catch 404 and forward to error handler +app.use(function(req, res, next) { + next(createError(404)); +}); + +// error handler +app.use(function(err, req, res, next) { + // set locals, only providing error in development + res.locals.message = err.message; + res.locals.error = req.app.get('env') === 'development' ? err : {}; + + // render the error page + res.status(err.status || 500); + res.render('error'); +}); + +module.exports = app; diff --git a/packages/dicomweb-serve/bin/www b/packages/dicomweb-serve/bin/www new file mode 100644 index 00000000..10c8d3c9 --- /dev/null +++ b/packages/dicomweb-serve/bin/www @@ -0,0 +1,90 @@ +#!/usr/bin/env node + +/** + * Module dependencies. + */ + +var app = require('../app'); +var debug = require('debug')('dicomweb-serve:server'); +var http = require('http'); + +/** + * Get port from environment and store in Express. + */ + +var port = normalizePort(process.env.PORT || '3000'); +app.set('port', port); + +/** + * Create HTTP server. + */ + +var server = http.createServer(app); + +/** + * Listen on provided port, on all network interfaces. + */ + +server.listen(port); +server.on('error', onError); +server.on('listening', onListening); + +/** + * Normalize a port into a number, string, or false. + */ + +function normalizePort(val) { + var port = parseInt(val, 10); + + if (isNaN(port)) { + // named pipe + return val; + } + + if (port >= 0) { + // port number + return port; + } + + return false; +} + +/** + * Event listener for HTTP server "error" event. + */ + +function onError(error) { + if (error.syscall !== 'listen') { + throw error; + } + + var bind = typeof port === 'string' + ? 'Pipe ' + port + : 'Port ' + port; + + // handle specific listen errors with friendly messages + switch (error.code) { + case 'EACCES': + console.error(bind + ' requires elevated privileges'); + process.exit(1); + break; + case 'EADDRINUSE': + console.error(bind + ' is already in use'); + process.exit(1); + break; + default: + throw error; + } +} + +/** + * Event listener for HTTP server "listening" event. + */ + +function onListening() { + var addr = server.address(); + var bind = typeof addr === 'string' + ? 
'pipe ' + addr + : 'port ' + addr.port; + debug('Listening on ' + bind); +} diff --git a/packages/dicomweb-serve/package.json b/packages/dicomweb-serve/package.json new file mode 100644 index 00000000..721730e3 --- /dev/null +++ b/packages/dicomweb-serve/package.json @@ -0,0 +1,16 @@ +{ + "name": "dicomweb-serve", + "version": "0.0.0", + "private": true, + "scripts": { + "start": "node ./bin/www" + }, + "dependencies": { + "cookie-parser": "~1.4.4", + "debug": "~2.6.9", + "express": "~4.16.1", + "http-errors": "~1.6.3", + "jade": "~1.11.0", + "morgan": "~1.9.1" + } +} diff --git a/packages/dicomweb-serve/public/stylesheets/style.css b/packages/dicomweb-serve/public/stylesheets/style.css new file mode 100644 index 00000000..9453385b --- /dev/null +++ b/packages/dicomweb-serve/public/stylesheets/style.css @@ -0,0 +1,8 @@ +body { + padding: 50px; + font: 14px "Lucida Grande", Helvetica, Arial, sans-serif; +} + +a { + color: #00B7FF; +} diff --git a/packages/dicomweb-serve/routes/index.js b/packages/dicomweb-serve/routes/index.js new file mode 100644 index 00000000..ecca96a5 --- /dev/null +++ b/packages/dicomweb-serve/routes/index.js @@ -0,0 +1,9 @@ +var express = require('express'); +var router = express.Router(); + +/* GET home page. */ +router.get('/', function(req, res, next) { + res.render('index', { title: 'Express' }); +}); + +module.exports = router; diff --git a/packages/dicomweb-serve/routes/users.js b/packages/dicomweb-serve/routes/users.js new file mode 100644 index 00000000..623e4302 --- /dev/null +++ b/packages/dicomweb-serve/routes/users.js @@ -0,0 +1,9 @@ +var express = require('express'); +var router = express.Router(); + +/* GET users listing. */ +router.get('/', function(req, res, next) { + res.send('respond with a resource'); +}); + +module.exports = router; diff --git a/packages/dicomweb-serve/views/error.jade b/packages/dicomweb-serve/views/error.jade new file mode 100644 index 00000000..51ec12c6 --- /dev/null +++ b/packages/dicomweb-serve/views/error.jade @@ -0,0 +1,6 @@ +extends layout + +block content + h1= message + h2= error.status + pre #{error.stack} diff --git a/packages/dicomweb-serve/views/index.jade b/packages/dicomweb-serve/views/index.jade new file mode 100644 index 00000000..3d63b9a0 --- /dev/null +++ b/packages/dicomweb-serve/views/index.jade @@ -0,0 +1,5 @@ +extends layout + +block content + h1= title + p Welcome to #{title} diff --git a/packages/dicomweb-serve/views/layout.jade b/packages/dicomweb-serve/views/layout.jade new file mode 100644 index 00000000..15af079b --- /dev/null +++ b/packages/dicomweb-serve/views/layout.jade @@ -0,0 +1,7 @@ +doctype html +html + head + title= title + link(rel='stylesheet', href='/stylesheets/style.css') + body + block content diff --git a/packages/static-cs-lite/lib/api/getRenderedBuffer.js b/packages/static-cs-lite/lib/api/getRenderedBuffer.js index 03d1982a..3c138dbc 100644 --- a/packages/static-cs-lite/lib/api/getRenderedBuffer.js +++ b/packages/static-cs-lite/lib/api/getRenderedBuffer.js @@ -12,7 +12,12 @@ const canvasImageToBuffer = require("../adapters/canvasImageToBuffer"); * @param {*} metadata * @param {*} doneCallback Callback method that is invoked once image is rendered */ -function getRenderedBuffer(transferSyntaxUid, decodedPixelData, metadata, doneCallback) { +function getRenderedBuffer( + transferSyntaxUid, + decodedPixelData, + metadata, + doneCallback, +) { const { csCore, canvas, context } = setUpEnv(); function doneRendering(customEvent = {}) { @@ -32,7 +37,12 @@ function 
getRenderedBuffer(transferSyntaxUid, decodedPixelData, metadata, doneCa } try { - const imageObj = createImage(transferSyntaxUid, decodedPixelData, metadata, canvas); + const imageObj = createImage( + transferSyntaxUid, + decodedPixelData, + metadata, + canvas, + ); canvas.addEventListener(csCore.EVENTS.IMAGE_RENDERED, doneRendering); csCore.renderToCanvas(canvas, imageObj); diff --git a/packages/static-cs-lite/lib/api/renderToCanvas.js b/packages/static-cs-lite/lib/api/renderToCanvas.js index e10d818b..ca64a290 100644 --- a/packages/static-cs-lite/lib/api/renderToCanvas.js +++ b/packages/static-cs-lite/lib/api/renderToCanvas.js @@ -25,7 +25,13 @@ function setContextStyles(ctx, styles, styleKeys) { } }); } -function renderTextToCanvas(canvas, text, pos, size, styles = { textStyle: DEFAULT_STYLES.textStyle }) { +function renderTextToCanvas( + canvas, + text, + pos, + size, + styles = { textStyle: DEFAULT_STYLES.textStyle }, +) { const [x, y] = pos; const ctx = canvas.getContext("2d"); setContextStyles(ctx, styles, ["textStyle"]); @@ -39,7 +45,12 @@ function renderTextToCanvas(canvas, text, pos, size, styles = { textStyle: DEFAU }; } -function renderLinesToCanvas(canvas, points, size, styles = { lineStyle: DEFAULT_STYLES.lineStyle }) { +function renderLinesToCanvas( + canvas, + points, + size, + styles = { lineStyle: DEFAULT_STYLES.lineStyle }, +) { const ctx = canvas.getContext("2d"); if (!points.length || !ctx) { @@ -60,7 +71,12 @@ function renderLinesToCanvas(canvas, points, size, styles = { lineStyle: DEFAULT ctx.stroke(); } -function renderPointToCanvas(canvas, point, size, styles = { pointSyle: DEFAULT_STYLES.pointStyle }) { +function renderPointToCanvas( + canvas, + point, + size, + styles = { pointSyle: DEFAULT_STYLES.pointStyle }, +) { const ctx = canvas.getContext("2d"); if (!point || !ctx) { @@ -75,7 +91,13 @@ function renderPointToCanvas(canvas, point, size, styles = { pointSyle: DEFAULT_ ctx.fill(); } -function renderHLineToCanvas(canvas, position, width, height, styles = { lineStyle: DEFAULT_STYLES.lineStyle }) { +function renderHLineToCanvas( + canvas, + position, + width, + height, + styles = { lineStyle: DEFAULT_STYLES.lineStyle }, +) { const points = [ [position[0], position[1]], [position[0] + width, position[1]], @@ -127,7 +149,7 @@ function renderPointsToCanvas( styles = { lineStyle: DEFAULT_STYLES.lineStyle, pointStyle: DEFAULT_STYLES.pointStyle, - } + }, ) { points.forEach((point, index) => { const useStyles = { ...styles }; @@ -158,13 +180,31 @@ function renderContentToCanvas(enabledElement, content, styles) { let result; switch (content.type) { case "text": - result = renderTextToCanvas(enabledElement.canvas, content.text, content.position, content.size, styles); + result = renderTextToCanvas( + enabledElement.canvas, + content.text, + content.position, + content.size, + styles, + ); break; case "hLine": - result = renderHLineToCanvas(enabledElement.canvas, content.position, content.width, content.height * content.size, styles); + result = renderHLineToCanvas( + enabledElement.canvas, + content.position, + content.width, + content.height * content.size, + styles, + ); break; case "points": - result = renderPointsToCanvas(enabledElement.canvas, content.points, content.strategy, content.size, styles); + result = renderPointsToCanvas( + enabledElement.canvas, + content.points, + content.strategy, + content.size, + styles, + ); break; default: throw new Error(`Unknown type ${content.type} in ${content}`); diff --git a/packages/static-cs-lite/lib/image/createImage.js 
b/packages/static-cs-lite/lib/image/createImage.js index 53f8c8c1..454247c4 100644 --- a/packages/static-cs-lite/lib/image/createImage.js +++ b/packages/static-cs-lite/lib/image/createImage.js @@ -1,4 +1,3 @@ -/* eslint-disable no-param-reassign */ const dcmjs = require("dcmjs"); const { imageFrameUtils } = require("../util"); @@ -13,8 +12,16 @@ const { imageFrameUtils } = require("../util"); * @param {*} options * @returns */ -function createImage(transferSyntax, decodedPixelData, metadata, canvas, options = {}) { - const dataSet = dcmjs.data.DicomMetaDictionary.naturalizeDataset(JSON.parse(JSON.stringify(metadata))); +function createImage( + transferSyntax, + decodedPixelData, + metadata, + canvas, + options = {}, +) { + const dataSet = dcmjs.data.DicomMetaDictionary.naturalizeDataset( + JSON.parse(JSON.stringify(metadata)), + ); const imageFrame = imageFrameUtils.get.fromDataset(dataSet, decodedPixelData); const { convertFloatPixelDataToInt, targetBuffer } = options; @@ -22,7 +29,10 @@ function createImage(transferSyntax, decodedPixelData, metadata, canvas, options // If we have a target buffer that was written to in the // Decode task, point the image to it here. // We can't have done it within the thread incase it was a SharedArrayBuffer. - const alreadyTyped = imageFrameUtils.convert.pixelDataToTargetBuffer(imageFrame, targetBuffer); + const alreadyTyped = imageFrameUtils.convert.pixelDataToTargetBuffer( + imageFrame, + targetBuffer, + ); const originalDataConstructor = imageFrame.pixelData.constructor; // setup the canvas context @@ -53,7 +63,10 @@ function createImage(transferSyntax, decodedPixelData, metadata, canvas, options // convert color space if (isColorImage) { const context = canvas.getContext("2d"); - const imageData = context.createImageData(imageFrame.columns, imageFrame.rows); + const imageData = context.createImageData( + imageFrame.columns, + imageFrame.rows, + ); // imageData.data is being changed by reference. imageFrameUtils.convert.colorSpace(imageFrame, imageData.data); @@ -65,9 +78,16 @@ function createImage(transferSyntax, decodedPixelData, metadata, canvas, options } } - if ((!imageFrame.smallestPixelValue || !imageFrame.largestPixelValue || imageFrame.pixelData.constructor, originalDataConstructor)) { + if ( + (!imageFrame.smallestPixelValue || + !imageFrame.largestPixelValue || + imageFrame.pixelData.constructor, + originalDataConstructor) + ) { // calculate smallest and largest PixelValue of the converted pixelData - const { min, max } = imageFrameUtils.get.pixelDataMinMax(imageFrame.pixelData); + const { min, max } = imageFrameUtils.get.pixelDataMinMax( + imageFrame.pixelData, + ); imageFrame.smallestPixelValue = min; imageFrame.largestPixelValue = max; @@ -98,7 +118,10 @@ function createImage(transferSyntax, decodedPixelData, metadata, canvas, options // If pixel data is intrinsically floating 32 array, we convert it to int for // display in cornerstone. For other cases when pixel data is typed as // Float32Array for scaling; this conversion is not needed. 
- if (imageFrame.pixelData instanceof Float32Array && convertFloatPixelDataToInt) { + if ( + imageFrame.pixelData instanceof Float32Array && + convertFloatPixelDataToInt + ) { const floatPixelData = imageFrame.pixelData; const results = imageFrameUtils.get.pixelDataIntType(floatPixelData); @@ -126,7 +149,11 @@ function createImage(transferSyntax, decodedPixelData, metadata, canvas, options } // Modality LUT - if (modalityLUTSequence && modalityLUTSequence.length > 0 && imageFrameUtils.is.modalityLUT(sopClassUID)) { + if ( + modalityLUTSequence && + modalityLUTSequence.length > 0 && + imageFrameUtils.is.modalityLUT(sopClassUID) + ) { image.modalityLUT = modalityLUTSequence[0]; } diff --git a/packages/static-cs-lite/lib/sandbox/index.js b/packages/static-cs-lite/lib/sandbox/index.js index 48a72b02..e7ea3e99 100644 --- a/packages/static-cs-lite/lib/sandbox/index.js +++ b/packages/static-cs-lite/lib/sandbox/index.js @@ -22,7 +22,11 @@ function setUpEnvSandbox() { const csCore = context.require("cornerstone-core"); - return { csCore, context, canvas: context.window.document.querySelector("canvas") }; + return { + csCore, + context, + canvas: context.window.document.querySelector("canvas"), + }; } module.exports = setUpEnvSandbox; diff --git a/packages/static-cs-lite/lib/util/imageFrame/convert/color/colorSpace.js b/packages/static-cs-lite/lib/util/imageFrame/convert/color/colorSpace.js index c89450f2..6077295a 100644 --- a/packages/static-cs-lite/lib/util/imageFrame/convert/color/colorSpace.js +++ b/packages/static-cs-lite/lib/util/imageFrame/convert/color/colorSpace.js @@ -30,7 +30,9 @@ function colorSpace(imageFrame, rgbaBuffer) { convertYBRFull(imageFrame, rgbaBuffer); break; default: - throw new Error(`No color space conversion for photometric interpretation ${photometricInterpretation}`); + throw new Error( + `No color space conversion for photometric interpretation ${photometricInterpretation}`, + ); } } diff --git a/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertPALETTECOLOR.js b/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertPALETTECOLOR.js index 305409eb..d01cba32 100755 --- a/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertPALETTECOLOR.js +++ b/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertPALETTECOLOR.js @@ -1,4 +1,3 @@ -/* eslint-disable no-plusplus, no-param-reassign, no-bitwise */ function convertLUTto8Bit(lut, shift) { const numEntries = lut.length; const cleanedLUT = new Uint8ClampedArray(numEntries); @@ -30,7 +29,8 @@ function converter(imageFrame, rgbaBuffer) { let rgbaIndex = 0; const start = imageFrame.redPaletteColorLookupTableDescriptor[1]; - const shift = imageFrame.redPaletteColorLookupTableDescriptor[2] === 8 ? 0 : 8; + const shift = + imageFrame.redPaletteColorLookupTableDescriptor[2] === 8 ? 
0 : 8; const rDataCleaned = convertLUTto8Bit(rData, shift); const gDataCleaned = convertLUTto8Bit(gData, shift); diff --git a/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertRGBColorByPixel.js b/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertRGBColorByPixel.js index d375ea44..ee07c7ca 100755 --- a/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertRGBColorByPixel.js +++ b/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertRGBColorByPixel.js @@ -1,5 +1,3 @@ -/* eslint-disable no-plusplus, no-param-reassign */ - /** * Convert pixel data with RGB (by pixel) Photometric Interpretation to RGBA * diff --git a/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertRGBColorByPlane.js b/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertRGBColorByPlane.js index 4228c3c0..ff924865 100755 --- a/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertRGBColorByPlane.js +++ b/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertRGBColorByPlane.js @@ -1,4 +1,3 @@ -/* eslint-disable no-plusplus, no-param-reassign */ const assertArrayDivisibility = require("../../../assertArrayDivisibility"); /** @@ -9,7 +8,12 @@ const assertArrayDivisibility = require("../../../assertArrayDivisibility"); * @returns {void} */ function converter(imageFrame, rgbaBuffer) { - if (!assertArrayDivisibility(imageFrame, 3, ["decodeRGB: rgbBuffer must not be undefined", "decodeRGB: rgbBuffer length must be divisble by 3"])) { + if ( + !assertArrayDivisibility(imageFrame, 3, [ + "decodeRGB: rgbBuffer must not be undefined", + "decodeRGB: rgbBuffer length must be divisble by 3", + ]) + ) { return; } diff --git a/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertYBRFull422ByPixel.js b/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertYBRFull422ByPixel.js index 7f626013..76e0340d 100755 --- a/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertYBRFull422ByPixel.js +++ b/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertYBRFull422ByPixel.js @@ -1,4 +1,3 @@ -/* eslint-disable no-plusplus, no-param-reassign */ const assertArrayDivisibility = require("../../../assertArrayDivisibility"); /** @@ -9,7 +8,12 @@ const assertArrayDivisibility = require("../../../assertArrayDivisibility"); * @returns {void} */ function converter(imageFrame, rgbaBuffer) { - if (!assertArrayDivisibility(imageFrame, 2, ["decodeRGB: ybrBuffer must not be undefined", "decodeRGB: ybrBuffer length must be divisble by 2"])) { + if ( + !assertArrayDivisibility(imageFrame, 2, [ + "decodeRGB: ybrBuffer must not be undefined", + "decodeRGB: ybrBuffer length must be divisble by 2", + ]) + ) { return; } diff --git a/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertYBRFullByPixel.js b/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertYBRFullByPixel.js index 259be27d..c2106cc0 100755 --- a/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertYBRFullByPixel.js +++ b/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertYBRFullByPixel.js @@ -1,4 +1,3 @@ -/* eslint-disable no-plusplus, no-param-reassign */ const assertArrayDivisibility = require("../../../assertArrayDivisibility"); /** @@ -9,7 +8,12 @@ const assertArrayDivisibility = require("../../../assertArrayDivisibility"); * @returns {void} */ function converter(imageFrame, rgbaBuffer) { - if (!assertArrayDivisibility(imageFrame, 3, ["decodeRGB: ybrBuffer must not be undefined", "decodeRGB: ybrBuffer length 
must be divisble by 3"])) { + if ( + !assertArrayDivisibility(imageFrame, 3, [ + "decodeRGB: ybrBuffer must not be undefined", + "decodeRGB: ybrBuffer length must be divisble by 3", + ]) + ) { return; } diff --git a/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertYBRFullByPlane.js b/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertYBRFullByPlane.js index eff38d3d..06145817 100755 --- a/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertYBRFullByPlane.js +++ b/packages/static-cs-lite/lib/util/imageFrame/convert/color/convertYBRFullByPlane.js @@ -1,4 +1,3 @@ -/* eslint-disable no-plusplus, no-param-reassign */ const assertArrayDivisibility = require("../../../assertArrayDivisibility"); /** @@ -9,7 +8,12 @@ const assertArrayDivisibility = require("../../../assertArrayDivisibility"); * @returns {void} */ function converter(imageFrame, rgbaBuffer) { - if (!assertArrayDivisibility(imageFrame, 3, ["decodeRGB: ybrBuffer must not be undefined", "decodeRGB: ybrBuffer length must be divisble by 3"])) { + if ( + !assertArrayDivisibility(imageFrame, 3, [ + "decodeRGB: ybrBuffer must not be undefined", + "decodeRGB: ybrBuffer length must be divisble by 3", + ]) + ) { return; } diff --git a/packages/static-cs-lite/lib/util/imageFrame/convert/pixelDataType.js b/packages/static-cs-lite/lib/util/imageFrame/convert/pixelDataType.js index d8df1395..f92c6224 100644 --- a/packages/static-cs-lite/lib/util/imageFrame/convert/pixelDataType.js +++ b/packages/static-cs-lite/lib/util/imageFrame/convert/pixelDataType.js @@ -1,4 +1,3 @@ -/* eslint-disable no-param-reassign */ /** * It converts pixel data type based on imageFrame properties * @param {*} imageFrame object containing frame properties and also pixelData (this param is mutate) diff --git a/packages/static-cs-lite/lib/util/imageFrame/get/fromDataset.js b/packages/static-cs-lite/lib/util/imageFrame/get/fromDataset.js index 3a2fbc81..e113e41d 100644 --- a/packages/static-cs-lite/lib/util/imageFrame/get/fromDataset.js +++ b/packages/static-cs-lite/lib/util/imageFrame/get/fromDataset.js @@ -8,9 +8,18 @@ const paletteColor = require("./paletteColor"); * @returns image frame */ function fromDataset(dataSet, decodedPixelData) { - const bluePaletteColorLookupTableData = paletteColor(dataSet.BluePaletteColorLookupTableData, dataSet.BluePaletteColorLookupTableDescriptor); - const greenPaletteColorLookupTableData = paletteColor(dataSet.GreenPaletteColorLookupTableData, dataSet.GreenPaletteColorLookupTableDescriptor); - const redPaletteColorLookupTableData = paletteColor(dataSet.RedPaletteColorLookupTableData, dataSet.RedPaletteColorLookupTableDescriptor); + const bluePaletteColorLookupTableData = paletteColor( + dataSet.BluePaletteColorLookupTableData, + dataSet.BluePaletteColorLookupTableDescriptor, + ); + const greenPaletteColorLookupTableData = paletteColor( + dataSet.GreenPaletteColorLookupTableData, + dataSet.GreenPaletteColorLookupTableDescriptor, + ); + const redPaletteColorLookupTableData = paletteColor( + dataSet.RedPaletteColorLookupTableData, + dataSet.RedPaletteColorLookupTableDescriptor, + ); return { samplesPerPixel: dataSet.SamplesPerPixel, @@ -24,11 +33,14 @@ function fromDataset(dataSet, decodedPixelData) { smallestPixelValue: dataSet.SmallestImagePixelValue, largestPixelValue: dataSet.LargestImagePixelValue, bluePaletteColorLookupTableData, - bluePaletteColorLookupTableDescriptor: dataSet.BluePaletteColorLookupTableDescriptor, + bluePaletteColorLookupTableDescriptor: + 
dataSet.BluePaletteColorLookupTableDescriptor, greenPaletteColorLookupTableData, - greenPaletteColorLookupTableDescriptor: dataSet.GreenPaletteColorLookupTableDescriptor, + greenPaletteColorLookupTableDescriptor: + dataSet.GreenPaletteColorLookupTableDescriptor, redPaletteColorLookupTableData, - redPaletteColorLookupTableDescriptor: dataSet.RedPaletteColorLookupTableDescriptor, + redPaletteColorLookupTableDescriptor: + dataSet.RedPaletteColorLookupTableDescriptor, pixelData: decodedPixelData, }; } diff --git a/packages/static-cs-lite/lib/util/imageFrame/get/paletteColor.js b/packages/static-cs-lite/lib/util/imageFrame/get/paletteColor.js index 3b80178e..c48d2575 100644 --- a/packages/static-cs-lite/lib/util/imageFrame/get/paletteColor.js +++ b/packages/static-cs-lite/lib/util/imageFrame/get/paletteColor.js @@ -1,5 +1,3 @@ -/* eslint-disable no-plusplus, no-bitwise */ - /** * Returns palette color lut array. * In case colorLutData has InlineBinary value it decodes the binary using lutDescriptor. @@ -38,9 +36,13 @@ function paletteColor(colorLutData, colorLutDescriptor) { try { const paletteStr = colorLutData.InlineBinary; - const paletteBinaryStr = Buffer.from(paletteStr, "base64").toString("binary"); + const paletteBinaryStr = Buffer.from(paletteStr, "base64").toString( + "binary", + ); - const paletteTypedArray = Uint8Array.from(paletteBinaryStr, (c) => c.charCodeAt(0)); + const paletteTypedArray = Uint8Array.from(paletteBinaryStr, (c) => + c.charCodeAt(0), + ); result = typedArrayToPaletteColorLUT(paletteTypedArray); } catch (e) { diff --git a/packages/static-cs-lite/lib/util/imageFrame/is/jpegBaseline8BitColor.js b/packages/static-cs-lite/lib/util/imageFrame/is/jpegBaseline8BitColor.js index ddaedfe8..54242be1 100644 --- a/packages/static-cs-lite/lib/util/imageFrame/is/jpegBaseline8BitColor.js +++ b/packages/static-cs-lite/lib/util/imageFrame/is/jpegBaseline8BitColor.js @@ -1,9 +1,17 @@ function isJPEGBaseline8BitColor(imageFrame, _transferSyntax) { - const { bitsAllocated, samplesPerPixel, transferSyntax: transferSyntaxFromFrame } = imageFrame; + const { + bitsAllocated, + samplesPerPixel, + transferSyntax: transferSyntaxFromFrame, + } = imageFrame; const transferSyntax = _transferSyntax || transferSyntaxFromFrame; let response = false; - if (bitsAllocated === 8 && transferSyntax === "1.2.840.10008.1.2.4.50" && (samplesPerPixel === 3 || samplesPerPixel === 4)) { + if ( + bitsAllocated === 8 && + transferSyntax === "1.2.840.10008.1.2.4.50" && + (samplesPerPixel === 3 || samplesPerPixel === 4) + ) { response = true; } diff --git a/packages/static-wado-creator/lib/DeleteStudy.js b/packages/static-wado-creator/lib/DeleteStudy.js index ebd1f94e..7325a9c0 100644 --- a/packages/static-wado-creator/lib/DeleteStudy.js +++ b/packages/static-wado-creator/lib/DeleteStudy.js @@ -7,8 +7,15 @@ module.exports = (options) => console.log("Delete Study", studyInstanceUid); const studyData = await this.scanStudy("studies", studyInstanceUid); studyData.delete(); - const allStudies = await JSONReader(options.directoryName, "studies/index.json.gz", []); - const studiesWithoutDeleted = allStudies.filter((study) => studyInstanceUid != Tags.getValue(study, Tags.StudyInstanceUID)); + const allStudies = await JSONReader( + options.directoryName, + "studies/index.json.gz", + [], + ); + const studiesWithoutDeleted = allStudies.filter( + (study) => + studyInstanceUid != Tags.getValue(study, Tags.StudyInstanceUID), + ); await JSONWriter(options.directoryName, "studies", studiesWithoutDeleted); delete this.studyData; 
}; diff --git a/packages/static-wado-creator/lib/RejectInstance.js b/packages/static-wado-creator/lib/RejectInstance.js index 4311ebba..0c0eee38 100644 --- a/packages/static-wado-creator/lib/RejectInstance.js +++ b/packages/static-wado-creator/lib/RejectInstance.js @@ -1,4 +1,5 @@ -const dataExtractor = /^(.*studies)?[\\/]?([0-9.a-zA-Z]+)[\\/](series[\\/])?([0-9.a-zA-Z]+)([\\/]instances[\\/]([0-9.a-zA-Z]*))?$/; +const dataExtractor = + /^(.*studies)?[\\/]?([0-9.a-zA-Z]+)[\\/](series[\\/])?([0-9.a-zA-Z]+)([\\/]instances[\\/]([0-9.a-zA-Z]*))?$/; module.exports = () => async function rejectInstanceWithOptions(args) { @@ -11,9 +12,20 @@ module.exports = () => studyInstanceUid = extracted[2]; seriesInstanceUid = extracted[4]; sopInstanceUid = extracted[6]; - console.log("Extracted", studyInstanceUid, seriesInstanceUid, sopInstanceUid, reason); + console.log( + "Extracted", + studyInstanceUid, + seriesInstanceUid, + sopInstanceUid, + reason, + ); } else { - console.log("Not extracting from URL", studyInstanceUid, seriesInstanceUid, reason); + console.log( + "Not extracting from URL", + studyInstanceUid, + seriesInstanceUid, + reason, + ); seriesInstanceUid = args[1]; } const studyData = await this.scanStudy(studyInstanceUid); diff --git a/packages/static-wado-creator/lib/StaticWado.js b/packages/static-wado-creator/lib/StaticWado.js index 5b0add8c..d19940e3 100644 --- a/packages/static-wado-creator/lib/StaticWado.js +++ b/packages/static-wado-creator/lib/StaticWado.js @@ -1,6 +1,14 @@ const dicomCodec = require("@cornerstonejs/dicom-codec"); const staticCS = require("@radicalimaging/static-cs-lite"); -const { Stats, handleHomeRelative, dirScanner, JSONReader, JSONWriter, asyncIterableToBuffer, Tags } = require("@radicalimaging/static-wado-util"); +const { + Stats, + handleHomeRelative, + dirScanner, + JSONReader, + JSONWriter, + asyncIterableToBuffer, + Tags, +} = require("@radicalimaging/static-wado-util"); const dicomParser = require("dicom-parser"); const fs = require("fs"); const path = require("path"); @@ -15,7 +23,12 @@ const IdCreator = require("./util/IdCreator"); const ScanStudy = require("./operation/ScanStudy"); const HashDataWriter = require("./writer/HashDataWriter"); const VideoWriter = require("./writer/VideoWriter"); -const { transcodeImageFrame, generateLossyImage, transcodeId, transcodeMetadata } = require("./operation/adapter/transcodeImage"); +const { + transcodeImageFrame, + generateLossyImage, + transcodeId, + transcodeMetadata, +} = require("./operation/adapter/transcodeImage"); const ThumbnailWriter = require("./writer/ThumbnailWriter"); const decodeImage = require("./operation/adapter/decodeImage"); const ThumbnailService = require("./operation/ThumbnailService"); @@ -28,15 +41,30 @@ function setStudyData(studyData) { this.studyData = studyData; } -function internalGenerateImage(originalImageFrame, dataset, metadata, transferSyntaxUid, doneCallback) { +function internalGenerateImage( + originalImageFrame, + dataset, + metadata, + transferSyntaxUid, + doneCallback, +) { decodeImage(originalImageFrame, dataset, transferSyntaxUid) .then((decodeResult = {}) => { if (isVideo(transferSyntaxUid)) { console.log("Video data - no thumbnail generator yet"); } else { const { imageFrame, imageInfo } = decodeResult; - const pixelData = dicomCodec.getPixelData(imageFrame, imageInfo, transferSyntaxUid); - staticCS.getRenderedBuffer(transferSyntaxUid, pixelData, metadata, doneCallback); + const pixelData = dicomCodec.getPixelData( + imageFrame, + imageInfo, + transferSyntaxUid, + ); + 
staticCS.getRenderedBuffer( + transferSyntaxUid, + pixelData, + metadata, + doneCallback, + ); } }) .catch((error) => { @@ -46,7 +74,12 @@ function internalGenerateImage(originalImageFrame, dataset, metadata, transferSy class StaticWado { constructor(configuration) { - const { rootDir = "~/dicomweb", pathDeduplicated = "deduplicated", pathInstances = "instances", verbose } = configuration; + const { + rootDir = "~/dicomweb", + pathDeduplicated = "deduplicated", + pathInstances = "instances", + verbose, + } = configuration; dicomCodec.setConfig({ verbose }); const directoryName = handleHomeRelative(rootDir); @@ -73,7 +106,9 @@ class StaticWado { delete: DeleteStudy(this.options), setStudyData, rawDicomWriter: RawDicomWriter(this.options), - notificationService: new NotificationService(this.options.notificationDir), + notificationService: new NotificationService( + this.options.notificationDir, + ), internalGenerateImage, }; } @@ -144,7 +179,11 @@ class StaticWado { const studyInstanceUid = dataSet.string("x0020000d"); if (!studyInstanceUid) { - console.log("No study UID, can't import file", params.file, dataSet.elements); + console.log( + "No study UID, can't import file", + params.file, + dataSet.elements, + ); return undefined; } @@ -156,10 +195,14 @@ class StaticWado { sopInstanceUid: dataSet.string("x00080018"), transferSyntaxUid: dataSet.string("x00020010"), }, - params.file + params.file, ); - const targetId = transcodeId(id, this.options, dataSet.uint16(Tags.RawSamplesPerPixel)); + const targetId = transcodeId( + id, + this.options, + dataSet.uint16(Tags.RawSamplesPerPixel), + ); let bulkDataIndex = 0; let imageFrameIndex = 0; @@ -172,17 +215,36 @@ class StaticWado { // TODO - handle other types here too as single part rendered if (options?.mimeType === "application/pdf") { console.log("Writing rendered mimeType", options.mimeType); - const writeStream = WriteStream(id.sopInstanceRootPath, "rendered.pdf", { - gzip: false, - mkdir: true, - }); + const writeStream = WriteStream( + id.sopInstanceRootPath, + "rendered.pdf", + { + gzip: false, + mkdir: true, + }, + ); await writeStream.write(bulkData); await writeStream.close(); } - return this.callback.bulkdata(targetId, _bulkDataIndex, bulkData, options); + return this.callback.bulkdata( + targetId, + _bulkDataIndex, + bulkData, + options, + ); }, imageFrame: async (originalImageFrame) => { - const { imageFrame: transcodedImageFrame, decoded, id: transcodedId } = await transcodeImageFrame(id, targetId, originalImageFrame, dataSet, this.options); + const { + imageFrame: transcodedImageFrame, + decoded, + id: transcodedId, + } = await transcodeImageFrame( + id, + targetId, + originalImageFrame, + dataSet, + this.options, + ); const lossyImage = await generateLossyImage(id, decoded, this.options); @@ -190,7 +252,11 @@ class StaticWado { imageFrameIndex += 1; if (lossyImage) { - await this.callback.imageFrame(lossyImage.id, currentImageFrameIndex, lossyImage.imageFrame); + await this.callback.imageFrame( + lossyImage.id, + currentImageFrameIndex, + lossyImage.imageFrame, + ); } thumbnailService.queueThumbnail( @@ -201,10 +267,14 @@ class StaticWado { id, frameIndex: currentImageFrameIndex, }, - this.options + this.options, ); - return this.callback.imageFrame(transcodedId, currentImageFrameIndex, transcodedImageFrame); + return this.callback.imageFrame( + transcodedId, + currentImageFrameIndex, + transcodedImageFrame, + ); }, videoWriter: async (_dataSet) => this.callback.videoWriter(id, _dataSet), }; @@ -215,8 +285,20 @@ class StaticWado { 
await this.callback.rawDicomWriter?.(id, result, buffer); const transcodedMeta = transcodeMetadata(result.metadata, id, this.options); - await thumbnailService.generateThumbnails(id, dataSet, transcodedMeta, this.callback, this.options); - await thumbnailService.generateRendered(id, dataSet, transcodedMeta, this.callback, this.options); + await thumbnailService.generateThumbnails( + id, + dataSet, + transcodedMeta, + this.callback, + this.options, + ); + await thumbnailService.generateRendered( + id, + dataSet, + transcodedMeta, + this.callback, + this.options, + ); await this.callback.metadata(targetId, transcodedMeta); // resolve promise with statistics @@ -227,8 +309,20 @@ class StaticWado { return getDataSet(dataSet, generator, params); } - static internalGenerateImage(originalImageFrame, dataSet, metadata, transferSyntaxUid, doneCallback) { - return internalGenerateImage(originalImageFrame, dataSet, metadata, transferSyntaxUid, doneCallback); + static internalGenerateImage( + originalImageFrame, + dataSet, + metadata, + transferSyntaxUid, + doneCallback, + ) { + return internalGenerateImage( + originalImageFrame, + dataSet, + metadata, + transferSyntaxUid, + doneCallback, + ); } /** @@ -261,7 +355,11 @@ class StaticWado { const dirs = await fs.promises.readdir(studiesDir); const studies = []; for (const dir of dirs) { - const study = await JSONReader(`${studiesDir}/${dir}`, "index.json.gz", null); + const study = await JSONReader( + `${studiesDir}/${dir}`, + "index.json.gz", + null, + ); if (study === null) { console.log("No study found in", dir); continue; diff --git a/packages/static-wado-creator/lib/createPart10.js b/packages/static-wado-creator/lib/createPart10.js index ddd2c8b6..885d3154 100644 --- a/packages/static-wado-creator/lib/createPart10.js +++ b/packages/static-wado-creator/lib/createPart10.js @@ -1,4 +1,3 @@ -/* eslint-disable no-param-reassign */ const { Tags, readBulkData } = require("@radicalimaging/static-wado-util"); const dcmjs = require("dcmjs"); @@ -14,8 +13,13 @@ fileMetaInformationVersionArray[1] = 1; const createFmi = (instance) => { // Assume the TSUID is in the value 0 - const TransferSyntaxUID = Tags.getValue(instance, Tags.AvailableTransferSyntaxUID) || UncompressedLEIExplicit; - const MediaStorageSOPClassUID = Tags.getValue(instance, Tags.MediaStorageSOPClassUID); + const TransferSyntaxUID = + Tags.getValue(instance, Tags.AvailableTransferSyntaxUID) || + UncompressedLEIExplicit; + const MediaStorageSOPClassUID = Tags.getValue( + instance, + Tags.MediaStorageSOPClassUID, + ); const SOPInstanceUID = Tags.getValue(instance, Tags.SOPInstanceUID); const naturalFmi = { MediaStorageSOPClassUID, diff --git a/packages/static-wado-creator/lib/mkdicomwebConfig.js b/packages/static-wado-creator/lib/mkdicomwebConfig.js index c38a1168..e106bd7f 100644 --- a/packages/static-wado-creator/lib/mkdicomwebConfig.js +++ b/packages/static-wado-creator/lib/mkdicomwebConfig.js @@ -28,12 +28,14 @@ const { mkdicomwebConfig } = ConfigPoint.register({ }, { key: "--clean", - description: "Clean the outputs before generating/starting to write new values.", + description: + "Clean the outputs before generating/starting to write new values.", defaultValue: false, }, { key: "-d, --deployments ", - description: "List of deployments from configuration to deploy to. Separated by space.", + description: + "List of deployments from configuration to deploy to. 
Separated by space.", defaultValue: undefined, }, { @@ -52,7 +54,8 @@ const { mkdicomwebConfig } = ConfigPoint.register({ }, { key: "--alternate ", - description: "Generates an alternate representaton of the image generally in the /lossy sub-directory", + description: + "Generates an alternate representaton of the image generally in the /lossy sub-directory", choices: ["jhc", "jls", "jhcLossless", "jlsLossless"], }, { @@ -71,17 +74,20 @@ const { mkdicomwebConfig } = ConfigPoint.register({ }, { key: "-t, --content-type ", - description: 'Destination type to compress to (choices: "jpeg", "jls", "lei", "jls-lossy", "jhc", "jxl" or DICOM Transfer Syntax UID - default: "jls")', + description: + 'Destination type to compress to (choices: "jpeg", "jls", "lei", "jls-lossy", "jhc", "jxl" or DICOM Transfer Syntax UID - default: "jls")', defaultValue: "jls", customParser: compressionOptionParser, }, { key: "--encapsulated-image", - description: "Avoid encapsulating the image frame. Writes with the extension and without multipart", + description: + "Avoid encapsulating the image frame. Writes with the extension and without multipart", }, { key: "-e, --no-encapsulated-image", - description: "Avoid encapsulating the image frame. Writes with the extension and without multipart", + description: + "Avoid encapsulating the image frame. Writes with the extension and without multipart", }, { key: "--single-part-image", @@ -105,13 +111,34 @@ const { mkdicomwebConfig } = ConfigPoint.register({ key: "-r, --recompress ", description: "List of types to recompress separated by space", defaultValue: ["uncompressed", "jp2", "jls", "jll"], - choices: ["uncompressed", "jp2", "jpeg", "jpeglossless", "rle", "jph", "jls", "true", "none"], + choices: [ + "uncompressed", + "jp2", + "jpeg", + "jpeglossless", + "rle", + "jph", + "jls", + "true", + "none", + ], }, { key: "--recompress-color ", - description: "List of types to recompress for color images, separated by space", + description: + "List of types to recompress for color images, separated by space", defaultValue: ["uncompressed"], - choices: ["uncompressed", "jpeg", "jp2", "jpeglossless", "rle", "jph", "jls", "true", "none"], + choices: [ + "uncompressed", + "jpeg", + "jp2", + "jpeglossless", + "rle", + "jph", + "jls", + "true", + "none", + ], }, { key: "-f, --force", @@ -155,7 +182,8 @@ const { mkdicomwebConfig } = ConfigPoint.register({ }, { key: "--path-deduplicated ", - description: "Set the deduplicate data directory path (relative to dir)", + description: + "Set the deduplicate data directory path (relative to dir)", defaultValue: "deduplicated", }, { @@ -170,18 +198,24 @@ const { mkdicomwebConfig } = ConfigPoint.register({ }, { key: "--prepend-bulk-data-uri ", - description: "Prepend bulkdata uri (ex. to use absolute Uri like http://host:3000/dicomweb)", + description: + "Prepend bulkdata uri (ex. 
to use absolute Uri like http://host:3000/dicomweb)", defaultValue: "", }, { key: "--expand-bulk-data-uri", - description: "expand bulkdata relative uri to use full relative path (should also be set when using --prepend-bulk-data-uri)", + description: + "expand bulkdata relative uri to use full relative path (should also be set when using --prepend-bulk-data-uri)", defaultValue: false, }, { key: "-o, --dir ", description: "Set output directory", - defaultValue: { configOperation: "reference", source: "staticWadoConfig", reference: "rootDir" }, + defaultValue: { + configOperation: "reference", + source: "staticWadoConfig", + reference: "rootDir", + }, }, ], programs: [ @@ -210,24 +244,28 @@ const { mkdicomwebConfig } = ConfigPoint.register({ command: "instance", arguments: ["input"], main: instanceMain, - helpDescription: "Make instance level DICOMweb metadata and bulkdata, but don't group or write series metadata", + helpDescription: + "Make instance level DICOMweb metadata and bulkdata, but don't group or write series metadata", }, { command: "group", arguments: ["input"], main: groupMain, - helpDescription: "Group instance level metadata into deduplicated data.\nDeletes instance level deduplicated information once it is confirmed written.", + helpDescription: + "Group instance level metadata into deduplicated data.\nDeletes instance level deduplicated information once it is confirmed written.", }, { command: "metadata", arguments: ["input"], main: metadataMain, - helpDescription: "Write the metadata object (series and study details) from the grouped deduplicated data.", + helpDescription: + "Write the metadata object (series and study details) from the grouped deduplicated data.", }, { command: "delete", main: deleteMain, - helpDescription: "Delete the given study, series or instance (not yet implemented)", + helpDescription: + "Delete the given study, series or instance (not yet implemented)", }, { command: "reject ", diff --git a/packages/static-wado-creator/lib/model/TagLists.js b/packages/static-wado-creator/lib/model/TagLists.js index c001aaf5..84f41223 100644 --- a/packages/static-wado-creator/lib/model/TagLists.js +++ b/packages/static-wado-creator/lib/model/TagLists.js @@ -6,8 +6,20 @@ const { Tags } = require("@radicalimaging/static-wado-util"); const hasher = hashFactory.hasher(); const { PatientID, PatientName, IssuerOfPatientID } = Tags; -const { StudyDescription, AccessionNumber, StudyInstanceUID, StudyDate, StudyTime } = Tags; -const { SeriesDescription, SeriesNumber, SeriesInstanceUID, SeriesDate, SeriesTime } = Tags; +const { + StudyDescription, + AccessionNumber, + StudyInstanceUID, + StudyDate, + StudyTime, +} = Tags; +const { + SeriesDescription, + SeriesNumber, + SeriesInstanceUID, + SeriesDate, + SeriesTime, +} = Tags; const { DeduppedHash, DeduppedRef, DeduppedType } = Tags; @@ -24,7 +36,16 @@ const PatientQuery = [ Tags.PatientIdentityRemoved, Tags.DeidentificationMethodCodeSequence, ]; -const StudyQuery = [StudyDescription, AccessionNumber, StudyInstanceUID, StudyDate, StudyTime, Tags.StudyStatusID, Tags.StudyPriorityID, Tags.StudyID]; +const StudyQuery = [ + StudyDescription, + AccessionNumber, + StudyInstanceUID, + StudyDate, + StudyTime, + Tags.StudyStatusID, + Tags.StudyPriorityID, + Tags.StudyID, +]; const PatientStudyQuery = [...PatientQuery, ...StudyQuery]; diff --git a/packages/static-wado-creator/lib/model/uids.js b/packages/static-wado-creator/lib/model/uids.js index 713300b1..0efe3cd1 100644 --- a/packages/static-wado-creator/lib/model/uids.js +++ 
b/packages/static-wado-creator/lib/model/uids.js @@ -21,7 +21,11 @@ const uids = { "1.2.840.10008.1.2.1": uncompressed, "1.2.840.10008.1.2.1.99": uncompressed, "1.2.840.10008.1.2.2": uncompressed, - "1.2.840.10008.1.2.4.50": { contentType: jpeg, lossy: true, extension: ".jpeg" }, + "1.2.840.10008.1.2.4.50": { + contentType: jpeg, + lossy: true, + extension: ".jpeg", + }, "1.2.840.10008.1.2.4.51": { contentType: jpeg, lossy: true }, "1.2.840.10008.1.2.4.57": { contentType: jpeg }, "1.2.840.10008.1.2.4.70": { contentType: jll, extension: ".jll" }, diff --git a/packages/static-wado-creator/lib/operation/InstanceDeduplicate.js b/packages/static-wado-creator/lib/operation/InstanceDeduplicate.js index b6b782ae..b5f5385f 100644 --- a/packages/static-wado-creator/lib/operation/InstanceDeduplicate.js +++ b/packages/static-wado-creator/lib/operation/InstanceDeduplicate.js @@ -28,7 +28,8 @@ async function deduplicateSingleInstance(id, imageFrame, { force }) { return {}; } if (!force && studyData.sopExists(sopUID)) { - if (this.verbose) console.log("SOP Instance UID", sopUID, "already exists, skipping"); + if (this.verbose) + console.log("SOP Instance UID", sopUID, "already exists, skipping"); // TODO - allow replace as an option // Null value means skip writing this instance return null; @@ -39,7 +40,12 @@ async function deduplicateSingleInstance(id, imageFrame, { force }) { if (!this.extractors) this.extractors = extractors; for (const key of Object.keys(this.extractors)) { - const extracted = TagLists.extract(deduplicated, key, this.extractors[key], TagLists.RemoveExtract); + const extracted = TagLists.extract( + deduplicated, + key, + this.extractors[key], + TagLists.RemoveExtract, + ); const hashKey = getValue(extracted, Tags.DeduppedHash); await studyData.addExtracted(this, hashKey, extracted); } @@ -90,7 +96,11 @@ const InstanceDeduplicate = (options) => this.verbose = options.verbose; } - const deduppedInstance = await this.deduplicateSingleInstance(id, imageFrame, options); + const deduppedInstance = await this.deduplicateSingleInstance( + id, + imageFrame, + options, + ); if (deduppedInstance) { // this refers to callee await this.deduplicated(id, deduppedInstance); diff --git a/packages/static-wado-creator/lib/operation/ScanStudy.js b/packages/static-wado-creator/lib/operation/ScanStudy.js index 9495ae44..da5e5517 100644 --- a/packages/static-wado-creator/lib/operation/ScanStudy.js +++ b/packages/static-wado-creator/lib/operation/ScanStudy.js @@ -1,14 +1,24 @@ const path = require("path"); function ScanStudy(options) { - const { directoryName, deduplicatedRoot, deduplicatedInstancesRoot } = options; + const { directoryName, deduplicatedRoot, deduplicatedInstancesRoot } = + options; return function scanStudy(studyInstanceUid) { console.verbose("scanStudy", studyInstanceUid); const studyPath = path.join(directoryName, "studies", studyInstanceUid); - const deduplicatedInstancesPath = path.join(deduplicatedInstancesRoot, studyInstanceUid); + const deduplicatedInstancesPath = path.join( + deduplicatedInstancesRoot, + studyInstanceUid, + ); const deduplicatedPath = path.join(deduplicatedRoot, studyInstanceUid); - console.verbose("Importing", studyInstanceUid, studyPath, deduplicatedInstancesPath, deduplicatedPath); + console.verbose( + "Importing", + studyInstanceUid, + studyPath, + deduplicatedInstancesPath, + deduplicatedPath, + ); return this.completeStudy.getCurrentStudyData(this, { studyPath, deduplicatedPath, diff --git a/packages/static-wado-creator/lib/operation/StudyData.js 
b/packages/static-wado-creator/lib/operation/StudyData.js index 2f5c9cf8..f60e6ba6 100644 --- a/packages/static-wado-creator/lib/operation/StudyData.js +++ b/packages/static-wado-creator/lib/operation/StudyData.js @@ -8,7 +8,8 @@ const TagLists = require("../model/TagLists"); const { getValue, setValue, getList, setList } = Tags; const hasher = hashFactory.hasher(); -const getSeriesInstanceUid = (seriesInstance) => getValue(seriesInstance, Tags.SeriesInstanceUID); +const getSeriesInstanceUid = (seriesInstance) => + getValue(seriesInstance, Tags.SeriesInstanceUID); /** * StudyData contains information about the grouped study data. It is used to create @@ -21,7 +22,15 @@ const getSeriesInstanceUid = (seriesInstance) => getValue(seriesInstance, Tags.S * level data multiple times when it already exists. */ class StudyData { - constructor({ studyInstanceUid, studyPath, deduplicatedPath, deduplicatedInstancesPath }, { isGroup, clean }) { + constructor( + { + studyInstanceUid, + studyPath, + deduplicatedPath, + deduplicatedInstancesPath, + }, + { isGroup, clean }, + ) { this.studyInstanceUid = studyInstanceUid; this.studyPath = studyPath; this.isGroup = isGroup; @@ -56,21 +65,31 @@ class StudyData { // Wipe out the study directory entirely, as well as the deduplicatedRoot and instancesRoot } this.groupFiles = 0; - const studyDeduplicated = await JSONReader(this.studyPath, "deduplicated/index.json.gz", []); + const studyDeduplicated = await JSONReader( + this.studyPath, + "deduplicated/index.json.gz", + [], + ); const info = studyDeduplicated[0]; if (info) { const hash = getValue(info, Tags.DeduppedHash); console.log("Reading studies//deduplicated/index.json.gz"); this.readDeduplicatedData("index.json.gz", studyDeduplicated, hash); } else { - console.log("No deduplicated/index.json to read in", this.studyPath, "/deduplicated/index.json.gz"); + console.log( + "No deduplicated/index.json to read in", + this.studyPath, + "/deduplicated/index.json.gz", + ); } if (this.deduplicatedPath) { this.groupFiles = await this.readDeduplicated(this.deduplicatedPath); console.verbose("Read groupFiles:", this.groupFiles); } if (this.deduplicatedInstancesPath) { - this.instanceFiles = await this.readDeduplicated(this.deduplicatedInstancesPath); + this.instanceFiles = await this.readDeduplicated( + this.deduplicatedInstancesPath, + ); } } @@ -84,7 +103,11 @@ class StudyData { * a separate type of check. 
*/ get dirty() { - return this.newInstancesAdded > 0 || this.existingFiles.length > 1 || this.instanceFiles > 0; + return ( + this.newInstancesAdded > 0 || + this.existingFiles.length > 1 || + this.instanceFiles > 0 + ); } async dirtyMetadata() { @@ -93,7 +116,9 @@ class StudyData { return true; } if (this.groupFiles > 0) { - console.verbose("dirtyMetadata::Study level deduplicated doesn't match group files"); + console.verbose( + "dirtyMetadata::Study level deduplicated doesn't match group files", + ); } try { const studyFile = await JSONReader(this.studyPath, "index.json.gz", null); @@ -108,7 +133,10 @@ class StudyData { console.verbose("dirtyMetadata::Dedupped hash missing"); return true; } catch (e) { - console.verbose("dirtyMetadata::Exception, assume study metadata is dirty", e); + console.verbose( + "dirtyMetadata::Exception, assume study metadata is dirty", + e, + ); return true; } } @@ -129,7 +157,10 @@ class StudyData { async delete() { await fs.rmSync(this.studyPath, { recursive: true, force: true }); - await fs.rmSync(this.deduplicatedInstancesPath, { recursive: true, force: true }); + await fs.rmSync(this.deduplicatedInstancesPath, { + recursive: true, + force: true, + }); await fs.rmSync(this.deduplicatedPath, { recursive: true, force: true }); this.clear(); } @@ -159,10 +190,15 @@ class StudyData { * Create a full study instance data for instance at index */ async recombine(indexOrSop) { - const index = typeof indexOrSop === "string" ? this.sopInstances[indexOrSop] : indexOrSop; + const index = + typeof indexOrSop === "string" + ? this.sopInstances[indexOrSop] + : indexOrSop; const deduplicated = this.deduplicated[index]; if (index < 0 || index >= this.deduplicated.length) { - throw new Error(`Can't read index ${index}, out of bounds [0..${this.deduplicated.length})`); + throw new Error( + `Can't read index ${index}, out of bounds [0..${this.deduplicated.length})`, + ); } const refs = getList(deduplicated, Tags.DeduppedRef); if (!refs) { @@ -179,7 +215,12 @@ class StudyData { async addExtracted(callback, hashKey, item) { if (this.extractData[hashKey]) { - if (this.verbose) console.log("Already have extracted", hashKey, getValue(item, Tags.DeduppedType)); + if (this.verbose) + console.log( + "Already have extracted", + hashKey, + getValue(item, Tags.DeduppedType), + ); return; } await callback.bulkdata(this, hashKey, item); @@ -322,17 +363,33 @@ class StudyData { const seriesInstance = await this.recombine(i); const type = getValue(seriesInstance, Tags.DeduppedType); if (type == "deleted") { - console.log("Skipping deleted instance", type, getValue(seriesInstance, Tags.SeriesInstanceUID)); + console.log( + "Skipping deleted instance", + type, + getValue(seriesInstance, Tags.SeriesInstanceUID), + ); continue; } const seriesInstanceUid = getSeriesInstanceUid(seriesInstance); if (!seriesInstanceUid) { - console.log("Cant get seriesUid from", Tags.SeriesInstanceUID, seriesInstance); + console.log( + "Cant get seriesUid from", + Tags.SeriesInstanceUID, + seriesInstance, + ); continue; } if (!series[seriesInstanceUid]) { - const seriesQuery = TagLists.extract(seriesInstance, "series", TagLists.SeriesQuery); - const seriesPath = path.join(this.studyPath, "series", seriesInstanceUid); + const seriesQuery = TagLists.extract( + seriesInstance, + "series", + TagLists.SeriesQuery, + ); + const seriesPath = path.join( + this.studyPath, + "series", + seriesInstanceUid, + ); series[seriesInstanceUid] = { seriesPath, seriesQuery, @@ -341,7 +398,9 @@ class StudyData { }; } 
series[seriesInstanceUid].instances.push(seriesInstance); - series[seriesInstanceUid].instancesQuery.push(TagLists.extract(seriesInstance, "instance", TagLists.InstanceQuery)); + series[seriesInstanceUid].instancesQuery.push( + TagLists.extract(seriesInstance, "instance", TagLists.InstanceQuery), + ); } const seriesList = []; @@ -350,7 +409,8 @@ class StudyData { let numberOfSeries = 0; for (const seriesUid of Object.keys(series)) { const singleSeries = series[seriesUid]; - const { seriesQuery, seriesPath, instances, instancesQuery } = singleSeries; + const { seriesQuery, seriesPath, instances, instancesQuery } = + singleSeries; seriesQuery[Tags.NumberOfSeriesRelatedInstances] = { vr: "IS", Value: [instances.length], @@ -359,20 +419,28 @@ class StudyData { numberOfSeries += 1; const modality = getValue(seriesQuery, Tags.Modality); seriesList.push(seriesQuery); - if (modalitiesInStudy.indexOf(modality) == -1) modalitiesInStudy.push(modality); + if (modalitiesInStudy.indexOf(modality) == -1) + modalitiesInStudy.push(modality); await JSONWriter(seriesPath, "metadata", instances, { gzip: true, index: false, }); // Write out a series singleton that has just the series response for a single series. - await JSONWriter(seriesPath, "series-singleton.json", [seriesQuery], { gzip: true, index: false }); + await JSONWriter(seriesPath, "series-singleton.json", [seriesQuery], { + gzip: true, + index: false, + }); await JSONWriter(seriesPath, "instances", instancesQuery); } await JSONWriter(this.studyPath, "series", seriesList); console.log("Wrote series with", seriesList.length); - const studyQuery = TagLists.extract(anInstance, "study", TagLists.PatientStudyQuery); + const studyQuery = TagLists.extract( + anInstance, + "study", + TagLists.PatientStudyQuery, + ); studyQuery[Tags.ModalitiesInStudy] = { Value: modalitiesInStudy, vr: "CS" }; studyQuery[Tags.NumberOfStudyRelatedInstances] = { Value: [numberOfInstances], @@ -389,8 +457,18 @@ class StudyData { }); const infoItem = this.createInfo(); - console.log("Writing deduplicated study data with", Object.values(this.extractData).length, "extract items and", this.deduplicated.length, "instance items"); - await JSONWriter(this.studyPath, "deduplicated", [infoItem, ...Object.values(this.extractData), ...this.deduplicated]); + console.log( + "Writing deduplicated study data with", + Object.values(this.extractData).length, + "extract items and", + this.deduplicated.length, + "instance items", + ); + await JSONWriter(this.studyPath, "deduplicated", [ + infoItem, + ...Object.values(this.extractData), + ...this.deduplicated, + ]); return studyQuery; } @@ -406,7 +484,6 @@ class StudyData { return data; } - /* eslint-disable-next-line class-methods-use-this */ removeGz(name) { const gzIndex = name.indexOf(".gz"); return (gzIndex > 0 && name.substring(0, gzIndex)) || name; @@ -415,7 +492,11 @@ class StudyData { async deleteInstancesReferenced() { const deduplicatedDirectory = this.deduplicatedInstancesPath; if (!fs.existsSync(deduplicatedDirectory)) return; - console.log("Deleting instances referenced in", this.studyInstanceUid, this.deduplicatedInstancesPath); + console.log( + "Deleting instances referenced in", + this.studyInstanceUid, + this.deduplicatedInstancesPath, + ); const files = await this.listJsonFiles(deduplicatedDirectory); console.log("Deleting", files.length, "files"); let deleteCount = 0; @@ -447,19 +528,28 @@ class StudyData { Object.values(this.extractData).length, "extract items and", this.deduplicated.length, - "instance items" + "instance 
items", ); setList( data, Tags.DeduppedRef, - Object.keys(this.readHashes).filter(() => this.deduplicatedHashes[hashValue] == undefined) + Object.keys(this.readHashes).filter( + () => this.deduplicatedHashes[hashValue] == undefined, + ), ); - const deduplicatedList = [data, ...Object.values(this.extractData), ...this.deduplicated]; + const deduplicatedList = [ + data, + ...Object.values(this.extractData), + ...this.deduplicated, + ]; // const naturalList = deduplicatedList.map(Tags.naturalizeDataset); // // console.log("naturalList=", JSON.stringify(naturalList,null,2)); // console.log("Going to write study data", naturalList.length); // await JSONWriter(this.deduplicatedPath, hashValue, naturalList, { gzip: true, index: false }); - await JSONWriter(this.deduplicatedPath, hashValue, deduplicatedList, { gzip: true, index: false }); + await JSONWriter(this.deduplicatedPath, hashValue, deduplicatedList, { + gzip: true, + index: false, + }); console.log("Wrote naturalized dataset"); } diff --git a/packages/static-wado-creator/lib/operation/ThumbnailService.js b/packages/static-wado-creator/lib/operation/ThumbnailService.js index d9552840..3daa1a33 100644 --- a/packages/static-wado-creator/lib/operation/ThumbnailService.js +++ b/packages/static-wado-creator/lib/operation/ThumbnailService.js @@ -56,8 +56,12 @@ class ThumbnailService { * @param {Object} programOpts */ queueThumbnail(thumbObjWrapper, programOpts) { - const { id, imageFrame, transcodedId, transcodedImageFrame, frameIndex } = thumbObjWrapper; - const getThumbContent = (originalContent, trancodedContent) => (shouldThumbUseTranscoded(id, programOpts) ? trancodedContent : originalContent); + const { id, imageFrame, transcodedId, transcodedImageFrame, frameIndex } = + thumbObjWrapper; + const getThumbContent = (originalContent, trancodedContent) => + shouldThumbUseTranscoded(id, programOpts) + ? trancodedContent + : originalContent; const thumbObj = { imageFrame: getThumbContent(imageFrame, transcodedImageFrame), @@ -77,7 +81,9 @@ class ThumbnailService { } ffmpeg(input, output) { - execSpawn(`ffmpeg -i "${input}" -vf "thumbnail,scale=640:360" -frames:v 1 -f singlejpeg "${output}"`); + execSpawn( + `ffmpeg -i "${input}" -vf "thumbnail,scale=640:360" -frames:v 1 -f singlejpeg "${output}"`, + ); } dcm2jpg(input, output, options) { @@ -116,10 +122,18 @@ class ThumbnailService { if (pixelData) { const { BulkDataURI } = pixelData; if (BulkDataURI?.indexOf("mp4")) { - fs.mkdirSync(`${itemId.sopInstanceRootPath}/rendered`, { recursive: true }); - const mp4Path = path.join(itemId.sopInstanceRootPath, "rendered/index.mp4"); + fs.mkdirSync(`${itemId.sopInstanceRootPath}/rendered`, { + recursive: true, + }); + const mp4Path = path.join( + itemId.sopInstanceRootPath, + "rendered/index.mp4", + ); // Generate as rendered, as more back ends support that. 
- const thumbPath = path.join(itemId.sopInstanceRootPath, "rendered/1.jpg"); + const thumbPath = path.join( + itemId.sopInstanceRootPath, + "rendered/1.jpg", + ); console.log("MP4 - converting video format", mp4Path); this.ffmpeg(mp4Path, thumbPath); return thumbPath; @@ -135,23 +149,41 @@ class ThumbnailService { } if (options.dcm2jpg) { - return this.dcm2jpg(id.filename, id.imageFrameRootPath.replace(/frames/, "thumbnail"), {}); + return this.dcm2jpg( + id.filename, + id.imageFrameRootPath.replace(/frames/, "thumbnail"), + {}, + ); } - await callback.internalGenerateImage(imageFrame, dataSet, metadata, id.transferSyntaxUid, async (thumbBuffer) => { - try { - if (thumbBuffer) { - await callback.thumbWriter(id.sopInstanceRootPath, this.thumbFileName, thumbBuffer); - - this.copySyncThumbnail(id.sopInstanceRootPath, id.seriesRootPath); - this.copySyncThumbnail(id.seriesRootPath, id.studyPath); - Stats.StudyStats.add("Thumbnail Write", `Write thumbnail ${this.thumbFileName}`, 100); + await callback.internalGenerateImage( + imageFrame, + dataSet, + metadata, + id.transferSyntaxUid, + async (thumbBuffer) => { + try { + if (thumbBuffer) { + await callback.thumbWriter( + id.sopInstanceRootPath, + this.thumbFileName, + thumbBuffer, + ); + + this.copySyncThumbnail(id.sopInstanceRootPath, id.seriesRootPath); + this.copySyncThumbnail(id.seriesRootPath, id.studyPath); + Stats.StudyStats.add( + "Thumbnail Write", + `Write thumbnail ${this.thumbFileName}`, + 100, + ); + } + return this.thumbFileName; + } catch (e) { + console.log("Couldn't generate thumbnail", this.thumbFileName, e); } - return this.thumbFileName; - } catch (e) { - console.log("Couldn't generate thumbnail", this.thumbFileName, e); - } - }); + }, + ); } /** @@ -165,7 +197,9 @@ class ThumbnailService { */ async copySyncThumbnail(sourceFolderPath, targetFolderPath) { const parentPathLevel = path.join(sourceFolderPath, "../"); - const thumbFilesPath = glob.sync(`${parentPathLevel}*/${this.thumbFileName}`); + const thumbFilesPath = glob.sync( + `${parentPathLevel}*/${this.thumbFileName}`, + ); const thumbIndex = getThumbIndex(thumbFilesPath.length); const thumbFilePath = thumbFilesPath[thumbIndex]; @@ -180,7 +214,10 @@ class ThumbnailService { throw new Error(`Target path: ${targetFolderPath} is not a directory`); } - fs.copyFileSync(thumbFilePath, `${targetFolderPath}/${this.thumbFileName}`); + fs.copyFileSync( + thumbFilePath, + `${targetFolderPath}/${this.thumbFileName}`, + ); } catch (e) { console.log("The file could not be copied", e); } diff --git a/packages/static-wado-creator/lib/operation/adapter/transcodeImage.js b/packages/static-wado-creator/lib/operation/adapter/transcodeImage.js index b2a37cfc..51af68fc 100644 --- a/packages/static-wado-creator/lib/operation/adapter/transcodeImage.js +++ b/packages/static-wado-creator/lib/operation/adapter/transcodeImage.js @@ -103,7 +103,9 @@ const transcodeSourceMap = { * @returns A partial transcoder definition. Otherwise it returns undefined. 
*/ function getDestinationTranscoder(id) { - const destinationTranscoderEntry = Object.entries(transcodeDestinationMap).find(([key, value]) => key === id || value.transferSyntaxUid === id); + const destinationTranscoderEntry = Object.entries( + transcodeDestinationMap, + ).find(([key, value]) => key === id || value.transferSyntaxUid === id); if (destinationTranscoderEntry) { return destinationTranscoderEntry[1]; } @@ -116,8 +118,13 @@ function getDestinationTranscoder(id) { * @param {*} transferSyntaxUid * @returns */ -function getTranscoder(transferSyntaxUid, { contentType: greyContentType, colorContentType }, samplesPerPixel) { - const contentType = samplesPerPixel === 3 ? colorContentType : greyContentType; +function getTranscoder( + transferSyntaxUid, + { contentType: greyContentType, colorContentType }, + samplesPerPixel, +) { + const contentType = + samplesPerPixel === 3 ? colorContentType : greyContentType; const sourceTranscoder = transcodeSourceMap[transferSyntaxUid]; const destinationTranscoder = getDestinationTranscoder(contentType); if (!sourceTranscoder || !destinationTranscoder) { @@ -126,7 +133,8 @@ function getTranscoder(transferSyntaxUid, { contentType: greyContentType, colorC return { transferSyntaxUid: destinationTranscoder.transferSyntaxUid, - transcodeOp: sourceTranscoder.transcodeOp | destinationTranscoder.transcodeOp, // eslint-disable-line no-bitwise + transcodeOp: + sourceTranscoder.transcodeOp | destinationTranscoder.transcodeOp, alias: sourceTranscoder.alias, }; } @@ -148,12 +156,22 @@ function shouldTranscodeImageFrame(id, options, samplesPerPixel) { const transcoder = getTranscoder(transferSyntaxUid, options, samplesPerPixel); const validTranscoder = transcoder && transcoder.transferSyntaxUid; if (!validTranscoder) { - console.verbose("Not transcoding because no decoder found for", transferSyntaxUid, samplesPerPixel); + console.verbose( + "Not transcoding because no decoder found for", + transferSyntaxUid, + samplesPerPixel, + ); return false; } - const validRecompress = recompress.includes("true") || recompress.includes(transcoder.alias); + const validRecompress = + recompress.includes("true") || recompress.includes(transcoder.alias); if (!validRecompress) { - console.verbose("Not transcoding because recompress", recompress, "does not include", transcoder.alias); + console.verbose( + "Not transcoding because recompress", + recompress, + "does not include", + transcoder.alias, + ); return false; } @@ -176,7 +194,11 @@ function shouldThumbUseTranscoded(id, options) { const { transferSyntaxUid } = id; // Ignore the samples per pixel for thumbnails const transcoder = getTranscoder(transferSyntaxUid, options, "thumbnail"); - const result = transcoder && transcoder.transferSyntaxUid && options.recompress.includes(transcoder.alias) && options.recompressThumb.includes(transcoder.alias); + const result = + transcoder && + transcoder.transferSyntaxUid && + options.recompress.includes(transcoder.alias) && + options.recompressThumb.includes(transcoder.alias); return result; } @@ -200,7 +222,8 @@ const beforeEncode = (options, encoder) => { }; function scale(imageFrame, imageInfo) { - const { rows, columns, bitsPerPixel, pixelRepresentation, samplesPerPixel } = imageInfo; + const { rows, columns, bitsPerPixel, pixelRepresentation, samplesPerPixel } = + imageInfo; let ArrayConstructor = Float32Array; if (bitsPerPixel === 8) { ArrayConstructor = pixelRepresentation ? 
Int8Array : Uint8Array; @@ -218,7 +241,9 @@ function scale(imageFrame, imageInfo) { columns: Math.round(columns / 4), samplesPerPixel, }; - dest.pixelData = new ArrayConstructor(dest.rows * dest.columns * samplesPerPixel); + dest.pixelData = new ArrayConstructor( + dest.rows * dest.columns * samplesPerPixel, + ); replicate(src, dest); return { @@ -248,7 +273,10 @@ async function generateLossyImage(id, decoded, options) { let { imageFrame, imageInfo } = decoded; const lossyId = { ...id, - imageFrameRootPath: id.imageFrameRootPath.replace("frames", options.alternateName), + imageFrameRootPath: id.imageFrameRootPath.replace( + "frames", + options.alternateName, + ), transferSyntaxUid: transcodeDestinationMap.jhc.transferSyntaxUid, }; @@ -264,7 +292,8 @@ async function generateLossyImage(id, decoded, options) { } if (options.alternate === "jls") { - lossyId.transferSyntaxUid = transcodeDestinationMap["jls-lossy"].transferSyntaxUid; + lossyId.transferSyntaxUid = + transcodeDestinationMap["jls-lossy"].transferSyntaxUid; } else if (options.alternate === "jlsLossless") { lossyId.transferSyntaxUid = transcodeDestinationMap.jls.transferSyntaxUid; lossy = false; @@ -277,9 +306,19 @@ async function generateLossyImage(id, decoded, options) { lossy, }), }; - const lossyEncoding = await dicomCodec.encode(imageFrame, imageInfo, lossyId.transferSyntaxUid, encodeOptions); - console.log("Encoded alternate", lossyId.transferSyntaxUid, "of size", lossyEncoding.imageFrame.length); - // eslint-disable-next-line consistent-return + const lossyEncoding = await dicomCodec.encode( + imageFrame, + imageInfo, + lossyId.transferSyntaxUid, + encodeOptions, + ); + console.log( + "Encoded alternate", + lossyId.transferSyntaxUid, + "of size", + lossyEncoding.imageFrame.length, + ); + return { id: lossyId, imageFrame: lossyEncoding.imageFrame }; } catch (e) { console.warn("Unable to create alternate:", e); @@ -300,13 +339,23 @@ function isPalette(dataSet) { * @param {*} options runner options * @returns object result for transcoding operation with id and image frame. 
*/ -async function transcodeImageFrame(id, targetIdSrc, imageFrame, dataSet, options = {}) { +async function transcodeImageFrame( + id, + targetIdSrc, + imageFrame, + dataSet, + options = {}, +) { let targetId = targetIdSrc; let result = {}; const samplesPerPixel = dataSet.uint16(Tags.RawSamplesPerPixel); const planarConfiguration = dataSet.uint16("x00280006"); - if (!shouldTranscodeImageFrame(id, options, samplesPerPixel) || planarConfiguration === 1 || isPalette(dataSet)) { + if ( + !shouldTranscodeImageFrame(id, options, samplesPerPixel) || + planarConfiguration === 1 || + isPalette(dataSet) + ) { console.verbose("Shouldn't transcode"); return { id, @@ -315,11 +364,22 @@ async function transcodeImageFrame(id, targetIdSrc, imageFrame, dataSet, options }; } - const transcoder = getTranscoder(id.transferSyntaxUid, options, samplesPerPixel); + const transcoder = getTranscoder( + id.transferSyntaxUid, + options, + samplesPerPixel, + ); // Don't transcode if not required - if (targetId.transferSyntaxUid !== transcoder.transferSyntaxUid && !options.forceTranscode) { - console.verbose("Image is already in", targetId.transferSyntaxUid, "not transcoding"); + if ( + targetId.transferSyntaxUid !== transcoder.transferSyntaxUid && + !options.forceTranscode + ) { + console.verbose( + "Image is already in", + targetId.transferSyntaxUid, + "not transcoding", + ); return { id, imageFrame, @@ -342,24 +402,56 @@ async function transcodeImageFrame(id, targetIdSrc, imageFrame, dataSet, options try { switch (transcoder.transcodeOp) { case transcodeOp.transcode: - transcodeLog(options, `Full transcoding image from \x1b[43m${id.transferSyntaxUid}\x1b[0m to \x1b[43m${targetId.transferSyntaxUid}\x1b[0m`); - - decoded = await dicomCodec.decode(imageFrame, imageInfo, id.transferSyntaxUid); - result = await dicomCodec.encode(decoded.imageFrame, decoded.imageInfo, targetId.transferSyntaxUid, encodeOptions); - - console.log("transcoded image to", targetId.transferSyntaxUid, "of size", result.imageFrame.length); + transcodeLog( + options, + `Full transcoding image from \x1b[43m${id.transferSyntaxUid}\x1b[0m to \x1b[43m${targetId.transferSyntaxUid}\x1b[0m`, + ); + + decoded = await dicomCodec.decode( + imageFrame, + imageInfo, + id.transferSyntaxUid, + ); + result = await dicomCodec.encode( + decoded.imageFrame, + decoded.imageInfo, + targetId.transferSyntaxUid, + encodeOptions, + ); + + console.log( + "transcoded image to", + targetId.transferSyntaxUid, + "of size", + result.imageFrame.length, + ); processResultMsg = `Transcoding finished`; break; case transcodeOp.encode: - transcodeLog(options, `Encoding image to \x1b[43m${targetId.transferSyntaxUid}\x1b[0m`); - - result = await dicomCodec.encode(imageFrame, imageInfo, targetId.transferSyntaxUid, encodeOptions); + transcodeLog( + options, + `Encoding image to \x1b[43m${targetId.transferSyntaxUid}\x1b[0m`, + ); + + result = await dicomCodec.encode( + imageFrame, + imageInfo, + targetId.transferSyntaxUid, + encodeOptions, + ); processResultMsg = `Encoding finished`; break; case transcodeOp.decode: - transcodeLog(options, `Decoding image from \x1b[43m${id.transferSyntaxUid}\x1b[0m`); - result = await dicomCodec.decode(imageFrame, imageInfo, id.transferSyntaxUid); + transcodeLog( + options, + `Decoding image from \x1b[43m${id.transferSyntaxUid}\x1b[0m`, + ); + result = await dicomCodec.decode( + imageFrame, + imageInfo, + id.transferSyntaxUid, + ); processResultMsg = `Decoding finished`; break; @@ -407,7 +499,11 @@ function transcodeId(id, options, samplesPerPixel) { } 
const targetId = { ...id }; - const { transferSyntaxUid } = getTranscoder(id.transferSyntaxUid, options, samplesPerPixel); + const { transferSyntaxUid } = getTranscoder( + id.transferSyntaxUid, + options, + samplesPerPixel, + ); targetId.transferSyntaxUid = transferSyntaxUid; @@ -436,7 +532,11 @@ function transcodeMetadata(metadata, id, options) { const result = { ...metadata }; if (result[Tags.AvailableTransferSyntaxUID]) { - Tags.setValue(result, Tags.AvailableTransferSyntaxUID, transcodedId.transferSyntaxUid); + Tags.setValue( + result, + Tags.AvailableTransferSyntaxUID, + transcodedId.transferSyntaxUid, + ); console.verbose("Apply available tsuid", transcodeId.transferSyntaxUid); } diff --git a/packages/static-wado-creator/lib/operation/extractImageFrames.js b/packages/static-wado-creator/lib/operation/extractImageFrames.js index 0320a9db..9c4a2ca1 100644 --- a/packages/static-wado-creator/lib/operation/extractImageFrames.js +++ b/packages/static-wado-creator/lib/operation/extractImageFrames.js @@ -5,7 +5,8 @@ const getUncompressedImageFrame = require("./getUncompressedImageFrame"); const getEncapsulatedImageFrame = require("./getEncapsulatedImageFrame"); const { isVideo } = require("../writer/VideoWriter"); -const areFramesAreFragmented = (attr, numberOfFrames) => attr.encapsulatedPixelData && numberOfFrames != attr.fragments.length; +const areFramesAreFragmented = (attr, numberOfFrames) => + attr.encapsulatedPixelData && numberOfFrames != attr.fragments.length; const getFrameSize = (dataSet) => { const rows = dataSet.uint16("x00280010"); @@ -30,11 +31,25 @@ const extractImageFrames = async (dataSet, attr, vr, callback) => { for (let frameIndex = 0; frameIndex < numberOfFrames; frameIndex++) { if (attr.encapsulatedPixelData) { - const compressedFrame = getEncapsulatedImageFrame(dataSet, attr, frameIndex, framesAreFragmented); + const compressedFrame = getEncapsulatedImageFrame( + dataSet, + attr, + frameIndex, + framesAreFragmented, + ); BulkDataURI = await callback.imageFrame(compressedFrame, { dataSet }); - Stats.OverallStats.add("Image Write", `Write image frame ${frameIndex + 1}`, 5000); + Stats.OverallStats.add( + "Image Write", + `Write image frame ${frameIndex + 1}`, + 5000, + ); } else { - const uncompressedFrame = getUncompressedImageFrame(dataSet, attr, frameIndex, uncompressedFrameSize); + const uncompressedFrame = getUncompressedImageFrame( + dataSet, + attr, + frameIndex, + uncompressedFrameSize, + ); BulkDataURI = await callback.imageFrame(uncompressedFrame, { dataSet }); } } diff --git a/packages/static-wado-creator/lib/operation/getDataSet.js b/packages/static-wado-creator/lib/operation/getDataSet.js index 53f6d574..d27503d1 100644 --- a/packages/static-wado-creator/lib/operation/getDataSet.js +++ b/packages/static-wado-creator/lib/operation/getDataSet.js @@ -13,14 +13,23 @@ const getValue = require("./getValue"); * @param {*} parentAttr Parent reference for sequence element tags. 
* @returns */ -async function getDataSet(dataSet, callback, optionsOrig, parentAttr = undefined) { +async function getDataSet( + dataSet, + callback, + optionsOrig, + parentAttr = undefined, +) { const metadata = {}; let options = optionsOrig; // iterate over dataSet attributes in order for (const tag in dataSet.elements) { // Raw versions have the x in front of them - if (tag != Tags.RawTransferSyntaxUID && tag >= Tags.RawMinTag && tag < Tags.RawFirstBodyTag) { + if ( + tag != Tags.RawTransferSyntaxUID && + tag >= Tags.RawMinTag && + tag < Tags.RawFirstBodyTag + ) { continue; } if (tag === Tags.RawSpecificCharacterSet) { @@ -28,21 +37,46 @@ async function getDataSet(dataSet, callback, optionsOrig, parentAttr = undefined options = { ...options, SpecificCharacterSet }; } const attr = dataSet.elements[tag]; - /* eslint-disable-next-line no-use-before-define */ - await attributeToJS(metadata, tag, dataSet, attr, callback, options, parentAttr); + + await attributeToJS( + metadata, + tag, + dataSet, + attr, + callback, + options, + parentAttr, + ); } if (metadata[Tags.TransferSyntaxUID]) { // console.log(`Found tsuid ${JSON.stringify(metadata[Tags.TransferSyntaxUID])} assigning to ${Tags.AvailableTransferSyntaxUID}`) - metadata[Tags.AvailableTransferSyntaxUID] = metadata[Tags.TransferSyntaxUID]; + metadata[Tags.AvailableTransferSyntaxUID] = + metadata[Tags.TransferSyntaxUID]; delete metadata[Tags.TransferSyntaxUID]; } return { metadata }; } -async function attributeToJS(metadataSrc, tag, dataSet, attr, callback, options, parentAttr) { +async function attributeToJS( + metadataSrc, + tag, + dataSet, + attr, + callback, + options, + parentAttr, +) { const metadata = metadataSrc; const vr = getVR(attr); - const value = await getValue(dataSet, attr, vr, getDataSet, callback, options, parentAttr); + const value = await getValue( + dataSet, + attr, + vr, + getDataSet, + callback, + options, + parentAttr, + ); const key = tag.substring(1).toUpperCase(); if (value === undefined || value === null || value.length === 0) { if (!vr) return; diff --git a/packages/static-wado-creator/lib/operation/getEncapsulatedImageFrame.js b/packages/static-wado-creator/lib/operation/getEncapsulatedImageFrame.js index fc03e483..4295d892 100644 --- a/packages/static-wado-creator/lib/operation/getEncapsulatedImageFrame.js +++ b/packages/static-wado-creator/lib/operation/getEncapsulatedImageFrame.js @@ -4,20 +4,44 @@ const dicomParser = require("dicom-parser"); * Function to deal with extracting an image frame from an encapsulated data set. 
*/ -const getEncapsulatedImageFrame = (dataSet, attr, frameIndex, framesAreFragmented) => { - if (dataSet.elements.x7fe00010 && dataSet.elements.x7fe00010.basicOffsetTable.length) { +const getEncapsulatedImageFrame = ( + dataSet, + attr, + frameIndex, + framesAreFragmented, +) => { + if ( + dataSet.elements.x7fe00010 && + dataSet.elements.x7fe00010.basicOffsetTable.length + ) { // Basic Offset Table is not empty - return dicomParser.readEncapsulatedImageFrame(dataSet, dataSet.elements.x7fe00010, frameIndex); + return dicomParser.readEncapsulatedImageFrame( + dataSet, + dataSet.elements.x7fe00010, + frameIndex, + ); } // Empty basic offset table if (framesAreFragmented) { - const basicOffsetTable = dicomParser.createJPEGBasicOffsetTable(dataSet, dataSet.elements.x7fe00010); + const basicOffsetTable = dicomParser.createJPEGBasicOffsetTable( + dataSet, + dataSet.elements.x7fe00010, + ); - return dicomParser.readEncapsulatedImageFrame(dataSet, dataSet.elements.x7fe00010, frameIndex, basicOffsetTable); + return dicomParser.readEncapsulatedImageFrame( + dataSet, + dataSet.elements.x7fe00010, + frameIndex, + basicOffsetTable, + ); } - return dicomParser.readEncapsulatedPixelDataFromFragments(dataSet, dataSet.elements.x7fe00010, frameIndex); + return dicomParser.readEncapsulatedPixelDataFromFragments( + dataSet, + dataSet.elements.x7fe00010, + frameIndex, + ); }; module.exports = getEncapsulatedImageFrame; diff --git a/packages/static-wado-creator/lib/operation/getUncompressedImageFrame.js b/packages/static-wado-creator/lib/operation/getUncompressedImageFrame.js index c6c03ac5..3da6fe6d 100644 --- a/packages/static-wado-creator/lib/operation/getUncompressedImageFrame.js +++ b/packages/static-wado-creator/lib/operation/getUncompressedImageFrame.js @@ -1,6 +1,14 @@ -function getUncompressedImageFrame(dataSet, attr, frame, uncompressedFrameSize) { +function getUncompressedImageFrame( + dataSet, + attr, + frame, + uncompressedFrameSize, +) { const start = attr.dataOffset + frame * uncompressedFrameSize; - const binaryValue = dataSet.byteArray.slice(start, start + uncompressedFrameSize); + const binaryValue = dataSet.byteArray.slice( + start, + start + uncompressedFrameSize, + ); return binaryValue; } diff --git a/packages/static-wado-creator/lib/operation/getValue.js b/packages/static-wado-creator/lib/operation/getValue.js index eb3ded61..9bc4f4a2 100644 --- a/packages/static-wado-creator/lib/operation/getValue.js +++ b/packages/static-wado-creator/lib/operation/getValue.js @@ -32,7 +32,7 @@ const getValueInlineBinary = (dataSet, attr) => { if (attr.BulkDataURI) return { BulkDataURI: attr.BulkDataURI }; const binaryValue = dataSet.byteArray.slice( attr.dataOffset, - attr.dataOffset + attr.length + attr.dataOffset + attr.length, ); return { InlineBinary: binaryValue.toString("base64") }; }; @@ -194,7 +194,7 @@ const getValue = async ( getDataSet, callback, options, - parentAttr + parentAttr, ) => { // It will only process pixelData tag if on metadata root. Otherwise it will be skiped. 
if (attr.tag === "x7fe00010" && !parentAttr) { @@ -203,7 +203,7 @@ const getValue = async ( attr, vr, callback, - options + options, ); return { BulkDataURI }; } @@ -229,7 +229,7 @@ const getValue = async ( } const binaryValue = dataSet.byteArray.slice( attr.dataOffset, - attr.dataOffset + attr.length + attr.dataOffset + attr.length, ); const mimeType = attr.tag == "x00420011" && dataSet.string("x00420012"); diff --git a/packages/static-wado-creator/lib/program/index.js b/packages/static-wado-creator/lib/program/index.js index 4f0ef714..94997cd2 100644 --- a/packages/static-wado-creator/lib/program/index.js +++ b/packages/static-wado-creator/lib/program/index.js @@ -7,7 +7,10 @@ const staticWadoUtil = require("@radicalimaging/static-wado-util"); * @returns Program object */ async function configureProgram(defaults) { - const configurationFile = await staticWadoUtil.loadConfiguration(defaults, process.argv); + const configurationFile = await staticWadoUtil.loadConfiguration( + defaults, + process.argv, + ); console.log("Loaded configuration from", configurationFile); staticWadoUtil.configureCommands(defaults); } diff --git a/packages/static-wado-creator/lib/util/IdCreator.js b/packages/static-wado-creator/lib/util/IdCreator.js index 0eab72b8..386ab303 100644 --- a/packages/static-wado-creator/lib/util/IdCreator.js +++ b/packages/static-wado-creator/lib/util/IdCreator.js @@ -1,12 +1,33 @@ const path = require("path"); -function IdCreator({ directoryName, deduplicatedRoot, deduplicatedInstancesRoot }) { +function IdCreator({ + directoryName, + deduplicatedRoot, + deduplicatedInstancesRoot, +}) { return (uids, filename) => { - const studyPath = path.join(directoryName, "studies", uids.studyInstanceUid); - const seriesRootPath = path.join(studyPath, "series", uids.seriesInstanceUid); - const sopInstanceRootPath = path.join(studyPath, "series", uids.seriesInstanceUid, "instances", uids.sopInstanceUid); + const studyPath = path.join( + directoryName, + "studies", + uids.studyInstanceUid, + ); + const seriesRootPath = path.join( + studyPath, + "series", + uids.seriesInstanceUid, + ); + const sopInstanceRootPath = path.join( + studyPath, + "series", + uids.seriesInstanceUid, + "instances", + uids.sopInstanceUid, + ); const deduplicatedPath = path.join(deduplicatedRoot, uids.studyInstanceUid); - const deduplicatedInstancesPath = path.join(deduplicatedInstancesRoot, uids.studyInstanceUid); + const deduplicatedInstancesPath = path.join( + deduplicatedInstancesRoot, + uids.studyInstanceUid, + ); const imageFrameRootPath = path.join(sopInstanceRootPath, "frames"); return { diff --git a/packages/static-wado-creator/lib/util/adaptProgramOpts.js b/packages/static-wado-creator/lib/util/adaptProgramOpts.js index ac81ff1f..54570319 100644 --- a/packages/static-wado-creator/lib/util/adaptProgramOpts.js +++ b/packages/static-wado-creator/lib/util/adaptProgramOpts.js @@ -34,6 +34,6 @@ module.exports = function adaptProgramOpts(programOpts, defaults) { encapsulatedImage: encapsulatedImage ?? singlePartImage !== true, singlePartImage: singlePartImage ?? 
encapsulatedImage === false, delete: deleteInstances, - } + }, ); }; diff --git a/packages/static-wado-creator/lib/util/compressionOptionParser.js b/packages/static-wado-creator/lib/util/compressionOptionParser.js index a0099211..cbb6538f 100644 --- a/packages/static-wado-creator/lib/util/compressionOptionParser.js +++ b/packages/static-wado-creator/lib/util/compressionOptionParser.js @@ -1,13 +1,22 @@ const { program } = require("@radicalimaging/static-wado-util"); -const { getDestinationTranscoder } = require("../operation/adapter/transcodeImage"); +const { + getDestinationTranscoder, +} = require("../operation/adapter/transcodeImage"); module.exports = function compressionOptionParser(value) { const destination = getDestinationTranscoder(value); if (destination) { - console.log("Found destination transferSyntax ", destination.transferSyntaxUid, "for ", value); + console.log( + "Found destination transferSyntax ", + destination.transferSyntaxUid, + "for ", + value, + ); } else { console.log("No transcoder destination for ", value); - throw new program.InvalidArgumentError(`No transcoder destination for ${value}`); + throw new program.InvalidArgumentError( + `No transcoder destination for ${value}`, + ); } return destination.transferSyntaxUid; }; diff --git a/packages/static-wado-creator/lib/writer/CompleteStudyWriter.js b/packages/static-wado-creator/lib/writer/CompleteStudyWriter.js index 5aaf10ee..6fcb18b4 100644 --- a/packages/static-wado-creator/lib/writer/CompleteStudyWriter.js +++ b/packages/static-wado-creator/lib/writer/CompleteStudyWriter.js @@ -1,4 +1,8 @@ -const { JSONReader, JSONWriter, Stats } = require("@radicalimaging/static-wado-util"); +const { + JSONReader, + JSONWriter, + Stats, +} = require("@radicalimaging/static-wado-util"); const { Tags } = require("@radicalimaging/static-wado-util"); const StudyData = require("../operation/StudyData"); @@ -22,16 +26,26 @@ const CompleteStudyWriter = (options) => { if (options.isGroup) { if (studyData.dirty) { await studyData.writeDeduplicatedGroup(); - console.log("Wrote updated deduplicated data for study", studyData.studyInstanceUid); + console.log( + "Wrote updated deduplicated data for study", + studyData.studyInstanceUid, + ); } else { - console.log("Not writing new deduplicated data because it is clean:", studyData.studyInstanceUid); + console.log( + "Not writing new deduplicated data because it is clean:", + studyData.studyInstanceUid, + ); } await studyData.deleteInstancesReferenced(); } if (!options.isStudyData) { - console.verbose("Not configured to write study metadata", studyData.studyInstanceUid); - if (options.notifications) this.notificationService.notifyStudy(studyData.studyInstanceUid); + console.verbose( + "Not configured to write study metadata", + studyData.studyInstanceUid, + ); + if (options.notifications) + this.notificationService.notifyStudy(studyData.studyInstanceUid); delete this.studyData; Stats.StudyStats.summarize(); return; @@ -41,29 +55,40 @@ const CompleteStudyWriter = (options) => { if (!isDirtyMetadata) { console.log("Study metadata", studyData.studyInstanceUid, "is clean."); delete this.studyData; - Stats.StudyStats.summarize(`Study metadata ${studyData.studyInstanceUid} has clean metadata, not writing`); + Stats.StudyStats.summarize( + `Study metadata ${studyData.studyInstanceUid} has clean metadata, not writing`, + ); return; } console.log("Writing study metadata", studyData.studyInstanceUid); const studyQuery = await studyData.writeMetadata(); - const allStudies = await 
JSONReader(options.directoryName, "studies/index.json.gz", []); + const allStudies = await JSONReader( + options.directoryName, + "studies/index.json.gz", + [], + ); const studyUID = Tags.getValue(studyQuery, Tags.StudyInstanceUID); if (!studyUID) { console.error("studyQuery=", studyQuery); throw new Error("Study query has null studyUID"); } - const studyIndex = allStudies.findIndex((item) => Tags.getValue(item, Tags.StudyInstanceUID) == studyUID); + const studyIndex = allStudies.findIndex( + (item) => Tags.getValue(item, Tags.StudyInstanceUID) == studyUID, + ); if (studyIndex == -1) { allStudies.push(studyQuery); } else { allStudies[studyIndex] = studyQuery; } await JSONWriter(options.directoryName, "studies", allStudies); - if (options.notifications) this.notificationService.notifyStudy(studyData.studyInstanceUid); + if (options.notifications) + this.notificationService.notifyStudy(studyData.studyInstanceUid); delete this.studyData; - Stats.StudyStats.summarize(`Wrote study metadata/query files for ${studyData.studyInstanceUid}`); + Stats.StudyStats.summarize( + `Wrote study metadata/query files for ${studyData.studyInstanceUid}`, + ); } /** diff --git a/packages/static-wado-creator/lib/writer/DeduplicateWriter.js b/packages/static-wado-creator/lib/writer/DeduplicateWriter.js index 8f7b513e..2a2a1bd0 100644 --- a/packages/static-wado-creator/lib/writer/DeduplicateWriter.js +++ b/packages/static-wado-creator/lib/writer/DeduplicateWriter.js @@ -24,7 +24,8 @@ const DeduplicateWriter = (options) => const studyData = await this.completeStudy.getCurrentStudyData(this, id); if (options.isDeduplicate) { - if (options.verbose) console.log("Writing single instance", id.studyInstanceUid); + if (options.verbose) + console.log("Writing single instance", id.studyInstanceUid); await perInstanceWriter(id, data); } else if (options.verbose) { console.log("Not writing single instance deduplicated"); diff --git a/packages/static-wado-creator/lib/writer/ExpandUriPath.js b/packages/static-wado-creator/lib/writer/ExpandUriPath.js index 2a9ceffb..1513f155 100644 --- a/packages/static-wado-creator/lib/writer/ExpandUriPath.js +++ b/packages/static-wado-creator/lib/writer/ExpandUriPath.js @@ -8,7 +8,10 @@ const ExpandUriPath = (id, path, options) => { } expandedRelative = `studies/${id.studyInstanceUid}/${expandedRelativeSeries}`; } - if (options.verbose) console.log(`Expanded path returned: ${prependBulkDataUri}${expandedRelative}${path}`); + if (options.verbose) + console.log( + `Expanded path returned: ${prependBulkDataUri}${expandedRelative}${path}`, + ); return `${prependBulkDataUri}${expandedRelative}${path}`; }; diff --git a/packages/static-wado-creator/lib/writer/HashDataWriter.js b/packages/static-wado-creator/lib/writer/HashDataWriter.js index 987224aa..c9fcd2e6 100644 --- a/packages/static-wado-creator/lib/writer/HashDataWriter.js +++ b/packages/static-wado-creator/lib/writer/HashDataWriter.js @@ -43,7 +43,11 @@ const HashDataWriter = gzip, }); if (isRaw) { - await WriteMultipart(writeStream, [new MultipartHeader("Content-Type", "application/octet-stream")], rawData); + await WriteMultipart( + writeStream, + [new MultipartHeader("Content-Type", "application/octet-stream")], + rawData, + ); } else { await writeStream.write(rawData); } diff --git a/packages/static-wado-creator/lib/writer/ImageFrameWriter.js b/packages/static-wado-creator/lib/writer/ImageFrameWriter.js index f687844f..2746a047 100644 --- a/packages/static-wado-creator/lib/writer/ImageFrameWriter.js +++ 
b/packages/static-wado-creator/lib/writer/ImageFrameWriter.js @@ -18,29 +18,54 @@ const ImageFrameWriter = (options) => { } if (encapsulatedImage || !extension) { - const writeStream = WriteStream(id.imageFrameRootPath, `${1 + index}.mht`, { - gzip: type.gzip, - mkdir: true, - }); + const writeStream = WriteStream( + id.imageFrameRootPath, + `${1 + index}.mht`, + { + gzip: type.gzip, + mkdir: true, + }, + ); await WriteMultipart( writeStream, - [new MultipartHeader("Content-Type", type.contentType, [new MultipartAttribute("transfer-syntax", transferSyntaxUid)])], - content + [ + new MultipartHeader("Content-Type", type.contentType, [ + new MultipartAttribute("transfer-syntax", transferSyntaxUid), + ]), + ], + content, ); await writeStream.close(); - console.verbose("Wrote encapsulated image frame", id.sopInstanceUid, index + 1, type.contentType); + console.verbose( + "Wrote encapsulated image frame", + id.sopInstanceUid, + index + 1, + type.contentType, + ); } if (extension && singlePartImage) { - const writeStreamSingle = WriteStream(id.imageFrameRootPath, `${1 + index}${extension}`, { - gzip: type.gzip, - mkdir: true, - }); + const writeStreamSingle = WriteStream( + id.imageFrameRootPath, + `${1 + index}${extension}`, + { + gzip: type.gzip, + mkdir: true, + }, + ); await writeStreamSingle.write(content); await writeStreamSingle.close(); - console.verbose("Wrote single part image frame", id.sopInstanceUid, index + 1, extension); + console.verbose( + "Wrote single part image frame", + id.sopInstanceUid, + index + 1, + extension, + ); } const includeSeries = true; - return ExpandUriPath(id, `instances/${id.sopInstanceUid}/frames`, { includeSeries, ...options }); + return ExpandUriPath(id, `instances/${id.sopInstanceUid}/frames`, { + includeSeries, + ...options, + }); }; }; diff --git a/packages/static-wado-creator/lib/writer/RawDicomWriter.js b/packages/static-wado-creator/lib/writer/RawDicomWriter.js index 2ec5b62d..91f3a158 100644 --- a/packages/static-wado-creator/lib/writer/RawDicomWriter.js +++ b/packages/static-wado-creator/lib/writer/RawDicomWriter.js @@ -36,7 +36,7 @@ const RawDicomWriter = await WriteMultipart( writeStream, [new MultipartHeader("Content-Type", contentType, [])], - buffer + buffer, ); await writeStream.close(); }; diff --git a/packages/static-wado-creator/lib/writer/VideoWriter.js b/packages/static-wado-creator/lib/writer/VideoWriter.js index 84f673b5..11ae80a4 100644 --- a/packages/static-wado-creator/lib/writer/VideoWriter.js +++ b/packages/static-wado-creator/lib/writer/VideoWriter.js @@ -34,7 +34,7 @@ const VideoWriter = () => filename, { mkdir: true, - } + }, ); let length = 0; const { fragments } = dataSet.elements.x7fe00010; @@ -48,14 +48,14 @@ const VideoWriter = () => const fragment = fragments[i]; const blob = dataSet.byteArray.slice( fragment.position, - fragment.position + fragment.length + fragment.position + fragment.length, ); length += blob.length; await writeStream.write(blob); } await writeStream.close(); console.log( - `Done video ${id.sopInstanceRootPath}\\${filename} of length ${length}` + `Done video ${id.sopInstanceRootPath}\\${filename} of length ${length}`, ); return `series/${id.seriesInstanceUid}/instances/${id.sopInstanceUid}/rendered?length=${length}&offset=0&accept=video/mp4`; }; diff --git a/packages/static-wado-creator/lib/writer/WriteMultipart.js b/packages/static-wado-creator/lib/writer/WriteMultipart.js index a07b2398..959b506d 100644 --- a/packages/static-wado-creator/lib/writer/WriteMultipart.js +++ 
b/packages/static-wado-creator/lib/writer/WriteMultipart.js @@ -9,7 +9,9 @@ const WriteMultipart = async (writeStream, headers, content) => { if (header.attributes) { for (let j = 0; j < header.attributes.length; j++) { const attribute = header.attributes[j]; - await writeStream.write(`;${attribute.attributeName}=${attribute.attributeValue}`); + await writeStream.write( + `;${attribute.attributeName}=${attribute.attributeValue}`, + ); } } await writeStream.write("\r\n"); diff --git a/packages/static-wado-creator/lib/writer/WriteStream.js b/packages/static-wado-creator/lib/writer/WriteStream.js index 650ee057..92b62f75 100644 --- a/packages/static-wado-creator/lib/writer/WriteStream.js +++ b/packages/static-wado-creator/lib/writer/WriteStream.js @@ -11,17 +11,23 @@ let writeCount = 0; */ const WriteStream = (dir, nameSrc, options = {}) => { const isGzip = nameSrc.indexOf(".gz") != -1 || options.gzip; - const name = (isGzip && nameSrc.indexOf(".gz") === -1 && `${nameSrc}.gz`) || nameSrc; + const name = + (isGzip && nameSrc.indexOf(".gz") === -1 && `${nameSrc}.gz`) || nameSrc; if (options.mkdir) fs.mkdirSync(dir, { recursive: true }); - const tempName = path.join(dir, `tempFile-${Math.round(Math.random() * 1000000000)}`); + const tempName = path.join( + dir, + `tempFile-${Math.round(Math.random() * 1000000000)}`, + ); const finalName = path.join(dir, name); writeCount++; if (writeCount > 10) { console.log("Write count", tempName, finalName, writeCount); if (writeCount > 100) { console.error("Too many open writes", new Error()); - throw new Error(`Write count too high ${writeCount} destination ${finalName}`); + throw new Error( + `Write count too high ${writeCount} destination ${finalName}`, + ); } } const rawStream = fs.createWriteStream(tempName); diff --git a/packages/static-wado-creator/tests/e2e/index.test.js b/packages/static-wado-creator/tests/e2e/index.test.js index 45684f0c..87ab9f67 100644 --- a/packages/static-wado-creator/tests/e2e/index.test.js +++ b/packages/static-wado-creator/tests/e2e/index.test.js @@ -22,7 +22,10 @@ describe("index", () => { let objJuno; function assertExists(fileOrDir, exists = true) { - must(fs.existsSync(fileOrDir), `File ${fileOrDir} ${exists ? "does not" : ""} exist`).be.eql(exists); + must( + fs.existsSync(fileOrDir), + `File ${fileOrDir} ${exists ? 
"does not" : ""} exist`, + ).be.eql(exists); } beforeEach(() => { @@ -47,7 +50,7 @@ describe("index", () => { return; } console.log(`stdout: ${stdout}`); - } + }, ); processes.createJuno = true; metadataJuno = await JSONReader(junoSeriesDir, "metadata.gz"); diff --git a/packages/static-wado-creator/tests/unit/getValue.test.js b/packages/static-wado-creator/tests/unit/getValue.test.js index cba75c70..69af194e 100644 --- a/packages/static-wado-creator/tests/unit/getValue.test.js +++ b/packages/static-wado-creator/tests/unit/getValue.test.js @@ -6,10 +6,14 @@ const UN = "UN"; describe("getValue", () => { it("gets undefined for grouplength", async () => { - must(await getValue(dataSet, { tag: "x00090000", Value: [1] }, UN)).be.undefined(); + must( + await getValue(dataSet, { tag: "x00090000", Value: [1] }, UN), + ).be.undefined(); }); it("gets undefined for item end", async () => { - must(await getValue(dataSet, { tag: "xfffee00d", Value: [1] }, UN)).be.undefined(); + must( + await getValue(dataSet, { tag: "xfffee00d", Value: [1] }, UN), + ).be.undefined(); }); }); diff --git a/packages/static-wado-plugins/lib/index.js b/packages/static-wado-plugins/lib/index.js index 41a09c9d..c5d1d242 100644 --- a/packages/static-wado-plugins/lib/index.js +++ b/packages/static-wado-plugins/lib/index.js @@ -8,9 +8,12 @@ const plugins = ConfigPoint.getConfig("plugins"); * Define all the local plugins here, so they can be loaded dynamically. */ ConfigPoint.extendConfiguration("plugins", { - readSeriesIndex: "@radicalimaging/static-wado-plugins/lib/readSeriesIndex.plugin.js", - studiesQueryByIndex: "@radicalimaging/static-wado-plugins/lib/studiesQueryByIndex.plugin.js", - studiesQueryToScp: "@radicalimaging/static-wado-plugins/lib/studiesQueryToScp.plugin.js", + readSeriesIndex: + "@radicalimaging/static-wado-plugins/lib/readSeriesIndex.plugin.js", + studiesQueryByIndex: + "@radicalimaging/static-wado-plugins/lib/studiesQueryByIndex.plugin.js", + studiesQueryToScp: + "@radicalimaging/static-wado-plugins/lib/studiesQueryToScp.plugin.js", // The point of plugins is that they can be lazy loaded, so no need to load s3 if not being used. s3Plugin: "@radicalimaging/s3-deploy/s3.plugin.mjs", }); diff --git a/packages/static-wado-plugins/lib/readSeriesIndex.plugin.js b/packages/static-wado-plugins/lib/readSeriesIndex.plugin.js index 162da57c..cddf43b8 100644 --- a/packages/static-wado-plugins/lib/readSeriesIndex.plugin.js +++ b/packages/static-wado-plugins/lib/readSeriesIndex.plugin.js @@ -5,7 +5,10 @@ const { readSeriesIndex } = ConfigPoint.register({ readSeriesIndex: { generator: (params) => (studyInstanceUID) => { console.log("Retrieve series", studyInstanceUID, "in", params.rootDir); - return JSONReader(params.rootDir, `studies/{studyInstanceUID}/series/index.json.gz`); + return JSONReader( + params.rootDir, + `studies/{studyInstanceUID}/series/index.json.gz`, + ); }, }, }); diff --git a/packages/static-wado-plugins/lib/studiesQueryToScp.plugin.js b/packages/static-wado-plugins/lib/studiesQueryToScp.plugin.js index 8dd49109..e8d73077 100644 --- a/packages/static-wado-plugins/lib/studiesQueryToScp.plugin.js +++ b/packages/static-wado-plugins/lib/studiesQueryToScp.plugin.js @@ -12,7 +12,10 @@ const { studiesQueryByIndex } = ConfigPoint.register({ const { queryAe, callingAe = "SCU", staticWadoAe } = params; // asserts there is queryAe value and definition, otherwise throws an exception. 
- assertions.assertAeDefinition(params, "queryAe", ["queryAe not specified", `No data for aeConfig.${queryAe} is configured in ${Object.keys(aeConfig)}`]); + assertions.assertAeDefinition(params, "queryAe", [ + "queryAe not specified", + `No data for aeConfig.${queryAe} is configured in ${Object.keys(aeConfig)}`, + ]); const aeData = aeConfig[queryAe]; const { host, port } = aeData; @@ -35,10 +38,15 @@ const { studiesQueryByIndex } = ConfigPoint.register({ const status = response.getStatus(); if (status === Status.Pending && response.hasDataset()) { const dataset = response.getDataset(); - if (params.verbose) console.log("Adding result", dataset.elements.StudyInstanceUID); + if (params.verbose) + console.log("Adding result", dataset.elements.StudyInstanceUID); queryList.push(dataset); } else if (status === Status.Success) { - console.log("SCP Study Query success with", queryList.length, "items"); + console.log( + "SCP Study Query success with", + queryList.length, + "items", + ); resolve(queryList); } else if (status === Status.Pending) { console.log("Pending..."); @@ -53,7 +61,13 @@ const { studiesQueryByIndex } = ConfigPoint.register({ reject(e); }); client.send(host, port, callingAe || staticWadoAe, queryAe); - console.log("Sending client request", host, port, callingAe || staticWadoAe, queryAe); + console.log( + "Sending client request", + host, + port, + callingAe || staticWadoAe, + queryAe, + ); }); }; }, diff --git a/packages/static-wado-plugins/lib/web-proxy/index.js b/packages/static-wado-plugins/lib/web-proxy/index.js index 35b7225c..9886b57b 100644 --- a/packages/static-wado-plugins/lib/web-proxy/index.js +++ b/packages/static-wado-plugins/lib/web-proxy/index.js @@ -5,6 +5,9 @@ module.exports = ConfigPoint.createConfiguration("webProxy", { setRoute: (router, item) => { const forwardPath = item.forwardPath || "http://localhost:3000"; console.log("Web Proxy to", forwardPath); - router.get("/*", createProxyMiddleware({ target: forwardPath, changeOrigin: true })); + router.get( + "/*", + createProxyMiddleware({ target: forwardPath, changeOrigin: true }), + ); }, }); diff --git a/packages/static-wado-scp/lib/DcmjsDimseScp.js b/packages/static-wado-scp/lib/DcmjsDimseScp.js index 19d5adf5..6ab617ce 100644 --- a/packages/static-wado-scp/lib/DcmjsDimseScp.js +++ b/packages/static-wado-scp/lib/DcmjsDimseScp.js @@ -70,7 +70,7 @@ class DcmjsDimseScp extends Scp { ) { const transferSyntaxes = context.getTransferSyntaxUids(); const transferSyntax = PreferredTransferSyntax.find((tsuid) => - transferSyntaxes.find((contextTsuid) => contextTsuid === tsuid) + transferSyntaxes.find((contextTsuid) => contextTsuid === tsuid), ); if (transferSyntax) { context.setResult(PresentationContextResult.Accept, transferSyntax); @@ -79,19 +79,19 @@ class DcmjsDimseScp extends Scp { "Rejected syntax", context.getAbstractSyntaxUid(), "because no transfer syntax found in", - transferSyntaxes + transferSyntaxes, ); context.setResult( - PresentationContextResult.RejectTransferSyntaxesNotSupported + PresentationContextResult.RejectTransferSyntaxesNotSupported, ); } } else { console.log( "Not supported abstract syntax", - context.getAbstractSyntaxUid() + context.getAbstractSyntaxUid(), ); context.setResult( - PresentationContextResult.RejectAbstractSyntaxNotSupported + PresentationContextResult.RejectAbstractSyntaxNotSupported, ); } }); @@ -103,7 +103,7 @@ class DcmjsDimseScp extends Scp { } // Handle incoming C-ECHO requests - /* eslint-disable-next-line class-methods-use-this */ + cEchoRequest(request, callback) { 
const response = CEchoResponse.fromRequest(request); response.setStatus(Status.Success); diff --git a/packages/static-wado-scp/lib/dicomWebScpConfig.js b/packages/static-wado-scp/lib/dicomWebScpConfig.js index 000a0ab2..12414e8e 100644 --- a/packages/static-wado-scp/lib/dicomWebScpConfig.js +++ b/packages/static-wado-scp/lib/dicomWebScpConfig.js @@ -16,11 +16,13 @@ const { dicomWebScpConfig } = ConfigPoint.register({ maximumInlinePrivateLength: 64, maximumInlinePublicLength: 128 * 1024 + 2, helpShort: "dicomwebscp", - helpDescription: "Creates server to receive data on DIMSE and store it DICOM", + helpDescription: + "Creates server to receive data on DIMSE and store it DICOM", options: [ { key: "-c, --clean", - description: "Clean the outputs before generating/starting to write new values.", + description: + "Clean the outputs before generating/starting to write new values.", defaultValue: false, }, { diff --git a/packages/static-wado-scp/lib/loadPlugins.js b/packages/static-wado-scp/lib/loadPlugins.js index 9b2792df..f68c86de 100644 --- a/packages/static-wado-scp/lib/loadPlugins.js +++ b/packages/static-wado-scp/lib/loadPlugins.js @@ -12,7 +12,7 @@ const loadPlugins = (options) => { }) .catch((reason) => { console.log("Unable to load plugin because", reason); - // eslint-disable-next-line no-process-exit + process.exit(-1); }); }; diff --git a/packages/static-wado-scp/lib/program/index.js b/packages/static-wado-scp/lib/program/index.js index 1d4c8057..7f274ac5 100644 --- a/packages/static-wado-scp/lib/program/index.js +++ b/packages/static-wado-scp/lib/program/index.js @@ -8,7 +8,10 @@ const loadPlugins = require("../loadPlugins"); * @returns Program object */ async function configureProgram(defaults) { - const configurationFile = await staticWadoUtil.loadConfiguration(defaults, process.argv); + const configurationFile = await staticWadoUtil.loadConfiguration( + defaults, + process.argv, + ); console.log("Loaded configuration from", configurationFile); loadPlugins(defaults); staticWadoUtil.configureCommands(defaults); diff --git a/packages/static-wado-util/lib/NotificationService.js b/packages/static-wado-util/lib/NotificationService.js index 17a04054..0f36698f 100644 --- a/packages/static-wado-util/lib/NotificationService.js +++ b/packages/static-wado-util/lib/NotificationService.js @@ -89,7 +89,7 @@ class NotificationService { StudyInstanceUID: studyUID, action: options.action || "update", }, - studyUID + studyUID, ); } } diff --git a/packages/static-wado-util/lib/assertions/assertAeDefinition.js b/packages/static-wado-util/lib/assertions/assertAeDefinition.js index c67f3e9c..731e80ac 100644 --- a/packages/static-wado-util/lib/assertions/assertAeDefinition.js +++ b/packages/static-wado-util/lib/assertions/assertAeDefinition.js @@ -11,7 +11,11 @@ const aeConfig = require("../aeConfig"); * @param {*} errorMessages array of error messages for missing ae and missing ae def. Where first index is error string for missing ae and second index is error message for missing ae def. 
* @returns boolean */ -module.exports = function assertAeDefinition(params, aeStr, errorMessages = []) { +module.exports = function assertAeDefinition( + params, + aeStr, + errorMessages = [], +) { const aeValue = params[aeStr]; const [errorAeMessage, errorAeDefMessage] = errorMessages; diff --git a/packages/static-wado-util/lib/asyncIterableToBuffer.js b/packages/static-wado-util/lib/asyncIterableToBuffer.js index 5f31e3a2..93bd01f4 100644 --- a/packages/static-wado-util/lib/asyncIterableToBuffer.js +++ b/packages/static-wado-util/lib/asyncIterableToBuffer.js @@ -36,7 +36,8 @@ const StreamingFunctions = { index_get(ikey) { const found = this.findChunk(ikey); - if (!found) throw Error(`index ${ikey} not found between 0..${this.combinedLength}`); + if (!found) + throw Error(`index ${ikey} not found between 0..${this.combinedLength}`); return found[ikey - found.start]; }, @@ -79,7 +80,8 @@ const StreamingFunctions = { */ copy(target, targetStart = 0, srcStart = 0, srcEnd = 0) { const { length } = target; - const srcLength = (srcEnd === undefined && Math.min(this.length, srcEnd)) || this.length; + const srcLength = + (srcEnd === undefined && Math.min(this.length, srcEnd)) || this.length; const copied = 0; while (targetStart < length && srcStart < srcLength) { target[targetStart] = this[srcStart]; @@ -108,7 +110,11 @@ const asyncIteratorToBuffer = async (readable) => { const chunks = []; for await (const chunk of readable) { chunks.push(chunk); - Stats.BufferStats.add("Read Async", `Read async buffer ${chunks.length}`, 65536); + Stats.BufferStats.add( + "Read Async", + `Read async buffer ${chunks.length}`, + 65536, + ); } Stats.BufferStats.reset(); return StreamingBuffer(chunks); diff --git a/packages/static-wado-util/lib/dictionary/Tags.js b/packages/static-wado-util/lib/dictionary/Tags.js index a784f479..109d521d 100644 --- a/packages/static-wado-util/lib/dictionary/Tags.js +++ b/packages/static-wado-util/lib/dictionary/Tags.js @@ -1,12 +1,14 @@ const dcmjs = require("dcmjs"); const dataDictionary = require("./dataDictionary"); -const { naturalizeDataset, denaturalizeDataset } = dcmjs.data.DicomMetaDictionary; +const { naturalizeDataset, denaturalizeDataset } = + dcmjs.data.DicomMetaDictionary; /** Find the actual tag for a private value */ const findPrivate = (item, tagObject, create) => { if (typeof tagObject === "string") return tagObject; - if (typeof tagObject === "number") return `00000000${tagObject.toString(16)}`.slice(-8); + if (typeof tagObject === "number") + return `00000000${tagObject.toString(16)}`.slice(-8); const { creator, tag } = tagObject; if (!creator) return tag; const start = tag.substring(0, 4); @@ -16,13 +18,17 @@ const findPrivate = (item, tagObject, create) => { for (let offset = 0x10; offset < 0x40; offset++) { const testTag = `${start}00${offset.toString(16)}`; const testCreator = item[testTag]; - if (testCreator === undefined && assignPosition === undefined) assignPosition = offset; + if (testCreator === undefined && assignPosition === undefined) + assignPosition = offset; if (testCreator && testCreator.Value && testCreator.Value[0] === creator) { return `${start}${offset.toString(16)}${end}`; } } if (create) { - if (!assignPosition) throw new Error(`Couldn't find any assign positions for ${creator} ${tag} in ${item}`); + if (!assignPosition) + throw new Error( + `Couldn't find any assign positions for ${creator} ${tag} in ${item}`, + ); const creatorTag = `${start}00${assignPosition.toString(16)}`; item[creatorTag] = { Value: [creator], vr: "CS" }; return 
`${start}${assignPosition.toString(16)}${end}`; diff --git a/packages/static-wado-util/lib/endsWith.js b/packages/static-wado-util/lib/endsWith.js index df3da631..f66b681f 100644 --- a/packages/static-wado-util/lib/endsWith.js +++ b/packages/static-wado-util/lib/endsWith.js @@ -1,3 +1,4 @@ -const endsWith = (str, end) => str.length >= end.length && str.substring(str.length - end.length) === end; +const endsWith = (str, end) => + str.length >= end.length && str.substring(str.length - end.length) === end; module.exports = endsWith; diff --git a/packages/static-wado-util/lib/handleHomeRelative.js b/packages/static-wado-util/lib/handleHomeRelative.js index c68e6dd6..c69ba276 100644 --- a/packages/static-wado-util/lib/handleHomeRelative.js +++ b/packages/static-wado-util/lib/handleHomeRelative.js @@ -1,6 +1,7 @@ const homedir = require("os").homedir(); const path = require("path"); -const handleHomeRelative = (dirName) => (dirName[0] == "~" ? path.join(homedir, dirName.substring(1)) : dirName); +const handleHomeRelative = (dirName) => + dirName[0] == "~" ? path.join(homedir, dirName.substring(1)) : dirName; module.exports = handleHomeRelative; diff --git a/packages/static-wado-util/lib/image/bilinear.js b/packages/static-wado-util/lib/image/bilinear.js index d59f7fa5..19f6dc9b 100644 --- a/packages/static-wado-util/lib/image/bilinear.js +++ b/packages/static-wado-util/lib/image/bilinear.js @@ -25,7 +25,10 @@ function bilinear(src, dest) { const ySrc = (y * (srcRows - 1)) / (rows - 1); const ySrc1Off = Math.floor(ySrc) * srcColumns; // Get the second offset, but duplicate the last row so the lookup works - const ySrc2Off = Math.min(ySrc1Off + srcColumns, (srcRows - 1) * srcColumns); + const ySrc2Off = Math.min( + ySrc1Off + srcColumns, + (srcRows - 1) * srcColumns, + ); const yFrac = ySrc - Math.floor(ySrc); const yFracInv = 1 - yFrac; const yOff = y * columns; @@ -43,7 +46,9 @@ function bilinear(src, dest) { // console.log("values", p00, p10, p01, p11); // console.log("fractions", xFracInv, xFrac[x], yFracInv, yFrac); - pixelData[yOff + x] = (p00 * xFracInv + p10 * xFrac[x]) * yFracInv + (p01 * xFracInv + p11 * xFrac[x]) * yFrac; + pixelData[yOff + x] = + (p00 * xFracInv + p10 * xFrac[x]) * yFracInv + + (p01 * xFracInv + p11 * xFrac[x]) * yFrac; } } return pixelData; @@ -51,7 +56,12 @@ function bilinear(src, dest) { /** Handle replicate scaling. 
Use this function for samplesPerPixel>1
  */
 function replicate(src, dest) {
-  const { rows: srcRows, columns: srcColumns, pixelData: srcData, samplesPerPixel = 1 } = src;
+  const {
+    rows: srcRows,
+    columns: srcColumns,
+    pixelData: srcData,
+    samplesPerPixel = 1,
+  } = src;
   const { rows, columns, pixelData } = dest;
 
   const xSrc1Off = [];
diff --git a/packages/static-wado-util/lib/loadConfiguration.js b/packages/static-wado-util/lib/loadConfiguration.js
index 75869541..f35ab797 100644
--- a/packages/static-wado-util/lib/loadConfiguration.js
+++ b/packages/static-wado-util/lib/loadConfiguration.js
@@ -19,10 +19,15 @@ const getConfigurationFile = (args, defValue) => {
  */
 module.exports = (defaults, argvSrc) => {
   const args = argvSrc || process.argv || [];
-  const configurationFile = getConfigurationFile(args, defaults.configurationFile);
-  if (!configurationFile || configurationFile === "false") return Promise.resolve();
+  const configurationFile = getConfigurationFile(
+    args,
+    defaults.configurationFile,
+  );
+  if (!configurationFile || configurationFile === "false")
+    return Promise.resolve();
 
-  const configurationFiles = (Array.isArray(configurationFile) && configurationFile) || [configurationFile];
+  const configurationFiles = (Array.isArray(configurationFile) &&
+    configurationFile) || [configurationFile];
   for (const configFile of configurationFiles) {
     const filename = handleHomeRelative(configFile);
     if (fs.existsSync(filename)) {
diff --git a/packages/static-wado-util/lib/program/index.js b/packages/static-wado-util/lib/program/index.js
index 431ae06e..53605608 100644
--- a/packages/static-wado-util/lib/program/index.js
+++ b/packages/static-wado-util/lib/program/index.js
@@ -4,7 +4,11 @@ const loadConfiguration = require("../loadConfiguration");
 
 function configureBaseProgram(configuration) {
   const { helpDescription, helpShort } = configuration;
-  program.name(helpShort).configureHelp({ sortOptions: true }).addHelpText("beforeAll", helpDescription).addHelpCommand();
+  program
+    .name(helpShort)
+    .configureHelp({ sortOptions: true })
+    .addHelpText("beforeAll", helpDescription)
+    .addHelpCommand();
   return program;
 }
 
@@ -12,7 +16,14 @@ function configureBaseProgram(configuration) {
 const addOptions = (cmd, options) => {
   if (options) {
     options.forEach((optionConfig) => {
-      const { key, description, defaultValue, choices, isRequired, customParser } = optionConfig;
+      const {
+        key,
+        description,
+        defaultValue,
+        choices,
+        isRequired,
+        customParser,
+      } = optionConfig;
       const option = cmd.createOption(key, description);
       option.default(defaultValue);
       if (customParser) {
@@ -31,7 +42,6 @@ const addOptions = (cmd, options) => {
 };
 
 function createVerboseLog(verbose /* , options */) {
-  // eslint-disable-next-line no-shadow-restricted-names
   console.verbose = (...args) => {
     if (!verbose) return;
     console.log(...args);
@@ -43,8 +53,16 @@ function configureCommands(config, configurationFile) {
   createVerboseLog(false);
 
   for (const item of programsDefinition) {
-    const { command, helpDescription, main, isDefault, options: subOptions } = item;
-    const cmdConfig = program.command(command, { isDefault }).description(helpDescription);
+    const {
+      command,
+      helpDescription,
+      main,
+      isDefault,
+      options: subOptions,
+    } = item;
+    const cmdConfig = program
+      .command(command, { isDefault })
+      .description(helpDescription);
     cmdConfig.action((...args) => {
       createVerboseLog(args[args.length - 2].verbose);
       main.call(config, ...args, configurationFile);
@@ -63,7 +81,13 @@ function configureCommands(config, configurationFile) {
  * @returns Program object
  */
 function configureProgram(configuration) {
-  const { argumentsRequired = [], optionsRequired = [], argumentsList = [], optionsList = [], packageJson = {} } = configuration;
+  const {
+    argumentsRequired = [],
+    optionsRequired = [],
+    argumentsList = [],
+    optionsList = [],
+    packageJson = {},
+  } = configuration;
 
   createVerboseLog(false);
   program.version(packageJson.version);
@@ -80,7 +104,8 @@ function configureProgram(configuration) {
 
   optionsList.push({
     key: "-c, --configuration ",
-    description: "Sets the base configurationfile, defaults to static-wado.json5 located in the current directory or in user home directory",
+    description:
+      "Sets the base configurationfile, defaults to static-wado.json5 located in the current directory or in user home directory",
   });
 
   // iterate over option list and set to program
@@ -89,7 +114,10 @@ function configureProgram(configuration) {
 
     option.default(defaultValue);
 
-    if (optionsRequired.includes(option.short) || optionsRequired.includes(option.long)) {
+    if (
+      optionsRequired.includes(option.short) ||
+      optionsRequired.includes(option.long)
+    ) {
       option.makeOptionMandatory();
     }
 
diff --git a/packages/static-wado-util/lib/qidoFilter.js b/packages/static-wado-util/lib/qidoFilter.js
index eeb9bb37..4f99a1fd 100644
--- a/packages/static-wado-util/lib/qidoFilter.js
+++ b/packages/static-wado-util/lib/qidoFilter.js
@@ -42,7 +42,10 @@ const compareValues = (desired, actualSrc) => {
       return actual.indexOf(desired.substring(0, desired.length - 1)) != -1;
     }
     if (desired[0] === "*") {
-      return actual.indexOf(desired.substring(1)) === actual.length - desired.length + 1;
+      return (
+        actual.indexOf(desired.substring(1)) ===
+        actual.length - desired.length + 1
+      );
     }
   }
   return desired === actual;
@@ -73,7 +76,8 @@ const filterItem = (key, queryParams, study) => {
   if (!testValue) return true;
   const valueElem = study[key] || study[altKey];
   if (!valueElem) return false;
-  if (valueElem.vr == "DA") return compareDateRange(testValue, valueElem.Value[0]);
+  if (valueElem.vr == "DA")
+    return compareDateRange(testValue, valueElem.Value[0]);
   const value = valueElem.Value ?? valueElem;
   return !!compareValues(testValue, value);
 };
diff --git a/packages/static-wado-util/lib/reader/JSONReader.js b/packages/static-wado-util/lib/reader/JSONReader.js
index 74ba1f89..34a260b2 100644
--- a/packages/static-wado-util/lib/reader/JSONReader.js
+++ b/packages/static-wado-util/lib/reader/JSONReader.js
@@ -27,8 +27,17 @@ const JSONReader = async (dirSrc, name, def) => {
 };
 
 /** Calls the JSON reader on the path appropriate for the given hash data */
-JSONReader.readHashData = async (studyDir, hashValue, extension = ".json.gz") => {
-  const hashPath = path.join(studyDir, "bulkdata", hashValue.substring(0, 3), hashValue.substring(3, 5));
+JSONReader.readHashData = async (
+  studyDir,
+  hashValue,
+  extension = ".json.gz",
+) => {
+  const hashPath = path.join(
+    studyDir,
+    "bulkdata",
+    hashValue.substring(0, 3),
+    hashValue.substring(3, 5),
+  );
   Stats.StudyStats.add("Read Hash Data", "Read hash data", 100);
   return JSONReader(hashPath, hashValue.substring(5) + extension);
 };
diff --git a/packages/static-wado-util/lib/reader/NDJSONReader.js b/packages/static-wado-util/lib/reader/NDJSONReader.js
index 78d19952..dabce3be 100644
--- a/packages/static-wado-util/lib/reader/NDJSONReader.js
+++ b/packages/static-wado-util/lib/reader/NDJSONReader.js
@@ -8,7 +8,9 @@ const NDJSONReader = async (dirSrc, name, def) => {
   return new Promise((resolve) => {
     try {
       const ret = [];
-      const stream = fs.createReadStream(path.join(dir, name)).pipe(ndjson.parse());
+      const stream = fs
+        .createReadStream(path.join(dir, name))
+        .pipe(ndjson.parse());
       stream.on("data", (it) => ret.push(it));
       stream.on("end", () => resolve(ret));
     } catch (err) {
diff --git a/packages/static-wado-util/lib/reader/dirScanner.js b/packages/static-wado-util/lib/reader/dirScanner.js
index b587b77d..f63840b4 100644
--- a/packages/static-wado-util/lib/reader/dirScanner.js
+++ b/packages/static-wado-util/lib/reader/dirScanner.js
@@ -18,12 +18,16 @@ async function dirScanner(input, options) {
       if (options.recursive !== false) {
         await dirScanner(
           names.map((dirFile) => `${file}/${dirFile}`),
-          options
+          options,
         );
       } else {
         for (let j = 0; j < names.length; j++) {
           const name = names[j];
-          if (!options.matchList || options.matchList.length == 0 || options.matchList.contains(name)) {
+          if (
+            !options.matchList ||
+            options.matchList.length == 0 ||
+            options.matchList.contains(name)
+          ) {
             await options.callback(file, name);
           }
         }
diff --git a/packages/static-wado-util/lib/update/configDiff.js b/packages/static-wado-util/lib/update/configDiff.js
index e07df6a0..e83fae27 100644
--- a/packages/static-wado-util/lib/update/configDiff.js
+++ b/packages/static-wado-util/lib/update/configDiff.js
@@ -23,6 +23,7 @@ const diffObject = (update, base) => {
  * Performs a difference in configuration between the provide value and the default static-wado-config, retaining only values
  * which are different from the base configuration, recursively.
  */
-const configDiff = (newConfig) => diffObject(newConfig, staticWadoConfig.configBase);
+const configDiff = (newConfig) =>
+  diffObject(newConfig, staticWadoConfig.configBase);
 
 module.exports = configDiff;
diff --git a/packages/static-wado-util/lib/writer/JSONWriter.js b/packages/static-wado-util/lib/writer/JSONWriter.js
index dd315de6..bcdd6291 100644
--- a/packages/static-wado-util/lib/writer/JSONWriter.js
+++ b/packages/static-wado-util/lib/writer/JSONWriter.js
@@ -4,8 +4,15 @@ const { Stats } = require("../stats");
 const WriteStream = require("./WriteStream");
 
 /** Writes out JSON files to the given file name. Automatically GZips them, and adds the extension */
-const JSONWriter = async (dirSrc, name, data, options = { gzip: true, brotli: false, index: true }) => {
-  const fileName = options.index ? "index.json.gz" : name + ((options.gzip && ".gz") || (options.brotli && ".br") || "");
+const JSONWriter = async (
+  dirSrc,
+  name,
+  data,
+  options = { gzip: true, brotli: false, index: true },
+) => {
+  const fileName = options.index
+    ? "index.json.gz"
+    : name + ((options.gzip && ".gz") || (options.brotli && ".br") || "");
   const dir = handleHomeRelative(dirSrc);
   const dirName = options.index ? path.join(dir, name) : dir;
   const writeStream = WriteStream(dirName, fileName, {
diff --git a/packages/static-wado-util/lib/writer/WriteStream.js b/packages/static-wado-util/lib/writer/WriteStream.js
index 52f47a4e..ff744dfb 100644
--- a/packages/static-wado-util/lib/writer/WriteStream.js
+++ b/packages/static-wado-util/lib/writer/WriteStream.js
@@ -9,10 +9,14 @@ const path = require("path");
  */
 const WriteStream = (dir, nameSrc, options = {}) => {
   const isGzip = nameSrc.indexOf(".gz") != -1 || options.gzip;
-  const name = (isGzip && nameSrc.indexOf(".gz") === -1 && `${nameSrc}.gz`) || nameSrc;
+  const name =
+    (isGzip && nameSrc.indexOf(".gz") === -1 && `${nameSrc}.gz`) || nameSrc;
   if (options.mkdir) fs.mkdirSync(dir, { recursive: true });
 
-  const tempName = path.join(dir, `tempFile-${Math.round(Math.random() * 1000000000)}`);
+  const tempName = path.join(
+    dir,
+    `tempFile-${Math.round(Math.random() * 1000000000)}`,
+  );
   const finalName = path.join(dir, name);
   const rawStream = fs.createWriteStream(tempName);
   const closePromise = new Promise((resolve) => {
diff --git a/packages/static-wado-util/tests/unit/asyncIterable.js b/packages/static-wado-util/tests/unit/asyncIterable.js
index 1aa29587..8dcdb41f 100644
--- a/packages/static-wado-util/tests/unit/asyncIterable.js
+++ b/packages/static-wado-util/tests/unit/asyncIterable.js
@@ -5,7 +5,9 @@ describe("asyncIterableToBuffer", () => {
   let dicomp10stream;
 
   beforeEach(async () => {
-    dicomp10stream = fs.createReadStream(`${TEST_DATA_PATH}/dcm/Juno/1.3.6.1.4.1.25403.345050719074.3824.20170125113606.8`);
+    dicomp10stream = fs.createReadStream(
+      `${TEST_DATA_PATH}/dcm/Juno/1.3.6.1.4.1.25403.345050719074.3824.20170125113606.8`,
+    );
   });
 
   it("copies child elements correctly", async () => {
@@ -29,7 +31,9 @@ describe("asyncIterableToBuffer", () => {
       const bufVal = buffer[i + start];
       const subVal = subBuffer[i];
       if (bufVal != subVal) {
-        console.log(`At position ${i} relative to ${start} buffer is ${bufVal} but subVal is ${subVal}`);
+        console.log(
+          `At position ${i} relative to ${start} buffer is ${bufVal} but subVal is ${subVal}`,
+        );
       }
       buffer[i + start].must.be.eql(subBuffer[i]);
     }
diff --git a/packages/static-wado-util/tests/unit/bilinear.test.js b/packages/static-wado-util/tests/unit/bilinear.test.js
index fc638cfa..e3823307 100644
--- a/packages/static-wado-util/tests/unit/bilinear.test.js
+++ b/packages/static-wado-util/tests/unit/bilinear.test.js
@@ -1,4 +1,3 @@
-/* eslint-disable no-use-before-define */
 const must = require("must");
 const { bilinear } = require("../../lib/image/bilinear");
 
@@ -29,8 +28,17 @@ describe("bilinear tests", () => {
     bilinear(src, dest);
     must(dest.data[0]).equal(src.data[0]);
     must(dest.data[1]).equal(Math.floor(src.data[0] / 2 + src.data[1] / 2));
-    must(dest.data[3]).equal(Math.floor(src.data[0] * 0.75 + src.data[2] * 0.25));
-    must(dest.data[4]).equal(Math.floor((src.data[0] * 0.75) / 2 + (src.data[1] * 0.75) / 2 + (src.data[2] * 0.25) / 2 + (src.data[3] * 0.25) / 2));
+    must(dest.data[3]).equal(
+      Math.floor(src.data[0] * 0.75 + src.data[2] * 0.25),
+    );
+    must(dest.data[4]).equal(
+      Math.floor(
+        (src.data[0] * 0.75) / 2 +
+          (src.data[1] * 0.75) / 2 +
+          (src.data[2] * 0.25) / 2 +
+          (src.data[3] * 0.25) / 2,
+      ),
+    );
   });
 });
 
diff --git a/yarn.lock b/yarn.lock
index eb2db147..a83a4592 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -9038,10 +9038,10 @@ uuid@^10.0.0:
   resolved "https://registry.yarnpkg.com/uuid/-/uuid-10.0.0.tgz#5a95aa454e6e002725c79055fd42aaba30ca6294"
   integrity sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==
 
-uuid@^8.3.2:
-  version "8.3.2"
-  resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"
-  integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==
+uuid@^11.0.2:
+  version "11.0.5"
+  resolved "https://registry.yarnpkg.com/uuid/-/uuid-11.0.5.tgz#07b46bdfa6310c92c3fb3953a8720f170427fc62"
+  integrity sha512-508e6IcKLrhxKdBbcA2b4KQZlLVp2+J5UwQ6F7Drckkc5N9ZJwFa4TgWtsww9UG8fGHbm6gbV19TdM5pQ4GaIA==
 
 uuid@^9.0.1:
   version "9.0.1"
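
Note for reviewers (not part of the patch): the static-wado-util hunks above are Prettier-style re-wrapping only, while the yarn.lock hunk also replaces uuid@^8.3.2 with uuid@^11.0.2. As a quick reference for the qidoFilter.js hunk, here is a minimal standalone sketch of the wildcard comparison it re-wraps; the helper name and the sample values are illustrative and are not taken from the package itself.

// Sketch of the QIDO wildcard comparison re-wrapped in qidoFilter.js above.
function matchesQidoWildcard(desired, actual) {
  // Trailing "*": the prefix only needs to occur somewhere in the actual value.
  if (desired[desired.length - 1] === "*") {
    return actual.indexOf(desired.substring(0, desired.length - 1)) != -1;
  }
  // Leading "*": the remainder has to sit at the very end of the actual value.
  if (desired[0] === "*") {
    return (
      actual.indexOf(desired.substring(1)) ===
      actual.length - desired.length + 1
    );
  }
  // No wildcard: exact match.
  return desired === actual;
}

console.log(matchesQidoWildcard("DOE*", "DOE^JOHN")); // true
console.log(matchesQidoWildcard("*JOHN", "DOE^JOHN")); // true
console.log(matchesQidoWildcard("SMITH*", "DOE^JOHN")); // false

Because the re-wrapped expression keeps operator order and parentheses intact, the leading-wildcard branch still evaluates to the same suffix check before and after the formatting change.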