diff --git a/src/CharRNN/index.js b/src/CharRNN/index.js
index d4b26a913..af65e235d 100644
--- a/src/CharRNN/index.js
+++ b/src/CharRNN/index.js
@@ -11,7 +11,7 @@ A LSTM Generator: Run inference mode for a pre-trained LSTM.
 import * as tf from "@tensorflow/tfjs";
 import axios from "axios";
-import sampleFromDistribution from "./../utils/sample";
+import sampleFromDistribution from "../utils/sample";
 import CheckpointLoader from "../utils/checkpointLoader";
 import callCallback from "../utils/callcallback";
@@ -309,6 +309,6 @@ class CharRNN {
   }
 }
-const charRNN = (modelPath = "./", callback) => new CharRNN(modelPath, callback);
+const charRNN = (modelPath, callback) => new CharRNN(modelPath, callback);
 export default charRNN;
diff --git a/src/DBSCAN/index.js b/src/DBSCAN/index.js
index af99840cc..d24d73ead 100644
--- a/src/DBSCAN/index.js
+++ b/src/DBSCAN/index.js
@@ -33,9 +33,7 @@ async function loadDataset(inputData) {
   } else {
     data = inputData;
   }
-  const dataFlat = data.map(d => {
-    return Object.values(d);
-  });
+  const dataFlat = data.map(d => Object.values(d));
   return dataFlat;
 }
@@ -138,6 +136,7 @@ class DBSCAN {
   getClusterId() {
     return this.lastClusterId;
   }
+
   /**
    * increment cluster id
    */
@@ -159,9 +158,7 @@ class DBSCAN {
     return tf
       .stack([values.asType("float32"), indices.asType("float32")], 1)
       .arraySync()
-      .filter(v => {
-        return v[0] <= this.config.eps;
-      })
+      .filter(v => v[0] <= this.config.eps)
       .reduce((prev, cur) => {
         prev.push(cur[1]);
         return prev;
diff --git a/src/FaceApi/index.js b/src/FaceApi/index.js
index f6a69e373..e1fdc0907 100644
--- a/src/FaceApi/index.js
+++ b/src/FaceApi/index.js
@@ -268,7 +268,7 @@ class FaceApiBase {
    * @param {Object} faceApiOptions
    */
   setReturnOptions(faceApiOptions) {
-    const output = Object.assign({}, this.config);
+    const output = { ...this.config};
     const options = ["withLandmarks", "withDescriptors"];
     options.forEach(prop => {
@@ -306,7 +306,7 @@ class FaceApiBase {
     if (Array.isArray(result) === true) {
       output = result.map(item => {
         // if landmarks exist return parts
-        const newItem = Object.assign({}, item);
+        const newItem = { ...item};
         if (newItem.landmarks) {
           const { landmarks } = newItem;
           newItem.parts = {
@@ -333,7 +333,7 @@ class FaceApiBase {
       });
       // single detection is an object
     } else {
-      output = Object.assign({}, result);
+      output = { ...result};
       if (output.landmarks) {
         const { landmarks } = result;
         output.parts = {
diff --git a/src/FeatureExtractor/Mobilenet.js b/src/FeatureExtractor/Mobilenet.js
index f4ccabdd5..a1907201a 100644
--- a/src/FeatureExtractor/Mobilenet.js
+++ b/src/FeatureExtractor/Mobilenet.js
@@ -10,7 +10,7 @@ A class that extract features from Mobilenet
 import * as tf from "@tensorflow/tfjs";
 import axios from "axios";
 import handleArguments from "../utils/handleArguments";
-import Video from "./../utils/Video";
+import Video from "../utils/Video";
 import { imgToTensor } from "../utils/imageUtilities";
 import { saveBlob } from "../utils/io";
 import callCallback from "../utils/callcallback";
@@ -127,7 +127,7 @@ class Mobilenet {
    * the video is ready. If no callback is provided, it will return a
    * promise that will be resolved once the video element has loaded.
    */
-  classification(video, objOrCallback = null, callback) {
+  classification(video, objOrCallback, callback) {
     const { options, callback: cb } = handleArguments(objOrCallback, callback);
     this.usageType = "classifier";
@@ -367,7 +367,7 @@ class Mobilenet {
     return { value: prediction[0] };
   }
-  async load(filesOrPath = null, callback) {
+  async load(filesOrPath, callback) {
     if (typeof filesOrPath !== "string") {
       let model = null;
       let weights = null;
diff --git a/src/KMeans/index.js b/src/KMeans/index.js
index 959f0d736..bdc0e040a 100644
--- a/src/KMeans/index.js
+++ b/src/KMeans/index.js
@@ -42,9 +42,7 @@ async function loadDataset(inputData) {
   } else {
     data = inputData;
   }
-  const dataFlat = data.map((d) => {
-    return Object.values(d)
-  });
+  const dataFlat = data.map(d => Object.values(d));
   return dataFlat;
 }
@@ -158,7 +156,7 @@ class KMeans {
     const centroidKTensor = centroidK.map(d => d.tensor);
     if (centroidKTensor.length === 0) {
       return centroid;
-    } else if (centroidKTensor.length === 1) {
+    } if (centroidKTensor.length === 1) {
      return centroidKTensor[0];
    }
    // grab mean for for cluster
@@ -186,4 +184,4 @@ class KMeans {
 const kmeans = (dataset, options, callback) => new KMeans(dataset, options, callback);
-export default kmeans;
\ No newline at end of file
+export default kmeans;
diff --git a/src/NeuralNetwork/NeuralNetwork.js b/src/NeuralNetwork/NeuralNetwork.js
index 078d9b965..c23eb6b79 100644
--- a/src/NeuralNetwork/NeuralNetwork.js
+++ b/src/NeuralNetwork/NeuralNetwork.js
@@ -129,9 +129,7 @@ class NeuralNetwork {
    * @param {*} _inputs
    */
   predictSync(_inputs) {
-    const output = tf.tidy(() => {
-      return this.model.predict(_inputs);
-    });
+    const output = tf.tidy(() => this.model.predict(_inputs));
     const result = output.arraySync();
     output.dispose();
@@ -145,9 +143,7 @@ class NeuralNetwork {
    * @param {*} _inputs
    */
   async predict(_inputs) {
-    const output = tf.tidy(() => {
-      return this.model.predict(_inputs);
-    });
+    const output = tf.tidy(() => this.model.predict(_inputs));
     const result = await output.array();
     output.dispose();
@@ -211,16 +207,16 @@ class NeuralNetwork {
    * @param {*} filesOrPath
    * @param {*} callback
    */
-  async load(filesOrPath = null, callback) {
+  async load(filesOrPath, callback) {
     if (filesOrPath instanceof FileList) {
       const files = await Promise.all(
         Array.from(filesOrPath).map(async file => {
           if (file.name.includes('.json') && !file.name.includes('_meta')) {
             return { name: 'model', file };
-          } else if (file.name.includes('.json') && file.name.includes('_meta.json')) {
+          } if (file.name.includes('.json') && file.name.includes('_meta.json')) {
             const modelMetadata = await file.text();
             return { name: 'metadata', file: modelMetadata };
-          } else if (file.name.includes('.bin')) {
+          } if (file.name.includes('.bin')) {
             return { name: 'weights', file };
           }
           return { name: null, file: null };
diff --git a/src/NeuralNetwork/NeuralNetworkData.js b/src/NeuralNetwork/NeuralNetworkData.js
index 84aaf279b..acabb54d4 100644
--- a/src/NeuralNetwork/NeuralNetworkData.js
+++ b/src/NeuralNetwork/NeuralNetworkData.js
@@ -100,7 +100,7 @@ class NeuralNetworkData {
    * @param {*} dataRaw
    */
   getDataStats(dataRaw) {
-    const meta = Object.assign({}, this.meta);
+    const meta = { ...this.meta};
     const inputMeta = this.getInputMetaStats(dataRaw, meta.inputs, 'xs');
     const outputMeta = this.getInputMetaStats(dataRaw, meta.outputs, 'ys');
@@ -125,7 +125,7 @@ class NeuralNetworkData {
    */
   // eslint-disable-next-line no-unused-vars, class-methods-use-this
   getInputMetaStats(dataRaw, inputOrOutputMeta, xsOrYs) {
-    const inputMeta = Object.assign({}, inputOrOutputMeta);
+    const inputMeta = { ...inputOrOutputMeta};
     Object.keys(inputMeta).forEach(k => {
       if (inputMeta[k].dtype === 'string') {
@@ -151,7 +151,7 @@ class NeuralNetworkData {
    */
   getDataUnits(dataRaw, _arrayShape = null) {
     const arrayShape = _arrayShape !== null ? _arrayShape : undefined;
-    const meta = Object.assign({}, this.meta);
+    const meta = { ...this.meta};
     // if the data has a shape pass it in
     let inputShape;
@@ -182,7 +182,7 @@ class NeuralNetworkData {
   // eslint-disable-next-line class-methods-use-this, no-unused-vars
   getInputMetaUnits(_dataRaw, _inputsMeta) {
     let units = 0;
-    const inputsMeta = Object.assign({}, _inputsMeta);
+    const inputsMeta = { ..._inputsMeta};
     Object.entries(inputsMeta).forEach(arr => {
       const { dtype } = arr[1];
@@ -271,7 +271,7 @@ class NeuralNetworkData {
    */
   // eslint-disable-next-line class-methods-use-this, no-unused-vars
   convertRawToTensors(dataRaw) {
-    const meta = Object.assign({}, this.meta);
+    const meta = { ...this.meta};
     const dataLength = dataRaw.length;
     return tf.tidy(() => {
@@ -281,18 +281,14 @@ class NeuralNetworkData {
       dataRaw.forEach(row => {
         // get xs
         const xs = Object.keys(meta.inputs)
-          .map(k => {
-            return row.xs[k];
-          })
+          .map(k => row.xs[k])
           .flat();
         inputArr.push(xs);
         // get ys
         const ys = Object.keys(meta.outputs)
-          .map(k => {
-            return row.ys[k];
-          })
+          .map(k => row.ys[k])
           .flat();
         outputArr.push(ys);
@@ -319,7 +315,7 @@ class NeuralNetworkData {
    * @param {*} dataRaw
    */
   normalizeDataRaw(dataRaw) {
-    const meta = Object.assign({}, this.meta);
+    const meta = { ...this.meta};
     const normXs = this.normalizeInputData(dataRaw, meta.inputs, 'xs');
     const normYs = this.normalizeInputData(dataRaw, meta.outputs, 'ys');
@@ -340,7 +336,7 @@ class NeuralNetworkData {
     // the data length
     const dataLength = dataRaw.length;
     // the copy of the inputs.meta[inputOrOutput]
-    const inputMeta = Object.assign({}, inputOrOutputMeta);
+    const inputMeta = { ...inputOrOutputMeta};
     // normalized output object
     const normalized = {};
@@ -392,9 +388,7 @@ class NeuralNetworkData {
     // value with the onehot array
     // if none exists, return the given value
     if (options.legend) {
-      const normalized = inputArray.map(v => {
-        return options.legend[v] ? options.legend[v] : v;
-      });
+      const normalized = inputArray.map(v => options.legend[v] ? options.legend[v] : v);
       return normalized;
     }
@@ -460,7 +454,7 @@ class NeuralNetworkData {
    * @param {*} _meta
    */
   applyOneHotEncodingsToDataRaw(dataRaw) {
-    const meta = Object.assign({}, this.meta);
+    const meta = { ...this.meta};
     const output = dataRaw.map(row => {
       const xs = {
@@ -498,7 +492,7 @@ class NeuralNetworkData {
    * @param {*} dataRaw
    */
   getDataOneHot(dataRaw) {
-    const meta = Object.assign({}, this.meta);
+    const meta = { ...this.meta};
     const inputMeta = this.getInputMetaOneHot(dataRaw, meta.inputs, 'xs');
     const outputMeta = this.getInputMetaOneHot(dataRaw, meta.outputs, 'ys');
@@ -521,7 +515,7 @@ class NeuralNetworkData {
    * @param {*} xsOrYs
    */
   getInputMetaOneHot(_dataRaw, _inputsMeta, xsOrYs) {
-    const inputsMeta = Object.assign({}, _inputsMeta);
+    const inputsMeta = { ..._inputsMeta};
     Object.entries(inputsMeta).forEach(arr => {
       // the key
@@ -615,7 +609,7 @@ class NeuralNetworkData {
     let json;
     // handle loading parsedJson
     if (dataUrlOrJson instanceof Object) {
-      json = Object.assign({}, dataUrlOrJson);
+      json = { ...dataUrlOrJson};
     } else {
       const {data} = await axios.get(dataUrlOrJson);
       json = data;
@@ -684,7 +678,7 @@ class NeuralNetworkData {
    * @param {*} filesOrPath
    * @param {*} callback
    */
-  async loadData(filesOrPath = null, callback) {
+  async loadData(filesOrPath, callback) {
     try {
       let loadedData;
@@ -769,7 +763,7 @@ class NeuralNetworkData {
    * @param {*} filesOrPath
    * @param {*} callback
    */
-  async loadMeta(filesOrPath = null, callback) {
+  async loadMeta(filesOrPath, callback) {
     if (filesOrPath instanceof FileList) {
       const files = await Promise.all(
         Array.from(filesOrPath).map(async file => {
@@ -778,13 +772,13 @@ class NeuralNetworkData {
               name: 'model',
               file,
             };
-          } else if (file.name.includes('.json') && file.name.includes('_meta.json')) {
+          } if (file.name.includes('.json') && file.name.includes('_meta.json')) {
             const modelMetadata = await file.text();
             return {
               name: 'metadata',
               file: modelMetadata,
             };
-          } else if (file.name.includes('.bin')) {
+          } if (file.name.includes('.bin')) {
             return {
               name: 'weights',
               file,
@@ -925,11 +919,11 @@ class NeuralNetworkData {
    * @param {*} _data
    */
   findEntries(_data) {
-    const parentCopy = Object.assign({}, _data);
+    const parentCopy = { ..._data};
     if (parentCopy.entries && parentCopy.entries instanceof Array) {
       return parentCopy.entries;
-    } else if (parentCopy.data && parentCopy.data instanceof Array) {
+    } if (parentCopy.data && parentCopy.data instanceof Array) {
       return parentCopy.data;
     }
diff --git a/src/NeuralNetwork/NeuralNetworkUtils.js b/src/NeuralNetwork/NeuralNetworkUtils.js
index 870a4a12e..1c9a9b6ab 100644
--- a/src/NeuralNetwork/NeuralNetworkUtils.js
+++ b/src/NeuralNetwork/NeuralNetworkUtils.js
@@ -32,9 +32,7 @@ class NeuralNetworkUtils {
   // eslint-disable-next-line no-unused-vars, class-methods-use-this
   getMin(_array) {
     // return Math.min(..._array)
-    return _array.reduce((a, b) => {
-      return Math.min(a, b);
-    });
+    return _array.reduce((a, b) => Math.min(a, b));
   }
   /**
@@ -43,9 +41,7 @@ class NeuralNetworkUtils {
    */
   // eslint-disable-next-line no-unused-vars, class-methods-use-this
   getMax(_array) {
-    return _array.reduce((a, b) => {
-      return Math.max(a, b);
-    });
+    return _array.reduce((a, b) => Math.max(a, b));
     // return Math.max(..._array)
   }
@@ -75,12 +71,10 @@ class NeuralNetworkUtils {
       return [];
     }
-    const output = [...new Array(arr1.length).fill(null)].map((item, idx) => {
-      return {
+    const output = [...new Array(arr1.length).fill(null)].map((item, idx) => ({
         ...arr1[idx],
         ...arr2[idx]
-      }
-    })
+      }))
     return output;
   }
@@ -116,7 +110,7 @@ class NeuralNetworkUtils {
        result[label] = item;
      });
      return result;
-    } else if (typeof incoming === 'object') {
+    } if (typeof incoming === 'object') {
      result = incoming;
      return result;
    }
@@ -148,4 +142,4 @@ const neuralNetworkUtils = () => {
   return instance;
 }
-export default neuralNetworkUtils();
\ No newline at end of file
+export default neuralNetworkUtils();
diff --git a/src/NeuralNetwork/NeuralNetworkVis.js b/src/NeuralNetwork/NeuralNetworkVis.js
index 1d15c55c4..41bd00bbd 100644
--- a/src/NeuralNetwork/NeuralNetworkVis.js
+++ b/src/NeuralNetwork/NeuralNetworkVis.js
@@ -23,12 +23,10 @@ class NeuralNetworkVis {
    * @param {*} data
    */
   scatterplot(inputLabel, outputLabel, data) {
-    const values = data.map(item => {
-      return {
+    const values = data.map(item => ({
         x: item.xs[inputLabel],
         y: item.ys[outputLabel],
-      };
-    });
+      }));
     const visOptions = {
       name: "debug mode",
@@ -53,12 +51,10 @@ class NeuralNetworkVis {
     inputLabels.forEach(inputLabel => {
       outputLabels.forEach(outputLabel => {
-        const val = data.map(item => {
-          return {
+        const val = data.map(item => ({
             x: item.xs[inputLabel],
             y: item.ys[outputLabel],
-          };
-        });
+          }));
         values = [...values, ...val];
       });
@@ -84,12 +80,10 @@ class NeuralNetworkVis {
    * @param {*} data
    */
   barchart(inputLabel, outputLabel, data) {
-    const values = data.map(item => {
-      return {
+    const values = data.map(item => ({
         value: item.xs[inputLabel],
         index: item.ys[outputLabel],
-      };
-    });
+      }));
     const chartOptions = {
       xLabel: "label",
diff --git a/src/NeuralNetwork/index.js b/src/NeuralNetwork/index.js
index 1a5fcf003..5991a01d9 100644
--- a/src/NeuralNetwork/index.js
+++ b/src/NeuralNetwork/index.js
@@ -330,7 +330,7 @@ class DiyNeuralNetwork {
     if (Array.isArray(input)) {
       formattedInputs = input.map(item => this.formatInputItem(item));
     } else if (typeof input === 'object') {
-      const newXInputs = Object.assign({}, input);
+      const newXInputs = { ...input};
       Object.keys(input).forEach(k => {
         const val = input[k];
         newXInputs[k] = this.formatInputItem(val);
@@ -393,14 +393,10 @@ class DiyNeuralNetwork {
     // to run predict or classify on a batch of data
     if (_input instanceof Array) {
-      inputData = inputHeaders.map((prop, idx) => {
-        return this.isOneHotEncodedOrNormalized(_input[idx], prop, meta.inputs);
-      });
+      inputData = inputHeaders.map((prop, idx) => this.isOneHotEncodedOrNormalized(_input[idx], prop, meta.inputs));
     } else if (_input instanceof Object) {
       // TODO: make sure that the input order is preserved!
-      inputData = inputHeaders.map(prop => {
-        return this.isOneHotEncodedOrNormalized(_input[prop], prop, meta.inputs);
-      });
+      inputData = inputHeaders.map(prop => this.isOneHotEncodedOrNormalized(_input[prop], prop, meta.inputs));
     }
     // inputData = tf.tensor([inputData.flat()])
@@ -420,9 +416,7 @@ class DiyNeuralNetwork {
     if (_input instanceof Array) {
       if (_input.every(item => Array.isArray(item))) {
-        output = _input.map(item => {
-          return this.formatInputsForPrediction(item, meta, inputHeaders);
-        });
+        output = _input.map(item => this.formatInputsForPrediction(item, meta, inputHeaders));
         return tf.tensor(output, [_input.length, inputHeaders.length]);
       }
@@ -601,7 +595,7 @@ class DiyNeuralNetwork {
   createNetworkLayers(layerJsonArray, meta) {
     const layers = [...layerJsonArray];
-    const { inputUnits, outputUnits } = Object.assign({}, meta);
+    const { inputUnits, outputUnits } = { ...meta};
     const layersLength = layers.length;
     if (!(layers.length >= 2)) {
@@ -869,8 +863,7 @@ class DiyNeuralNetwork {
     if (meta !== null) {
       const labels = Object.keys(meta.outputs);
-      const formattedResults = unformattedResults.map(unformattedResult => {
-        return labels.map((item, idx) => {
+      const formattedResults = unformattedResults.map(unformattedResult => labels.map((item, idx) => {
          // check to see if the data were normalized
          // if not, then send back the values, otherwise
          // unnormalize then return
@@ -897,8 +890,7 @@ class DiyNeuralNetwork {
          }
          return d;
-        });
-      });
+        }));
       // return single array if the length is less than 2,
       // otherwise return array of arrays
@@ -929,8 +921,7 @@ class DiyNeuralNetwork {
     if (meta !== null) {
       const labels = Object.keys(meta.outputs);
-      const formattedResults = unformattedResults.map(unformattedResult => {
-        return labels.map((item, idx) => {
+      const formattedResults = unformattedResults.map(unformattedResult => labels.map((item, idx) => {
          // check to see if the data were normalized
          // if not, then send back the values, otherwise
          // unnormalize then return
@@ -957,8 +948,7 @@ class DiyNeuralNetwork {
          }
          return d;
-        });
-      });
+        }));
      // return single array if the length is less than 2,
      // otherwise return array of arrays
@@ -1014,17 +1004,13 @@ class DiyNeuralNetwork {
     const label = Object.keys(meta.outputs)[0];
     const vals = Object.entries(meta.outputs[label].legend);
-    const formattedResults = unformattedResults.map(unformattedResult => {
-      return vals
-        .map((item, idx) => {
-          return {
+    const formattedResults = unformattedResults.map(unformattedResult => vals
+        .map((item, idx) => ({
             [item[0]]: unformattedResult[idx],
             label: item[0],
             confidence: unformattedResult[idx],
-          };
-        })
-        .sort((a, b) => b.confidence - a.confidence);
-    });
+          }))
+        .sort((a, b) => b.confidence - a.confidence));
     // return single array if the length is less than 2,
     // otherwise return array of arrays
@@ -1079,17 +1065,13 @@ class DiyNeuralNetwork {
     const label = Object.keys(meta.outputs)[0];
     const vals = Object.entries(meta.outputs[label].legend);
-    const formattedResults = unformattedResults.map(unformattedResult => {
-      return vals
-        .map((item, idx) => {
-          return {
+    const formattedResults = unformattedResults.map(unformattedResult => vals
+        .map((item, idx) => ({
            [item[0]]: unformattedResult[idx],
            label: item[0],
            confidence: unformattedResult[idx],
-          };
-        })
-        .sort((a, b) => b.confidence - a.confidence);
-    });
+          }))
+        .sort((a, b) => b.confidence - a.confidence));
     // return single array if the length is less than 2,
     // otherwise return array of arrays
@@ -1121,7 +1103,7 @@ class DiyNeuralNetwork {
    * @param {*} filesOrPath
    * @param {*} callback
    */
-  async loadData(filesOrPath = null, callback) {
+  async loadData(filesOrPath, callback) {
     this.neuralNetworkData.loadData(filesOrPath, callback);
   }
@@ -1151,7 +1133,7 @@ class DiyNeuralNetwork {
    * @param {*} filesOrPath
    * @param {*} callback
    */
-  async load(filesOrPath = null, cb) {
+  async load(filesOrPath, cb) {
     let callback;
     if (cb) {
       callback = cb;
diff --git a/src/NeuralNetwork/index.test.js b/src/NeuralNetwork/index.test.js
index 18b42de73..325c56bdd 100644
--- a/src/NeuralNetwork/index.test.js
+++ b/src/NeuralNetwork/index.test.js
@@ -335,9 +335,7 @@ describe('NeuralNetwork', () => {
       epochs: 2,
       shuffle: true,
       validationSplit: 0.2,
-      whileTraining: () => {
-        return null;
-      },
+      whileTraining: () => null,
     };
     await brain.trainInternal(trainingOptions);
diff --git a/src/ObjectDetector/CocoSsd/index.js b/src/ObjectDetector/CocoSsd/index.js
index 236a65295..7652e3a9d 100644
--- a/src/ObjectDetector/CocoSsd/index.js
+++ b/src/ObjectDetector/CocoSsd/index.js
@@ -72,8 +72,7 @@ export class CocoSsdBase {
   async detectInternal(imgToPredict) {
     this.isPredicting = true;
     const predictions = await this.model.detect(imgToPredict);
-    const formattedPredictions = predictions.map(prediction => {
-      return {
+    const formattedPredictions = predictions.map(prediction => ({
         label: prediction.class,
         confidence: prediction.score,
         x: prediction.bbox[0],
@@ -86,8 +85,7 @@ export class CocoSsdBase {
           width: prediction.bbox[2] / imgToPredict.width,
           height: prediction.bbox[3] / imgToPredict.height,
         },
-      };
-    });
+      }));
     this.isPredicting = false;
     return formattedPredictions;
   }
diff --git a/src/ObjectDetector/YOLO/index.js b/src/ObjectDetector/YOLO/index.js
index cf15537cd..624ed6d1a 100644
--- a/src/ObjectDetector/YOLO/index.js
+++ b/src/ObjectDetector/YOLO/index.js
@@ -11,12 +11,11 @@ Heavily derived from https://github.com/ModelDepot/tfjs-yolo-tiny (ModelDepot: m
 import * as tf from '@tensorflow/tfjs';
 import handleArguments from "../../utils/handleArguments";
-import Video from './../../utils/Video';
-import { imgToTensor } from "./../../utils/imageUtilities";
-import callCallback from './../../utils/callcallback';
-import CLASS_NAMES from './../../utils/COCO_CLASSES';
-import modelLoader from './../../utils/modelLoader';
-
+import Video from '../../utils/Video';
+import { imgToTensor } from "../../utils/imageUtilities";
+import callCallback from "../../utils/callcallback";
+import CLASS_NAMES from "../../utils/COCO_CLASSES";
+import modelLoader from "../../utils/modelLoader";
 import {
   nonMaxSuppression,
   boxesToCorners,
diff --git a/src/ObjectDetector/YOLO/postprocess.js b/src/ObjectDetector/YOLO/postprocess.js
index 4b05c4e99..a55279592 100644
--- a/src/ObjectDetector/YOLO/postprocess.js
+++ b/src/ObjectDetector/YOLO/postprocess.js
@@ -77,8 +77,7 @@ export async function filterBoxes(
   })
 }
-export const boxesToCorners = (boxXY, boxWH) => {
-  return tf.tidy(() => {
+export const boxesToCorners = (boxXY, boxWH) => tf.tidy(() => {
     const two = tf.tensor1d([2.0]);
     const boxMins = tf.sub(boxXY, tf.div(boxWH, two));
     const boxMaxes = tf.add(boxXY, tf.div(boxWH, two));
@@ -94,11 +93,9 @@ export const boxesToCorners = (boxXY, boxWH) => {
       boxMaxes.slice([0, 0, 0, 1], size),
       boxMaxes.slice([0, 0, 0, 0], size),
     ], 3);
-  })
-};
+  });
-export const nonMaxSuppression = (boxes, scores, iouThreshold) => {
-  return tf.tidy(() => {
+export const nonMaxSuppression = (boxes, scores, iouThreshold) => tf.tidy(() => {
     // Zip together scores, box corners, and index
     const zipped = [];
     for (let i = 0; i < scores.length; i += 1) {
@@ -129,8 +126,7 @@ export const nonMaxSuppression = (boxes, scores, iouThreshold) => {
       selectedBoxes.map(e => e[1]),
       selectedBoxes.map(e => e[0]),
     ];
-  })
-};
+  });
 // Convert yolo output to bounding box + prob tensors
 /* eslint no-param-reassign: 0 */
@@ -170,4 +166,4 @@ export function head(feats, anchors, numClasses) {
   return [boxXY, boxWH, boxConfidence, boxClassProbs];
   })
-}
\ No newline at end of file
+}
diff --git a/src/ObjectDetector/index.test.js b/src/ObjectDetector/index.test.js
index e9549585d..f69f0a768 100644
--- a/src/ObjectDetector/index.test.js
+++ b/src/ObjectDetector/index.test.js
@@ -25,15 +25,11 @@ const mockYoloObject = {
   classProbThreshold: YOLO_DEFAULTS.classProbThreshold,
   filterBoxesThreshold: YOLO_DEFAULTS.filterBoxesThreshold,
   size: YOLO_DEFAULTS.size,
-  detect: () => {
-    return [{ label: "bird", confidence: 0.9 }];
-  },
+  detect: () => [{ label: "bird", confidence: 0.9 }],
 };
 const mockCocoObject = {
   config: { ...COCOSSD_DEFAULTS },
-  detect: () => {
-    return [{ label: "bird", confidence: 0.9 }];
-  },
+  detect: () => [{ label: "bird", confidence: 0.9 }],
 };
 function mockObjectDetector(modelName) {
diff --git a/src/Pix2pix/index.js b/src/Pix2pix/index.js
index b1c32faf0..b6f68e7f3 100644
--- a/src/Pix2pix/index.js
+++ b/src/Pix2pix/index.js
@@ -113,17 +113,13 @@ class Pix2pix {
   }
   static preprocess(inputPreproc) {
-    const result = tf.tidy(() => {
-      return tf.sub(tf.mul(inputPreproc, tf.scalar(2)), tf.scalar(1));
-    });
+    const result = tf.tidy(() => tf.sub(tf.mul(inputPreproc, tf.scalar(2)), tf.scalar(1)));
     inputPreproc.dispose();
     return result;
   }
   static deprocess(inputDeproc) {
-    const result = tf.tidy(() => {
-      return tf.div(tf.add(inputDeproc, tf.scalar(1)), tf.scalar(2));
-    });
+    const result = tf.tidy(() => tf.div(tf.add(inputDeproc, tf.scalar(1)), tf.scalar(2)));
     inputDeproc.dispose();
     return result;
   }
@@ -140,9 +136,7 @@ class Pix2pix {
   static conv2d(inputCon, filterCon) {
     const tempFilter = filterCon.clone()
-    const result = tf.tidy(() => {
-      return tf.conv2d(inputCon, tempFilter, [2, 2], 'same');
-    });
+    const result = tf.tidy(() => tf.conv2d(inputCon, tempFilter, [2, 2], 'same'));
     inputCon.dispose();
     tempFilter.dispose();
     return result;
@@ -166,4 +160,4 @@ const pix2pix = (model, callback) => {
   return callback ? instance : instance.ready;
 };
-export default pix2pix;
\ No newline at end of file
+export default pix2pix;
diff --git a/src/SoundClassifier/index.js b/src/SoundClassifier/index.js
index 746379e7e..31d7d1668 100644
--- a/src/SoundClassifier/index.js
+++ b/src/SoundClassifier/index.js
@@ -65,7 +65,7 @@ class SoundClassifier {
    * @param {function} cb - a callback function that handles the results of the function.
    * @return {function} a promise or the results of a given callback, cb.
    */
-  async classify(numOrCallback = null, cb) {
+  async classify(numOrCallback, cb) {
     const args = handleArguments(numOrCallback, cb);
     const numberOfClasses = args.number || this.topk;
diff --git a/src/SoundClassifier/speechcommands.js b/src/SoundClassifier/speechcommands.js
index 5b4ed327a..8d8cabb5e 100644
--- a/src/SoundClassifier/speechcommands.js
+++ b/src/SoundClassifier/speechcommands.js
@@ -21,6 +21,7 @@ export class SpeechCommands {
     this.allLabels = this.model.wordLabels();
   }
+  // eslint-disable-next-line default-param-last
   classify(topk = this.allLabels.length, cb) {
     return this.model.listen(result => {
       if (result.scores) {
@@ -30,9 +31,7 @@ export class SpeechCommands {
       }
       return cb(`ERROR: Cannot find scores in result: ${result}`);
     }, this.options)
-      .catch(err => {
-        return cb(`ERROR: ${err.message}`);
-      });
+      .catch(err => cb(`ERROR: ${err.message}`));
   }
 }
diff --git a/src/StyleTransfer/index.js b/src/StyleTransfer/index.js
index a38b9bd4a..f6ac4f950 100644
--- a/src/StyleTransfer/index.js
+++ b/src/StyleTransfer/index.js
@@ -13,7 +13,7 @@ The original TensorFlow implementation was developed by Logan Engstrom: github.c
 import * as tf from '@tensorflow/tfjs';
 import handleArguments from "../utils/handleArguments";
-import Video from './../utils/Video';
+import Video from '../utils/Video';
 import CheckpointLoader from '../utils/checkpointLoader';
 import { array3DToImage } from '../utils/imageUtilities';
 import callCallback from '../utils/callcallback';
diff --git a/src/index.js b/src/index.js
index 2735a9c81..acfdb4b5c 100644
--- a/src/index.js
+++ b/src/index.js
@@ -5,20 +5,20 @@
 import * as tf from "@tensorflow/tfjs";
 import * as tfvis from "@tensorflow/tfjs-vis";
-import pitchDetection from "./PitchDetection/";
-import imageClassifier from "./ImageClassifier/";
-import soundClassifier from "./SoundClassifier/";
-import KNNClassifier from "./KNNClassifier/";
-import featureExtractor from "./FeatureExtractor/";
-import word2vec from "./Word2vec/";
+import pitchDetection from "./PitchDetection";
+import imageClassifier from "./ImageClassifier";
+import soundClassifier from "./SoundClassifier";
+import KNNClassifier from "./KNNClassifier";
+import featureExtractor from "./FeatureExtractor";
+import word2vec from "./Word2vec";
 import {YOLO} from "./ObjectDetector/YOLO";
 import {CocoSsd} from "./ObjectDetector/CocoSsd";
 import objectDetector from "./ObjectDetector";
 import poseNet from "./PoseNet";
 import * as imageUtils from "./utils/imageUtilities";
-import styleTransfer from "./StyleTransfer/";
-import charRNN from "./CharRNN/";
-import pix2pix from "./Pix2pix/";
+import styleTransfer from "./StyleTransfer";
+import charRNN from "./CharRNN";
+import pix2pix from "./Pix2pix";
 import sketchRNN from "./SketchRNN";
 import uNet from "./UNET";
 import CVAE from "./CVAE";
@@ -72,7 +72,9 @@ const withPreload = {
   communityStatement();
 })();
-export default Object.assign({ p5Utils }, preloadRegister(withPreload), {
+export default ({
+  p5Utils,
+  ...preloadRegister(withPreload),
   KNNClassifier,
   communityStatement,
   ...imageUtils,
diff --git a/src/utils/callcallback_test.js b/src/utils/callcallback_test.js
index df60a2b23..e8b63502f 100644
--- a/src/utils/callcallback_test.js
+++ b/src/utils/callcallback_test.js
@@ -31,9 +31,7 @@ describe("callCallback", () => {
       resolve(greeting);
     });
-    const mockCallback = (err, result) => {
-      return `hello ${result}`;
-    };
+    const mockCallback = (err, result) => `hello ${result}`;
     const mockUtils = { mockCallback };
     spyOn(mockUtils, "mockCallback").and.callThrough();
diff --git a/src/utils/modelLoader.js b/src/utils/modelLoader.js
index 1fc039018..d2bfbdcd5 100644
--- a/src/utils/modelLoader.js
+++ b/src/utils/modelLoader.js
@@ -8,8 +8,7 @@ import axios from 'axios';
  * @returns {boolean}
  */
 export function isAbsoluteURL(str) {
-  const pattern = new RegExp('^(?:[a-z]+:)?//', 'i');
-  return pattern.test(str);
+  return /^(?:[a-z]+:)?\/\//i.test(str);
 }
 /**
diff --git a/src/utils/random.js b/src/utils/random.js
index aca04c8d4..614524f34 100644
--- a/src/utils/random.js
+++ b/src/utils/random.js
@@ -46,9 +46,12 @@ const randomSample = (arr, k, withReplacement = false) => {
     if (k > arr.length) {
       throw new RangeError('Sample size must be less than or equal to array length when sampling without replacement.')
     }
-    sample = arr.map(a => [a, Math.random()]).sort((a, b) => {
-      return a[1] < b[1] ? -1 : 1;}).slice(0, k).map(a => a[0]);
-  };
+    sample = arr
+      .map(a => [a, Math.random()])
+      .sort((a, b) => a[1] < b[1] ? -1 : 1)
+      .slice(0, k)
+      .map(a => a[0]);
+  }
   return sample;
 };
diff --git a/src/utils/testingUtils/index.js b/src/utils/testingUtils/index.js
index d0bdfc644..acf82c2bc 100644
--- a/src/utils/testingUtils/index.js
+++ b/src/utils/testingUtils/index.js
@@ -13,9 +13,7 @@ export const asyncLoadImage = async (src) => {
   return img;
 }
-export const getRobin = async () => {
-  return asyncLoadImage("https://cdn.jsdelivr.net/gh/ml5js/ml5-library@main/assets/bird.jpg");
-}
+export const getRobin = async () => asyncLoadImage("https://cdn.jsdelivr.net/gh/ml5js/ml5-library@main/assets/bird.jpg")
 export const randomImageData = (width = 200, height = 100) => {
   const length = width * height * 4; // 4 channels - RGBA