
Commit fd21b99

[js/web] set noUnusedParameters to true and fix a few bugs (microsoft#18404)
### Description
Set the tsconfig option "noUnusedParameters" to `true` and fix a few bugs discovered by TypeScript.

How the unused parameters are fixed:
- For most code (webgl), prefix the parameter with an underscore, the standard ignore pattern for the TypeScript check.
- Remove the unused parameter from the function and update the corresponding call sites (jsep).
- Fix a bug in ArgMin/ArgMax: these two operators never have more than one input, so `createArgMinMaxAttributesFromInputs()` is removed.
- Add the proxy main.ts into the TypeScript check and fix a bug in parameter passing.
- Fix the `run()` function call and add a type-check fix (hack).
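For readers unfamiliar with the option, here is a minimal sketch of what `noUnusedParameters` enforces and how the underscore convention silences it. The `makeInputNames` helper below is hypothetical; it only mirrors the pattern used throughout the webgl diffs and is not code from this commit:

```ts
// tsconfig excerpt (the compiler option this commit turns on):
//   "compilerOptions": { "noUnusedParameters": true }

// With the flag on, a parameter that is never read is a compile error unless
// its name starts with an underscore, which TypeScript treats as
// "intentionally unused".
const makeInputNames = (count: number): string[] =>
    Array.from({length: count}, (_v, i) => `X${i}`);  // `_v` is deliberately ignored

// usage: makeInputNames(3) -> ['X0', 'X1', 'X2']
```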
1 parent 486b378 commit fd21b99

35 files changed (+146, -114 lines)

.eslintrc.js

Lines changed: 5 additions & 1 deletion
@@ -146,9 +146,13 @@ module.exports = {
   }, {
     files: ['web/lib/**/*.ts'],
     excludedFiles: 'web/lib/wasm/proxy-worker/**/*',
-    parserOptions: { 'project': 'web/tsconfig.json' },rules: {
+    parserOptions: { 'project': 'web/tsconfig.json' },
+    rules: {
       'no-underscore-dangle': 'off',
     }
+  }, {
+    files: ['web/lib/wasm/proxy-worker/**/*.ts'],
+    parserOptions: { 'project': 'web/lib/wasm/proxy-worker/tsconfig.json' },
   }, {
     files: ['web/lib/onnxjs/**/*.ts'], rules: {
       // TODO: those rules are useful. should turn on them in future (webgl refactor)
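For orientation, a rough sketch of how the touched ESLint overrides read after this change; this is an abridged reconstruction from the hunk above, not the full .eslintrc.js:

```js
module.exports = {
  // ...
  overrides: [
    // ...
    {
      files: ['web/lib/**/*.ts'],
      excludedFiles: 'web/lib/wasm/proxy-worker/**/*',
      parserOptions: {'project': 'web/tsconfig.json'},
      rules: {'no-underscore-dangle': 'off'}
    }, {
      // proxy-worker sources are now linted against their own tsconfig,
      // so typed lint rules resolve types from the worker project
      files: ['web/lib/wasm/proxy-worker/**/*.ts'],
      parserOptions: {'project': 'web/lib/wasm/proxy-worker/tsconfig.json'}
    },
    // ...
  ]
};
```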

web/lib/onnxjs/backends/webgl/glsl-coordinate-lib.ts

Lines changed: 4 additions & 4 deletions
@@ -186,7 +186,7 @@ export class CoordsGlslLib extends GlslLib {
   /**
    * 1D packed output coordinates.
    */
-  protected getOutputPacked1DCoords(shape: [number], texShape: [number, number]): GlslLibRoutine {
+  protected getOutputPacked1DCoords(_shape: [number], texShape: [number, number]): GlslLibRoutine {
     const packedTexShape = texShape;
     let source = '';
     if (packedTexShape[0] === 1) {
@@ -331,7 +331,7 @@ export class CoordsGlslLib extends GlslLib {
   /**
    * Unpacked 1D output coordinates.
    */
-  protected getOutputUnpacked1DCoords(shape: [number], texShape: [number, number]): GlslLibRoutine {
+  protected getOutputUnpacked1DCoords(_shape: [number], texShape: [number, number]): GlslLibRoutine {
     const source = `
       int getOutputCoords() {
         ivec2 resTexRC = ivec2(TexCoords.xy *
@@ -641,7 +641,7 @@ export class CoordsGlslLib extends GlslLib {
     if (outRank < 2 && inRank > 0) {
       unpackedCoordsSnippet = 'coords';
     } else {
-      unpackedCoordsSnippet = inShape.map((s, i) => `coords.${fields[i + rankDiff]}`).join(', ');
+      unpackedCoordsSnippet = inShape.map((_s, i) => `coords.${fields[i + rankDiff]}`).join(', ');
     }

     let output = 'return outputValue;';
@@ -734,7 +734,7 @@ export class CoordsGlslLib extends GlslLib {
     if (outRank < 2 && inRank > 0) {
       unpackedCoordsSnippet = 'coords';
     } else {
-      unpackedCoordsSnippet = inputLayout.unpackedShape.map((s, i) => `coords.${fields[i + rankDiff]}`).join(', ');
+      unpackedCoordsSnippet = inputLayout.unpackedShape.map((_s, i) => `coords.${fields[i + rankDiff]}`).join(', ');
     }
     const source = `
       float ${funcName}() {

web/lib/onnxjs/backends/webgl/ops/concat-packed.ts

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@ import {getChannels, unpackFromChannel} from './packing-utils';

 const createPackedConcatProgramMetadata = (inputCount: number, cacheHint: string) => ({
   name: 'Concat (packed)',
-  inputNames: Array.from({length: inputCount}, (v, i) => `X${i}`),
+  inputNames: Array.from({length: inputCount}, (_v, i) => `X${i}`),
   inputTypes: Array(inputCount).fill(TextureType.packed),
   cacheHint
 });

web/lib/onnxjs/backends/webgl/ops/concat.ts

Lines changed: 2 additions & 2 deletions
@@ -30,13 +30,13 @@ export const concat: OperatorImplementation<ConcatAttributes> =

 const createUnpackedConcatProgramMetadata = (inputCount: number, cacheHint: string) => ({
   name: 'Concat',
-  inputNames: Array.from({length: inputCount}, (v, i) => `X${i}`),
+  inputNames: Array.from({length: inputCount}, (_v, i) => `X${i}`),
   inputTypes: Array(inputCount).fill(TextureType.unpacked),
   cacheHint
 });

 const createUnpackedConcatProgramInfo =
-    (handler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: Tensor[], axis: number): ProgramInfo => {
+    (_handler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: Tensor[], axis: number): ProgramInfo => {
       const inputShape = inputs[0].dims.slice();
       if (axis >= inputShape.length || axis < (-1 * inputShape.length)) {
         throw new Error('axis specified for concat doesn\'t match input dimensionality');

web/lib/onnxjs/backends/webgl/ops/gather.ts

Lines changed: 1 addition & 1 deletion
@@ -30,7 +30,7 @@ const gatherProgramMetadata = {
 };

 const createGatherProgramInfo =
-    (handler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: Tensor[], axis: number): ProgramInfo => {
+    (_handler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: Tensor[], axis: number): ProgramInfo => {
       const inputShape = inputs[0].dims.slice();
       const indexDataShape = inputs[1].dims.slice();
       const outputShape = new Array(inputShape.length + indexDataShape.length - 1);

web/lib/onnxjs/backends/webgl/ops/im2col.ts

Lines changed: 1 addition & 1 deletion
@@ -15,7 +15,7 @@ const createIm2ColProgramMetadata = (cacheHint: string) => ({
 });

 const createIm2ColProgramInfo =
-    (inferenceHandler: WebGLInferenceHandler, metadata: ProgramMetadata, x: Tensor, w: Tensor,
+    (_inferenceHandler: WebGLInferenceHandler, metadata: ProgramMetadata, x: Tensor, w: Tensor,
      outputShape: readonly number[], attributes: ConvAttributes): ProgramInfo => {
       const xshape = x.dims;
       const wshape = w.dims;

web/lib/onnxjs/backends/webgl/ops/image-scaler.ts

Lines changed: 1 addition & 1 deletion
@@ -35,7 +35,7 @@ const imageScalerProgramMetadata = {
 };

 const createImageScalerProgramInfo =
-    (handler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: Tensor[], attributes: ImageScalerAttributes):
+    (_handler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: Tensor[], attributes: ImageScalerAttributes):
         ProgramInfo => {
           const outputShape = inputs[0].dims.slice();
           const rank = outputShape.length;

web/lib/onnxjs/backends/webgl/ops/matmul-pack.ts

Lines changed: 2 additions & 2 deletions
@@ -107,10 +107,10 @@ function getBcastSamplerForMatmul(
   const rankADiff = outRank - inARank;
   const rankBDiff = outRank - inBRank;

-  unpackedACoordsSnippet = inAShape.map((s, i) => `coords.${allGlChannels[i + rankADiff]}`);
+  unpackedACoordsSnippet = inAShape.map((_s, i) => `coords.${allGlChannels[i + rankADiff]}`);
   unpackedACoordsSnippet[inARank - 1] = 'i*2';
   unpackedACoordsSnippet.join(', ');
-  unpackedBCoordsSnippet = inBShape.map((s, i) => `coords.${allGlChannels[i + rankBDiff]}`);
+  unpackedBCoordsSnippet = inBShape.map((_s, i) => `coords.${allGlChannels[i + rankBDiff]}`);
   unpackedBCoordsSnippet[inBRank - 2] = 'i*2';
   unpackedBCoordsSnippet.join(', ');

web/lib/onnxjs/backends/webgl/ops/matmul.ts

Lines changed: 1 addition & 1 deletion
@@ -117,7 +117,7 @@ export function getBiasForMatmul(
   if (outRank < 2 && inRank > 0) {
     unpackedCoordsSnippet = 'coords';
   } else {
-    unpackedCoordsSnippet = inShape.map((s, i) => `coords.${allGlChannels[i + rankDiff]}`).join(', ');
+    unpackedCoordsSnippet = inShape.map((_s, i) => `coords.${allGlChannels[i + rankDiff]}`).join(', ');
   }
   const broadcastDims = BroadcastUtil.getBroadcastDims(inShape, outShape);
   const coordsSnippet = broadcastDims.map(d => `coords.${allGlChannels[d + rankDiff]} = 0;`).join('\n');

web/lib/onnxjs/backends/webgl/ops/reduce.ts

Lines changed: 1 addition & 1 deletion
@@ -46,7 +46,7 @@ export const parseReduceAttributes: OperatorInitialization<ReduceAttributes> = (
 };

 const createReduceProgramInfo =
-    (handler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes, name: string, reduceOp: ReduceOp,
+    (_handler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes, _name: string, reduceOp: ReduceOp,
      reduceProgramMetadata: ProgramMetadata): ProgramInfo => {
       const outputShape: number[] = [];
       const iRank = inputs[0].dims.length || 1;

web/lib/onnxjs/backends/webgl/ops/shape.ts

Lines changed: 2 additions & 2 deletions
@@ -4,7 +4,7 @@
 import {Tensor} from '../../../tensor';
 import {WebGLInferenceHandler} from '../inference-handler';

-export const shape = (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {
+export const shape = (_inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {
   validateInputs(inputs);
   return [new Tensor([inputs[0].dims.length], 'int32', undefined, undefined, new Int32Array(inputs[0].dims))];
 };
@@ -13,4 +13,4 @@ const validateInputs = (inputs: Tensor[]): void => {
   if (!inputs || inputs.length !== 1) {
     throw new Error('Shape requires 1 input.');
   }
-};
+};

web/lib/onnxjs/backends/webgl/ops/slice.ts

Lines changed: 2 additions & 2 deletions
@@ -42,8 +42,8 @@ export const parseSliceAttributes: OperatorInitialization<SliceAttributes> = (no
 };

 const createSliceProgramInfo =
-    (inferenceHandler: WebGLInferenceHandler, input: Tensor, attributes: SliceAttributes): ProgramInfo => {
-      const axes = (attributes.axes.length === 0) ? input.dims.slice(0).map((val, i) => i) : attributes.axes;
+    (_inferenceHandler: WebGLInferenceHandler, input: Tensor, attributes: SliceAttributes): ProgramInfo => {
+      const axes = (attributes.axes.length === 0) ? input.dims.slice(0).map((_val, i) => i) : attributes.axes;
       const normalizedAxes = ShapeUtil.normalizeAxes(axes, input.dims.length);
       const starts = attributes.starts.map((start, i) => {
         if (start > input.dims[normalizedAxes[i]] - 1) {

web/lib/onnxjs/backends/webgl/ops/split.ts

Lines changed: 2 additions & 2 deletions
@@ -49,13 +49,13 @@ export const parseSplitAttributes: OperatorInitialization<SplitAttributes> = (no
 };

 const getProgramCount =
-    (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], axis: number, attributes: SplitAttributes): number => {
+    (_inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], axis: number, attributes: SplitAttributes): number => {
       const [, offsets] = SplitUtil.splitShape(inputs[0].dims, axis, attributes.split, attributes.numOutputs);
       return offsets.length;
     };

 const createSplitProgramInfo =
-    (inferenceHandler: WebGLInferenceHandler, input: Tensor, attributes: SplitAttributes, axis: number, index: number):
+    (_inferenceHandler: WebGLInferenceHandler, input: Tensor, attributes: SplitAttributes, axis: number, index: number):
         ProgramInfo => {
           const [shapes, offsets] = SplitUtil.splitShape(input.dims, axis, attributes.split, attributes.numOutputs);
           const offset = offsets[index];

web/lib/onnxjs/backends/webgl/ops/sum.ts

Lines changed: 3 additions & 3 deletions
@@ -11,7 +11,7 @@ export const sum = (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]):

   const sumProgramMetadata = {
     name: 'Sum',
-    inputNames: inputs.map((v, i) => `X${i}`),
+    inputNames: inputs.map((_v, i) => `X${i}`),
     inputTypes: new Array(inputs.length).fill(TextureType.unpacked)
   };

@@ -24,7 +24,7 @@ const createSumProgramInfo =
     (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], sumProgramMetadata: ProgramMetadata): ProgramInfo => {
       const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);
       const outputShape = inputs[0].dims.slice();
-      const sumLine = inputs.map((v, i) => `${glsl.texture2D}(X${i},TexCoords)`).join(' + ');
+      const sumLine = inputs.map((_v, i) => `${glsl.texture2D}(X${i},TexCoords)`).join(' + ');
       const shaderSource = `
       void main() {
         vec4 result = ${sumLine};
@@ -65,4 +65,4 @@ const validateInputs = (inputs: Tensor[]): void => {
       throw new Error('Input types are not matched.');
     }
   }
-};
+};

web/lib/onnxjs/backends/webgl/ops/tile.ts

Lines changed: 2 additions & 2 deletions
@@ -22,7 +22,7 @@ export const tile = (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]):
 };

 const createTileProgramInfo =
-    (handler: WebGLInferenceHandler, inputs: Tensor[], tileProgramMetadata: ProgramMetadata): ProgramInfo => {
+    (_handler: WebGLInferenceHandler, inputs: Tensor[], tileProgramMetadata: ProgramMetadata): ProgramInfo => {
      const inputShape = inputs[0].dims.slice();
      const outputShape = new Array(inputShape.length);

@@ -63,4 +63,4 @@ const validateInputs = (inputs: Tensor[]): void => {
   if (inputs[1].type !== 'int32' && inputs[1].type !== 'int16') {
     throw new Error('Invalid repeat type.');
   }
-};
+};

web/lib/onnxjs/backends/webgl/ops/transpose.ts

Lines changed: 1 addition & 1 deletion
@@ -36,7 +36,7 @@ export const parseTransposeAttributes: OperatorInitialization<TransposeAttribute
     (node: Graph.Node): TransposeAttributes => createAttributeWithCacheKey({perm: node.attributes.getInts('perm', [])});

 const createTransposeProgramInfo =
-    (inferenceHandler: WebGLInferenceHandler, input: Tensor, perm: number[]): ProgramInfo => {
+    (_inferenceHandler: WebGLInferenceHandler, input: Tensor, perm: number[]): ProgramInfo => {
       const inputShape = input.dims;
       perm = getAdjustedPerm(inputShape, perm);
       const unpackedOutputShape = getOutputShape(inputShape, perm);

web/lib/onnxjs/backends/webgl/texture-data-encoder.ts

Lines changed: 2 additions & 2 deletions
@@ -82,7 +82,7 @@ export class RedFloat32DataEncoder implements DataEncoder {
   }
   decode(buffer: Encoder.DataArrayType, dataSize: number): Float32Array {
     if (this.channelSize === 1) {
-      const filteredData = (buffer as Float32Array).filter((value, index) => index % 4 === 0).subarray(0, dataSize);
+      const filteredData = (buffer as Float32Array).filter((_value, index) => index % 4 === 0).subarray(0, dataSize);
       return filteredData;
     }
     return buffer.subarray(0, dataSize) as Float32Array;
@@ -119,7 +119,7 @@ export class RGBAFloatDataEncoder implements DataEncoder {
   }
   decode(buffer: Encoder.DataArrayType, dataSize: number): Float32Array {
     if (this.channelSize === 1) {
-      const filteredData = (buffer as Float32Array).filter((value, index) => index % 4 === 0).subarray(0, dataSize);
+      const filteredData = (buffer as Float32Array).filter((_value, index) => index % 4 === 0).subarray(0, dataSize);
       return filteredData;
     }
     return buffer.subarray(0, dataSize) as Float32Array;

web/lib/onnxjs/backends/webgl/texture-layout-strategy.ts

Lines changed: 2 additions & 2 deletions
@@ -105,7 +105,7 @@ export class PreferLogicalStrategy implements TextureLayoutStrategy {
     // tensor has 3 rows, we pretend it has 4 rows in order to account for the
     // fact that the texels containing the third row are half empty.
     logShape = logShape.map(
-        (d, i) => i >= logShape.length - 2 ? (logShape[i] % 2 === 0 ? logShape[i] : logShape[i] + 1) : logShape[i]);
+        (_d, i) => i >= logShape.length - 2 ? (logShape[i] % 2 === 0 ? logShape[i] : logShape[i] + 1) : logShape[i]);

     // Packed texture height is at least 2 (the channel height of a single
     // texel).
@@ -182,7 +182,7 @@ export function parseAxisParam(axis: number|number[], shape: number[]): number[]
   const rank = shape.length;

   // Normalize input
-  axis = axis == null ? shape.map((s, i) => i) : ([] as number[]).concat(axis);
+  axis = axis == null ? shape.map((_s, i) => i) : ([] as number[]).concat(axis);

   // Check for valid range
   assert(

web/lib/onnxjs/backends/webgl/texture-manager.ts

Lines changed: 1 addition & 1 deletion
@@ -172,7 +172,7 @@ export class TextureManager {
       throw new Error(`TensorData type ${dataType} is not supported`);
     }
   }
-  toTextureData(dataType: Tensor.DataType, data: Tensor.NumberType|undefined): Encoder.DataArrayType|undefined {
+  toTextureData(_dataType: Tensor.DataType, data: Tensor.NumberType|undefined): Encoder.DataArrayType|undefined {
     if (!data) {
       return undefined;
     }

web/lib/onnxjs/execution-plan.ts

Lines changed: 1 addition & 1 deletion
@@ -114,7 +114,7 @@ export class ExecutionPlan {

         // resolve downstream nodes
         const downstreamNodes = new Set<number>();
-        outputList.forEach((output, i) => {
+        outputList.forEach((_output, i) => {
           const j = thisOp.node.outputs[i];
           for (const currentDownstreamNodeIndex of graphValues[j].to) {
             const currentDownstreamNode = graphNodes[currentDownstreamNodeIndex];

web/lib/onnxjs/instrument.ts

Lines changed: 1 addition & 1 deletion
@@ -176,7 +176,7 @@ function createCategorizedLogger(category: string): Logger.CategorizedLogger {
 // NOTE: argument 'category' is put the last parameter beacause typescript
 // doesn't allow optional argument put in front of required argument. This
 // order is different from a usual logging API.
-function logInternal(severity: Logger.Severity, content: string, stack: number, category?: string) {
+function logInternal(severity: Logger.Severity, content: string, _stack: number, category?: string) {
   const config = LOGGER_CONFIG_MAP[category || ''] || LOGGER_CONFIG_MAP[''];
   if (SEVERITY_VALUE[severity] < SEVERITY_VALUE[config.minimalSeverity]) {
     return;

web/lib/onnxjs/util.ts

Lines changed: 1 addition & 1 deletion
@@ -967,7 +967,7 @@ export class ReduceUtil {
     const dims = a.dims.slice(0);
     // if axes is not set, perform reduce on all axes
     if (axes.length === 0) {
-      dims.forEach((d, ind) => axes.push(ind));
+      dims.forEach((_d, ind) => axes.push(ind));
     }
     // get a temporary broadcastable output shape
     const outputDims = ReduceUtil.calcReduceShape(dims, axes, true);

web/lib/wasm/jsep/webgpu/ops/argminmax.ts

Lines changed: 4 additions & 13 deletions
@@ -27,11 +27,6 @@ export interface ArgMinMaxAttributes extends AttributeWithCacheKey {
   selectLastIndex: number;
 }

-const createArgMinMaxAttributesFromInputs =
-    (inputs: readonly TensorView[], attributes: ArgMinMaxAttributes): ArgMinMaxAttributes =>
-        createAttributeWithCacheKey(
-            {axis: attributes.axis, keepDims: attributes.keepDims, selectLastIndex: attributes.selectLastIndex});
-
 export const argMin = (context: ComputeContext, attributes: ArgMinMaxAttributes): void => {
   validateInputs(context.inputs);
   const argMinMaxOp: ReduceOp = (input, output, axes) => {
@@ -51,12 +46,10 @@ export const argMin = (context: ComputeContext, attributes: ArgMinMaxAttributes)
     ];
   };

-  const updatedAttributes: ArgMinMaxAttributes =
-      context.inputs.length === 1 ? attributes : createArgMinMaxAttributesFromInputs(context.inputs, attributes);
   context.compute(
       createReduceProgramInfo(
-          'ArgMin', {hint: updatedAttributes.cacheKey}, [context.inputs[0]], argMinMaxOp, [updatedAttributes.axis],
-          DataType.int64, updatedAttributes.keepDims),
+          'ArgMin', {hint: attributes.cacheKey}, [context.inputs[0]], argMinMaxOp, [attributes.axis], DataType.int64,
+          attributes.keepDims),
       {inputs: [0]});
 };

@@ -79,12 +72,10 @@ export const argMax = (context: ComputeContext, attributes: ArgMinMaxAttributes)
     ];
   };

-  const updatedAttributes: ArgMinMaxAttributes =
-      context.inputs.length === 1 ? attributes : createArgMinMaxAttributesFromInputs(context.inputs, attributes);
   context.compute(
       createReduceProgramInfo(
-          'argMax', {hint: updatedAttributes.cacheKey}, [context.inputs[0]], argMinMaxOp, [updatedAttributes.axis],
-          DataType.int64, updatedAttributes.keepDims),
+          'argMax', {hint: attributes.cacheKey}, [context.inputs[0]], argMinMaxOp, [attributes.axis], DataType.int64,
+          attributes.keepDims),
       {inputs: [0]});
 };

web/lib/wasm/jsep/webgpu/ops/einsum.ts

Lines changed: 1 addition & 1 deletion
@@ -254,7 +254,7 @@ const createEinsumProgramInfo = (inputs: readonly TensorView[], einsumEquation:
       ${shaderHelper.mainStart()}
       ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes(outputSize)}
       var outputIndices = ${output.offsetToIndices('global_idx')};
-      ${inputVars.map((inputVar, i) => `var input${i}Indices: ${inputVars[i].type.indices};`).join('\n')}
+      ${inputVars.map((_var, i) => `var input${i}Indices: ${inputVars[i].type.indices};`).join('\n')}
       ${reduceOps.join('\n')};
       ${output.setByOffset('global_idx', 'sum')};
     }`;
