Skip to content

Commit

Permalink
The disable-type-inference flag is not being set (#455)
Browse files Browse the repository at this point in the history
* The disable-type-inference flag is not being set

* Fix softmax
  • Loading branch information
DenisVieriu97 committed Jun 26, 2023
1 parent e6f4876 commit 302584f
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 3 deletions.
2 changes: 1 addition & 1 deletion aten/src/ATen/native/mps/OperationUtils.mm
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ void runMPSGraph(MPSStream* mpsStream, MPSCachedGraph *cachedGraph, NSDictionary
shapes[graphTensor] = [[[MPSGraphShapedType alloc] initWithShape:nil dataType:graphTensorData.dataType] autorelease];
}
MPSGraphCompilationDescriptor *compilationDescriptor = [[MPSGraphCompilationDescriptor new] autorelease];

[compilationDescriptor disableTypeInference];
executable = [[mpsGraph compileWithDevice:nil
feeds:shapes
targetTensors:[results allKeys]
Expand Down
4 changes: 2 additions & 2 deletions aten/src/ATen/native/mps/operations/SoftMax.mm
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ void get_shapes(MPSShape* input_shape_readonly,
assert(0 && "Invalid dim\n");
}
}

bool disableTypeInference = input_.dim() <= 4;
string key = "softmax_mps_out" + getTensorsStringKey(input, true, /*exclude_shape*/true) + ":" +
mem_format_key + ":" + std::to_string(dim_);
CachedGraph* cachedGraph = static_cast<CachedGraph *>(cache_->LookUp(key));
Expand Down Expand Up @@ -162,7 +162,7 @@ void get_shapes(MPSShape* input_shape_readonly,
outputPlaceholder.getMPSGraphTensor() : outputPlaceholder.getMPSGraphTensorData()
};

runMPSGraph(stream, cachedGraph, feeds, results, /*disable_type_inference*/ true);
runMPSGraph(stream, cachedGraph, feeds, results, disableTypeInference);
}

}
Expand Down

0 comments on commit 302584f

Please sign in to comment.