
Commit 5592acc

Remove Gelu Fusion for TF Newapi (#1886)
Signed-off-by: zehao-intel <[email protected]>
1 parent 4372a76 commit 5592acc

File tree

2 files changed: +1, -415 lines


neural_compressor/tensorflow/quantization/utils/graph_rewriter/generic/fuse_gelu.py

Lines changed: 1 addition & 2 deletions
@@ -22,15 +22,14 @@
 from neural_compressor.tensorflow.quantization.utils.graph_rewriter.graph_base import GraphRewriterBase
 from neural_compressor.tensorflow.quantization.utils.graph_util import GraphAnalyzer
 from neural_compressor.tensorflow.quantization.utils.graph_util import GraphRewriterHelper as Helper
-from neural_compressor.tensorflow.utils import SPR_BASE_VERSIONS
 
 
 class FuseGeluOptimizer(GraphRewriterBase):  # pragma: no cover
     """Fuse Sqrt + RealDiv + Erf + AddV2 + Mul + Mul into Gelu op."""
 
     def do_transformation(self):
         """Execute the fusion from small ops to Gelu."""
-        if not (tf.version.VERSION in ("1.15.0-up2", "1.15.0-up3") or tf.version.VERSION in SPR_BASE_VERSIONS):
+        if tf.version.VERSION not in ("1.15.0-up2", "1.15.0-up3"):
             return self.model
 
         cur_graph = GraphAnalyzer()
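
For context, a minimal sketch of the version gate this change leaves in place: with the SPR_BASE_VERSIONS check removed, the Gelu fusion only runs on the Intel TensorFlow 1.15.0-up2/up3 builds, and every other TensorFlow version gets the model back untouched. The helper name and the non-Intel version string below are illustrative, not taken from the repository.

# Sketch only (not repository code), assuming tf.version.VERSION is a plain string.
# Only the two Intel TensorFlow 1.15 builds still trigger the
# Sqrt + RealDiv + Erf + AddV2 + Mul + Mul -> Gelu fusion after this commit.
FUSION_TF_VERSIONS = ("1.15.0-up2", "1.15.0-up3")

def gelu_fusion_enabled(tf_version: str) -> bool:
    """Return True only for the TF builds that keep the Gelu fusion."""
    return tf_version in FUSION_TF_VERSIONS

# Usage: do_transformation now returns the model unchanged for anything else,
# including the newer builds the removed SPR_BASE_VERSIONS branch used to cover.
assert gelu_fusion_enabled("1.15.0-up3")
assert not gelu_fusion_enabled("2.15.0")  # illustrative stock TF release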
