From ed75b5f16888c8abb35dcaa0925f8183b3c1fc58 Mon Sep 17 00:00:00 2001
From: Matthias Cremon
Date: Tue, 14 Apr 2026 07:45:19 -0700
Subject: [PATCH] Disable mean fallback op (#18815)

Summary:
Pull Request resolved: https://github.com/pytorch/executorch/pull/18815

POR models (and virtually anything else that I've seen) only use mean on
floats. Removing the fallback by default saves 75.6kB in .text.

Differential Revision: D100258757
---
 backends/cadence/fusion_g3/operators/op_mean.cpp | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/backends/cadence/fusion_g3/operators/op_mean.cpp b/backends/cadence/fusion_g3/operators/op_mean.cpp
index cefd45f6ef8..76a01d221af 100644
--- a/backends/cadence/fusion_g3/operators/op_mean.cpp
+++ b/backends/cadence/fusion_g3/operators/op_mean.cpp
@@ -151,6 +151,7 @@ Tensor& mean_out(
         p_axis,
         num_axis_dims);
   } else {
+#ifdef G3_ENABLE_ALL_DTYPES
     ET_KERNEL_CHECK(
         ctx,
         torch::executor::check_mean_dim_args(in, dim_list, keepdim, dtype, out),
@@ -183,6 +184,10 @@ Tensor& mean_out(
         }
       });
     });
+#else
+    ET_DCHECK_MSG(
+        false, "mean.out: non-float dtypes require G3_ENABLE_ALL_DTYPES");
+#endif
  }

  return out;