#ifndef FLATBUFFERS_GENERATED_MATMUL_TT_TARGET_TTNN_H_
#define FLATBUFFERS_GENERATED_MATMUL_TT_TARGET_TTNN_H_

#include "flatbuffers/flatbuffers.h"

static_assert(FLATBUFFERS_VERSION_MAJOR == 24 &&
              FLATBUFFERS_VERSION_MINOR == 3 &&
              FLATBUFFERS_VERSION_REVISION == 25,
              "Non-compatible flatbuffers version included");

struct MatmulMultiCoreReuseProgramConfigBuilder;
struct MatmulMultiCoreReuseMultiCastProgramConfigBuilder;
struct MatmulMultiCoreReuseMultiCast1DProgramConfigBuilder;
struct MatmulMultiCoreReuseMultiCastDRAMShardedProgramConfigBuilder;
struct MatmulOpBuilder;
struct LinearOpBuilder;
// Human-readable names for the MatmulProgramConfig union members.
inline const char * const *EnumNamesMatmulProgramConfig() {
  static const char * const names[6] = {
    "MatmulMultiCoreReuseProgramConfig",
    "MatmulMultiCoreReuseMultiCastProgramConfig",
    "MatmulMultiCoreReuseMultiCast1DProgramConfig",
    "MatmulMultiCoreReuseMultiCastDRAMShardedProgramConfig",

inline const char *EnumNameMatmulProgramConfig(MatmulProgramConfig e) {
  const size_t index = static_cast<size_t>(e);
bool VerifyMatmulProgramConfigVector(::flatbuffers::Verifier &verifier, const ::flatbuffers::Vector<::flatbuffers::Offset<void>> *values, const ::flatbuffers::Vector<MatmulProgramConfig> *types);

// MatmulMultiCoreReuseProgramConfig: vtable offsets, verifier, builder, and Create helper.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {

  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&

  ::flatbuffers::FlatBufferBuilder &fbb_;

  ::flatbuffers::Offset<MatmulMultiCoreReuseProgramConfig> Finish() {
    auto o = ::flatbuffers::Offset<MatmulMultiCoreReuseProgramConfig>(end);

inline ::flatbuffers::Offset<MatmulMultiCoreReuseProgramConfig> CreateMatmulMultiCoreReuseProgramConfig(
    ::flatbuffers::FlatBufferBuilder &_fbb,
    const tt::target::ttnn::CoreCoord *compute_with_storage_grid_size = nullptr,
    uint64_t in0_block_w = 0,
    uint64_t out_subblock_h = 0,
    uint64_t out_subblock_w = 0,
    uint64_t per_core_m = 0,
    uint64_t per_core_n = 0) {
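// Usage sketch (not part of the generated header): a minimal example of building
// this table with the Create helper above. The field values, and the assumption
// that CoreCoord exposes an (x, y) constructor, are illustrative only.
//
//   ::flatbuffers::FlatBufferBuilder fbb;
//   tt::target::ttnn::CoreCoord grid(8, 8);  // assumed (x, y) struct constructor
//   auto cfg = CreateMatmulMultiCoreReuseProgramConfig(
//       fbb, &grid,
//       /*in0_block_w=*/2,
//       /*out_subblock_h=*/4, /*out_subblock_w=*/2,
//       /*per_core_m=*/4, /*per_core_n=*/2);
//   // cfg is an ::flatbuffers::Offset<MatmulMultiCoreReuseProgramConfig> that can
//   // later be stored in a MatmulOp's program-config union slot.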
// MatmulMultiCoreReuseMultiCastProgramConfig: vtable offsets, verifier, builder, and Create helper.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {

  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           verifier.VerifyTable(fused_activation()) &&

  ::flatbuffers::FlatBufferBuilder &fbb_;

  ::flatbuffers::Offset<MatmulMultiCoreReuseMultiCastProgramConfig> Finish() {
    auto o = ::flatbuffers::Offset<MatmulMultiCoreReuseMultiCastProgramConfig>(end);

inline ::flatbuffers::Offset<MatmulMultiCoreReuseMultiCastProgramConfig> CreateMatmulMultiCoreReuseMultiCastProgramConfig(
    ::flatbuffers::FlatBufferBuilder &_fbb,
    const tt::target::ttnn::CoreCoord *compute_with_storage_grid_size = nullptr,
    uint64_t in0_block_w = 0,
    uint64_t out_subblock_h = 0,
    uint64_t out_subblock_w = 0,
    uint64_t out_block_h = 0,
    uint64_t out_block_w = 0,
    uint64_t per_core_m = 0,
    uint64_t per_core_n = 0,
    bool transpose_mcast = false,
    ::flatbuffers::Offset<tt::target::ttnn::UnaryWithParam> fused_activation = 0,
    bool fuse_batch = false) {
// MatmulMultiCoreReuseMultiCast1DProgramConfig: vtable offsets, accessors, verifier, builder, and Create helper.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_NUM_GLOBAL_CB_RECEIVERS = 30

  const tt::target::ttnn::CoreRangeSet *hop_cores() const {
    return GetPointer<const tt::target::ttnn::CoreRangeSet *>(VT_HOP_CORES);
  }
  uint64_t num_global_cb_receivers() const {
    return GetField<uint64_t>(VT_NUM_GLOBAL_CB_RECEIVERS, 0);
  }

  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           verifier.VerifyTable(fused_activation()) &&
           verifier.VerifyTable(hop_cores()) &&
           VerifyField<uint64_t>(verifier, VT_NUM_GLOBAL_CB_RECEIVERS, 8) &&

  ::flatbuffers::FlatBufferBuilder &fbb_;

  void add_hop_cores(::flatbuffers::Offset<tt::target::ttnn::CoreRangeSet> hop_cores) {
  void add_num_global_cb_receivers(uint64_t num_global_cb_receivers) {
    fbb_.AddElement<uint64_t>(MatmulMultiCoreReuseMultiCast1DProgramConfig::VT_NUM_GLOBAL_CB_RECEIVERS, num_global_cb_receivers, 0);
  }

  ::flatbuffers::Offset<MatmulMultiCoreReuseMultiCast1DProgramConfig> Finish() {
    auto o = ::flatbuffers::Offset<MatmulMultiCoreReuseMultiCast1DProgramConfig>(end);

inline ::flatbuffers::Offset<MatmulMultiCoreReuseMultiCast1DProgramConfig> CreateMatmulMultiCoreReuseMultiCast1DProgramConfig(
    ::flatbuffers::FlatBufferBuilder &_fbb,
    const tt::target::ttnn::CoreCoord *compute_with_storage_grid_size = nullptr,
    uint64_t in0_block_w = 0,
    uint64_t out_subblock_h = 0,
    uint64_t out_subblock_w = 0,
    uint64_t out_block_h = 0,
    uint64_t out_block_w = 0,
    uint64_t per_core_m = 0,
    uint64_t per_core_n = 0,
    bool fuse_batch = false,
    ::flatbuffers::Offset<tt::target::ttnn::UnaryWithParam> fused_activation = 0,
    bool mcast_in0 = false,
    bool gather_in0 = false,
    ::flatbuffers::Offset<tt::target::ttnn::CoreRangeSet> hop_cores = 0,
    uint64_t num_global_cb_receivers = 0) {
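// Usage sketch (not part of the generated header): the same table can also be
// assembled field-by-field with its Builder type instead of the Create helper;
// the values below are illustrative assumptions only.
//
//   ::flatbuffers::FlatBufferBuilder fbb;
//   MatmulMultiCoreReuseMultiCast1DProgramConfigBuilder builder(fbb);
//   builder.add_in0_block_w(2);
//   builder.add_per_core_m(4);
//   builder.add_per_core_n(2);
//   builder.add_fuse_batch(true);
//   builder.add_mcast_in0(true);
//   builder.add_num_global_cb_receivers(1);
//   // add_fused_activation()/add_hop_cores() would take offsets to
//   // UnaryWithParam/CoreRangeSet tables built beforehand in the same builder.
//   auto cfg = builder.Finish();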
// MatmulMultiCoreReuseMultiCastDRAMShardedProgramConfig: vtable offsets, verifier, builder, and Create helper.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {

  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           verifier.VerifyTable(fused_activation()) &&

  ::flatbuffers::FlatBufferBuilder &fbb_;

  ::flatbuffers::Offset<MatmulMultiCoreReuseMultiCastDRAMShardedProgramConfig> Finish() {
    auto o = ::flatbuffers::Offset<MatmulMultiCoreReuseMultiCastDRAMShardedProgramConfig>(end);

inline ::flatbuffers::Offset<MatmulMultiCoreReuseMultiCastDRAMShardedProgramConfig> CreateMatmulMultiCoreReuseMultiCastDRAMShardedProgramConfig(
    ::flatbuffers::FlatBufferBuilder &_fbb,
    uint64_t in0_block_w = 0,
    uint64_t per_core_m = 0,
    uint64_t per_core_n = 0,
    ::flatbuffers::Offset<tt::target::ttnn::UnaryWithParam> fused_activation = 0) {
// MatmulOp: vtable offsets, accessors, verifier, and typed union accessors.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_MATMUL_PROGRAM_CONFIG = 16

  const tt::target::ttnn::TensorRef *a() const {
    return GetPointer<const tt::target::ttnn::TensorRef *>(VT_A);
  }
  const tt::target::ttnn::TensorRef *b() const {
    return GetPointer<const tt::target::ttnn::TensorRef *>(VT_B);
  }
  const tt::target::ttnn::TensorRef *out() const {
    return GetPointer<const tt::target::ttnn::TensorRef *>(VT_OUT);
  }
  const void *matmul_program_config() const {
    return GetPointer<const void *>(VT_MATMUL_PROGRAM_CONFIG);
  }

  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_A) &&
           verifier.VerifyTable(a()) &&
           VerifyOffset(verifier, VT_B) &&
           verifier.VerifyTable(b()) &&
           VerifyOffset(verifier, VT_OUT) &&
           verifier.VerifyTable(out()) &&
           VerifyOffset(verifier, VT_MATMUL_PROGRAM_CONFIG) &&

template<> inline const tt::target::ttnn::MatmulMultiCoreReuseProgramConfig *MatmulOp::matmul_program_config_as<tt::target::ttnn::MatmulMultiCoreReuseProgramConfig>() const {
  return matmul_program_config_as_MatmulMultiCoreReuseProgramConfig();
}

template<> inline const tt::target::ttnn::MatmulMultiCoreReuseMultiCastProgramConfig *MatmulOp::matmul_program_config_as<tt::target::ttnn::MatmulMultiCoreReuseMultiCastProgramConfig>() const {
  return matmul_program_config_as_MatmulMultiCoreReuseMultiCastProgramConfig();
}

template<> inline const tt::target::ttnn::MatmulMultiCoreReuseMultiCast1DProgramConfig *MatmulOp::matmul_program_config_as<tt::target::ttnn::MatmulMultiCoreReuseMultiCast1DProgramConfig>() const {
  return matmul_program_config_as_MatmulMultiCoreReuseMultiCast1DProgramConfig();
}

template<> inline const tt::target::ttnn::MatmulMultiCoreReuseMultiCastDRAMShardedProgramConfig *MatmulOp::matmul_program_config_as<tt::target::ttnn::MatmulMultiCoreReuseMultiCastDRAMShardedProgramConfig>() const {
  return matmul_program_config_as_MatmulMultiCoreReuseMultiCastDRAMShardedProgramConfig();
}
// MatmulOpBuilder and the CreateMatmulOp convenience function.
  ::flatbuffers::FlatBufferBuilder &fbb_;

  void add_a(::flatbuffers::Offset<tt::target::ttnn::TensorRef> a) {
  void add_b(::flatbuffers::Offset<tt::target::ttnn::TensorRef> b) {
  void add_out(::flatbuffers::Offset<tt::target::ttnn::TensorRef> out) {
  void add_matmul_program_config(::flatbuffers::Offset<void> matmul_program_config) {
    fbb_.AddOffset(MatmulOp::VT_MATMUL_PROGRAM_CONFIG, matmul_program_config);
  }

  ::flatbuffers::Offset<MatmulOp> Finish() {
    auto o = ::flatbuffers::Offset<MatmulOp>(end);

inline ::flatbuffers::Offset<MatmulOp> CreateMatmulOp(
    ::flatbuffers::FlatBufferBuilder &_fbb,
    ::flatbuffers::Offset<tt::target::ttnn::TensorRef> a = 0,
    ::flatbuffers::Offset<tt::target::ttnn::TensorRef> b = 0,
    ::flatbuffers::Offset<tt::target::ttnn::TensorRef> out = 0,
    bool transpose_a = false,
    bool transpose_b = false,
    tt::target::ttnn::MatmulProgramConfig matmul_program_config_type = tt::target::ttnn::MatmulProgramConfig::NONE,
    ::flatbuffers::Offset<void> matmul_program_config = 0) {
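// Usage sketch (not part of the generated header): storing one of the program
// configs in MatmulOp's (matmul_program_config_type, matmul_program_config)
// union pair. The TensorRef offsets a, b, out and the CoreCoord grid are
// assumed to have been built earlier with the same FlatBufferBuilder.
//
//   auto cfg = CreateMatmulMultiCoreReuseProgramConfig(fbb, &grid, 2, 4, 2, 4, 2);
//   auto op = CreateMatmulOp(
//       fbb, a, b, out,
//       /*transpose_a=*/false, /*transpose_b=*/false,
//       MatmulProgramConfig::MatmulMultiCoreReuseProgramConfig,
//       cfg.Union());  // Offset<T>::Union() type-erases the offset for the union slot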
// LinearOp: vtable offsets, accessors, verifier, builder, and Create helper.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {

  const tt::target::ttnn::TensorRef *a() const {
    return GetPointer<const tt::target::ttnn::TensorRef *>(VT_A);
  }
  const tt::target::ttnn::TensorRef *b() const {
    return GetPointer<const tt::target::ttnn::TensorRef *>(VT_B);
  }
  const tt::target::ttnn::TensorRef *bias() const {
    return GetPointer<const tt::target::ttnn::TensorRef *>(VT_BIAS);
  }
  const tt::target::ttnn::TensorRef *out() const {
    return GetPointer<const tt::target::ttnn::TensorRef *>(VT_OUT);
  }

  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_A) &&
           verifier.VerifyTable(a()) &&
           VerifyOffset(verifier, VT_B) &&
           verifier.VerifyTable(b()) &&
           VerifyOffset(verifier, VT_BIAS) &&
           verifier.VerifyTable(bias()) &&
           VerifyOffset(verifier, VT_OUT) &&
           verifier.VerifyTable(out()) &&

  ::flatbuffers::FlatBufferBuilder &fbb_;

  void add_a(::flatbuffers::Offset<tt::target::ttnn::TensorRef> a) {
  void add_b(::flatbuffers::Offset<tt::target::ttnn::TensorRef> b) {
  void add_bias(::flatbuffers::Offset<tt::target::ttnn::TensorRef> bias) {
  void add_out(::flatbuffers::Offset<tt::target::ttnn::TensorRef> out) {

  ::flatbuffers::Offset<LinearOp> Finish() {
    auto o = ::flatbuffers::Offset<LinearOp>(end);

inline ::flatbuffers::Offset<LinearOp> CreateLinearOp(
    ::flatbuffers::FlatBufferBuilder &_fbb,
    ::flatbuffers::Offset<tt::target::ttnn::TensorRef> a = 0,
    ::flatbuffers::Offset<tt::target::ttnn::TensorRef> b = 0,
    ::flatbuffers::Offset<tt::target::ttnn::TensorRef> bias = 0,
    ::flatbuffers::Offset<tt::target::ttnn::TensorRef> out = 0,
    bool transpose_a = false,
    bool transpose_b = false) {
// Verifies a single MatmulProgramConfig union value against its tag.
inline bool VerifyMatmulProgramConfig(::flatbuffers::Verifier &verifier, const void *obj, MatmulProgramConfig type) {
  switch (type) {
    case MatmulProgramConfig::MatmulMultiCoreReuseProgramConfig: {
      auto ptr = reinterpret_cast<const tt::target::ttnn::MatmulMultiCoreReuseProgramConfig *>(obj);
      return verifier.VerifyTable(ptr);
    }
    case MatmulProgramConfig::MatmulMultiCoreReuseMultiCastProgramConfig: {
      auto ptr = reinterpret_cast<const tt::target::ttnn::MatmulMultiCoreReuseMultiCastProgramConfig *>(obj);
      return verifier.VerifyTable(ptr);
    }
    case MatmulProgramConfig::MatmulMultiCoreReuseMultiCast1DProgramConfig: {
      auto ptr = reinterpret_cast<const tt::target::ttnn::MatmulMultiCoreReuseMultiCast1DProgramConfig *>(obj);
      return verifier.VerifyTable(ptr);
    }
    case MatmulProgramConfig::MatmulMultiCoreReuseMultiCastDRAMShardedProgramConfig: {
      auto ptr = reinterpret_cast<const tt::target::ttnn::MatmulMultiCoreReuseMultiCastDRAMShardedProgramConfig *>(obj);
      return verifier.VerifyTable(ptr);
    }
    default: return true;
  }
}

// Verifies a vector of union values against a parallel vector of tags.
inline bool VerifyMatmulProgramConfigVector(::flatbuffers::Verifier &verifier, const ::flatbuffers::Vector<::flatbuffers::Offset<void>> *values, const ::flatbuffers::Vector<MatmulProgramConfig> *types) {
  if (!values || !types) return !values && !types;
  if (values->size() != types->size()) return false;
  for (::flatbuffers::uoffset_t i = 0; i < values->size(); ++i) {
    if (!VerifyMatmulProgramConfig(
            verifier, values->Get(i), types->GetEnum<MatmulProgramConfig>(i))) {
      return false;
    }
  }
  return true;
}
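// Usage sketch (not part of the generated header): on the read side, the union
// is typically consumed by checking the tag accessor and then calling the typed
// accessor; for an already-verified `const MatmulOp *op`:
//
//   if (op->matmul_program_config_type() ==
//       MatmulProgramConfig::MatmulMultiCoreReuseProgramConfig) {
//     const auto *cfg = op->matmul_program_config_as_MatmulMultiCoreReuseProgramConfig();
//     uint64_t m = cfg->per_core_m();
//     uint64_t n = cfg->per_core_n();
//   }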