TT-MLIR
types_generated.h
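This header is generated by flatc from the TT-MLIR target schema and must be paired with the flatbuffers library version it was built against (checked by the static_assert below). As a quick orientation, here is a minimal usage sketch, not part of the generated file, showing how the Create*/Create*Direct helpers and enum types defined below are typically driven from a ::flatbuffers::FlatBufferBuilder; the include path and the concrete field values are placeholder assumptions.

#include "types_generated.h"  // hypothetical include path for this generated header

#include <cstdint>
#include <vector>

int main() {
  ::flatbuffers::FlatBufferBuilder fbb;
  // CreateShardSpecDirect copies a std::vector into the buffer as a flatbuffers vector.
  std::vector<int64_t> shard_shape = {32, 32};  // placeholder shard shape
  auto shard_spec = tt::target::CreateShardSpecDirect(fbb, &shard_shape);
  // Wrap the ShardSpec in a MemoryConfigDesc, using enums declared in this header.
  auto mem_cfg = tt::target::CreateMemoryConfigDesc(
      fbb,
      tt::target::TensorMemoryLayout::Interleaved,
      tt::target::BufferType::DRAM,
      shard_spec);
  fbb.Finish(mem_cfg);
  // Read the serialized table back in place from the finished buffer.
  auto *desc = ::flatbuffers::GetRoot<tt::target::MemoryConfigDesc>(fbb.GetBufferPointer());
  return desc->buffer_type() == tt::target::BufferType::DRAM ? 0 : 1;
}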
1 // automatically generated by the FlatBuffers compiler, do not modify
2 
3 
4 #ifndef FLATBUFFERS_GENERATED_TYPES_TT_TARGET_H_
5 #define FLATBUFFERS_GENERATED_TYPES_TT_TARGET_H_
6 
7 #include "flatbuffers/flatbuffers.h"
8 
9 // Ensure the included flatbuffers.h is the same version as when this file was
10 // generated, otherwise it may not be compatible.
11 static_assert(FLATBUFFERS_VERSION_MAJOR == 24 &&
12  FLATBUFFERS_VERSION_MINOR == 3 &&
13  FLATBUFFERS_VERSION_REVISION == 25,
14  "Non-compatible flatbuffers version included");
15 
16 namespace tt {
17 namespace target {
18 
19 struct Dim2d;
20 
21 struct Dim2dRange;
22 
23 struct ShardSpec;
24 struct ShardSpecBuilder;
25 
26 struct MemoryConfigDesc;
27 struct MemoryConfigDescBuilder;
28 
29 struct ReplicateTensor;
30 struct ReplicateTensorBuilder;
31 
32 struct ShardTensor;
33 struct ShardTensorBuilder;
34 
35 struct ShardTensor2D;
36 struct ShardTensor2DBuilder;
37 
38 struct AllGatherTensor;
39 struct AllGatherTensorBuilder;
40 
41 struct DistributionStrategy;
42 struct DistributionStrategyBuilder;
43 
44 struct MemoryDesc;
45 struct MemoryDescBuilder;
46 
47 struct LayoutDesc;
48 struct LayoutDescBuilder;
49 
50 struct TensorDesc;
51 struct TensorDescBuilder;
52 
53 struct CBDesc;
54 struct CBDescBuilder;
55 
56 struct TensorRef;
57 struct TensorRefBuilder;
58 
59 struct CBRef;
60 struct CBRefBuilder;
61 
62 struct ChipDesc;
63 struct ChipDescBuilder;
64 
65 struct ChipCoord;
66 
67 struct ChipChannel;
68 
69 struct ChipPhysicalCores;
70 struct ChipPhysicalCoresBuilder;
71 
72 struct CPUDesc;
73 struct CPUDescBuilder;
74 
75 struct SystemDesc;
76 struct SystemDescBuilder;
77 
78 struct DeviceRef;
79 struct DeviceRefBuilder;
80 
81 struct EventRef;
82 struct EventRefBuilder;
83 
84 enum class Arch : uint32_t {
85  Grayskull = 0,
86  Wormhole_b0 = 1,
87  Blackhole = 2,
88  MIN = Grayskull,
89  MAX = Blackhole
90 };
91 
92 inline const Arch (&EnumValuesArch())[3] {
93  static const Arch values[] = {
94  Arch::Grayskull,
95  Arch::Wormhole_b0,
96  Arch::Blackhole
97  };
98  return values;
99 }
100 
101 inline const char * const *EnumNamesArch() {
102  static const char * const names[4] = {
103  "Grayskull",
104  "Wormhole_b0",
105  "Blackhole",
106  nullptr
107  };
108  return names;
109 }
110 
111 inline const char *EnumNameArch(Arch e) {
112  if (::flatbuffers::IsOutRange(e, Arch::Grayskull, Arch::Blackhole)) return "";
113  const size_t index = static_cast<size_t>(e);
114  return EnumNamesArch()[index];
115 }
116 
117 enum class DataType : uint16_t {
118  Float32 = 0,
119  Float16 = 1,
120  BFloat16 = 2,
121  BFP_Float8 = 3,
122  BFP_BFloat8 = 4,
123  BFP_Float4 = 5,
124  BFP_BFloat4 = 6,
125  BFP_Float2 = 7,
126  BFP_BFloat2 = 8,
127  UInt32 = 9,
128  UInt16 = 10,
129  UInt8 = 11,
130  MIN = Float32,
131  MAX = UInt8
132 };
133 
134 inline const DataType (&EnumValuesDataType())[12] {
135  static const DataType values[] = {
136  DataType::Float32,
137  DataType::Float16,
138  DataType::BFloat16,
139  DataType::BFP_Float8,
140  DataType::BFP_BFloat8,
141  DataType::BFP_Float4,
142  DataType::BFP_BFloat4,
143  DataType::BFP_Float2,
144  DataType::BFP_BFloat2,
145  DataType::UInt32,
146  DataType::UInt16,
147  DataType::UInt8
148  };
149  return values;
150 }
151 
152 inline const char * const *EnumNamesDataType() {
153  static const char * const names[13] = {
154  "Float32",
155  "Float16",
156  "BFloat16",
157  "BFP_Float8",
158  "BFP_BFloat8",
159  "BFP_Float4",
160  "BFP_BFloat4",
161  "BFP_Float2",
162  "BFP_BFloat2",
163  "UInt32",
164  "UInt16",
165  "UInt8",
166  nullptr
167  };
168  return names;
169 }
170 
171 inline const char *EnumNameDataType(DataType e) {
172  if (::flatbuffers::IsOutRange(e, DataType::Float32, DataType::UInt8)) return "";
173  const size_t index = static_cast<size_t>(e);
174  return EnumNamesDataType()[index];
175 }
176 
177 enum class OOBVal : uint16_t {
178  Undef = 0,
179  Zero = 1,
180  One = 2,
181  Inf = 3,
182  NegInf = 4,
183  MIN = Undef,
184  MAX = NegInf
185 };
186 
187 inline const OOBVal (&EnumValuesOOBVal())[5] {
188  static const OOBVal values[] = {
189  OOBVal::Undef,
190  OOBVal::Zero,
191  OOBVal::One,
192  OOBVal::Inf,
193  OOBVal::NegInf
194  };
195  return values;
196 }
197 
198 inline const char * const *EnumNamesOOBVal() {
199  static const char * const names[6] = {
200  "Undef",
201  "Zero",
202  "One",
203  "Inf",
204  "NegInf",
205  nullptr
206  };
207  return names;
208 }
209 
210 inline const char *EnumNameOOBVal(OOBVal e) {
211  if (::flatbuffers::IsOutRange(e, OOBVal::Undef, OOBVal::NegInf)) return "";
212  const size_t index = static_cast<size_t>(e);
213  return EnumNamesOOBVal()[index];
214 }
215 
216 enum class MemorySpace : uint16_t {
217  System = 0,
218  SystemMMIO = 1,
219  DeviceDRAM = 2,
220  DeviceL1 = 3,
221  MIN = System,
222  MAX = DeviceL1
223 };
224 
225 inline const MemorySpace (&EnumValuesMemorySpace())[4] {
226  static const MemorySpace values[] = {
227  MemorySpace::System,
228  MemorySpace::SystemMMIO,
229  MemorySpace::DeviceDRAM,
230  MemorySpace::DeviceL1
231  };
232  return values;
233 }
234 
235 inline const char * const *EnumNamesMemorySpace() {
236  static const char * const names[5] = {
237  "System",
238  "SystemMMIO",
239  "DeviceDRAM",
240  "DeviceL1",
241  nullptr
242  };
243  return names;
244 }
245 
246 inline const char *EnumNameMemorySpace(MemorySpace e) {
247  if (::flatbuffers::IsOutRange(e, MemorySpace::System, MemorySpace::DeviceL1)) return "";
248  const size_t index = static_cast<size_t>(e);
249  return EnumNamesMemorySpace()[index];
250 }
251 
252 enum class ChipCapability : uint32_t {
253  PCIE = 1,
254  HostMMIO = 2,
255  NONE = 0,
256  ANY = 3
257 };
258 FLATBUFFERS_DEFINE_BITMASK_OPERATORS(ChipCapability, uint32_t)
259 
260 inline const ChipCapability (&EnumValuesChipCapability())[2] {
261  static const ChipCapability values[] = {
262  ChipCapability::PCIE,
263  ChipCapability::HostMMIO
264  };
265  return values;
266 }
267 
268 inline const char * const *EnumNamesChipCapability() {
269  static const char * const names[3] = {
270  "PCIE",
271  "HostMMIO",
272  nullptr
273  };
274  return names;
275 }
276 
277 inline const char *EnumNameChipCapability(ChipCapability e) {
278  if (::flatbuffers::IsOutRange(e, ChipCapability::PCIE, ChipCapability::HostMMIO)) return "";
279  const size_t index = static_cast<size_t>(e) - static_cast<size_t>(ChipCapability::PCIE);
280  return EnumNamesChipCapability()[index];
281 }
282 
283 enum class TensorMemoryLayout : uint16_t {
284  None = 0,
285  Interleaved = 1,
286  SingleBank = 2,
287  HeightSharded = 3,
288  WidthSharded = 4,
289  BlockSharded = 5,
290  MIN = None,
291  MAX = BlockSharded
292 };
293 
294 inline const TensorMemoryLayout (&EnumValuesTensorMemoryLayout())[6] {
295  static const TensorMemoryLayout values[] = {
296  TensorMemoryLayout::None,
297  TensorMemoryLayout::Interleaved,
298  TensorMemoryLayout::SingleBank,
299  TensorMemoryLayout::HeightSharded,
300  TensorMemoryLayout::WidthSharded,
301  TensorMemoryLayout::BlockSharded
302  };
303  return values;
304 }
305 
306 inline const char * const *EnumNamesTensorMemoryLayout() {
307  static const char * const names[7] = {
308  "None",
309  "Interleaved",
310  "SingleBank",
311  "HeightSharded",
312  "WidthSharded",
313  "BlockSharded",
314  nullptr
315  };
316  return names;
317 }
318 
319 inline const char *EnumNameTensorMemoryLayout(TensorMemoryLayout e) {
320  if (::flatbuffers::IsOutRange(e, TensorMemoryLayout::None, TensorMemoryLayout::BlockSharded)) return "";
321  const size_t index = static_cast<size_t>(e);
322  return EnumNamesTensorMemoryLayout()[index];
323 }
324 
325 enum class TensorLayout : uint16_t {
326  RowMajor = 0,
327  Tile = 1,
328  Invalid = 2,
329  MIN = RowMajor,
330  MAX = Invalid
331 };
332 
333 inline const TensorLayout (&EnumValuesTensorLayout())[3] {
334  static const TensorLayout values[] = {
335  TensorLayout::RowMajor,
336  TensorLayout::Tile,
337  TensorLayout::Invalid
338  };
339  return values;
340 }
341 
342 inline const char * const *EnumNamesTensorLayout() {
343  static const char * const names[4] = {
344  "RowMajor",
345  "Tile",
346  "Invalid",
347  nullptr
348  };
349  return names;
350 }
351 
352 inline const char *EnumNameTensorLayout(TensorLayout e) {
353  if (::flatbuffers::IsOutRange(e, TensorLayout::RowMajor, TensorLayout::Invalid)) return "";
354  const size_t index = static_cast<size_t>(e);
355  return EnumNamesTensorLayout()[index];
356 }
357 
358 enum class BufferType : uint16_t {
359  DRAM = 0,
360  L1 = 1,
361  SystemMemory = 2,
362  L1Small = 3,
363  Trace = 4,
364  MIN = DRAM,
365  MAX = Trace
366 };
367 
368 inline const BufferType (&EnumValuesBufferType())[5] {
369  static const BufferType values[] = {
370  BufferType::DRAM,
371  BufferType::L1,
372  BufferType::SystemMemory,
373  BufferType::L1Small,
374  BufferType::Trace
375  };
376  return values;
377 }
378 
379 inline const char * const *EnumNamesBufferType() {
380  static const char * const names[6] = {
381  "DRAM",
382  "L1",
383  "SystemMemory",
384  "L1Small",
385  "Trace",
386  nullptr
387  };
388  return names;
389 }
390 
391 inline const char *EnumNameBufferType(BufferType e) {
392  if (::flatbuffers::IsOutRange(e, BufferType::DRAM, BufferType::Trace)) return "";
393  const size_t index = static_cast<size_t>(e);
394  return EnumNamesBufferType()[index];
395 }
396 
397 enum class MeshShardDirection : uint32_t {
398  FullToShardShape = 0,
399  ShardToFullShape = 1,
400  MIN = FullToShardShape,
401  MAX = ShardToFullShape
402 };
403 
404 inline const MeshShardDirection (&EnumValuesMeshShardDirection())[2] {
405  static const MeshShardDirection values[] = {
406  MeshShardDirection::FullToShardShape,
407  MeshShardDirection::ShardToFullShape
408  };
409  return values;
410 }
411 
412 inline const char * const *EnumNamesMeshShardDirection() {
413  static const char * const names[3] = {
414  "FullToShardShape",
415  "ShardToFullShape",
416  nullptr
417  };
418  return names;
419 }
420 
421 inline const char *EnumNameMeshShardDirection(MeshShardDirection e) {
422  if (::flatbuffers::IsOutRange(e, MeshShardDirection::FullToShardShape, MeshShardDirection::ShardToFullShape)) return "";
423  const size_t index = static_cast<size_t>(e);
424  return EnumNamesMeshShardDirection()[index];
425 }
426 
427 enum class MeshShardType : uint32_t {
428  Manual = 0,
429  Replicate = 1,
430  Maximal = 2,
431  Devices = 3,
432  MIN = Manual,
433  MAX = Devices
434 };
435 
436 inline const MeshShardType (&EnumValuesMeshShardType())[4] {
437  static const MeshShardType values[] = {
438  MeshShardType::Manual,
439  MeshShardType::Replicate,
440  MeshShardType::Maximal,
441  MeshShardType::Devices
442  };
443  return values;
444 }
445 
446 inline const char * const *EnumNamesMeshShardType() {
447  static const char * const names[5] = {
448  "Manual",
449  "Replicate",
450  "Maximal",
451  "Devices",
452  nullptr
453  };
454  return names;
455 }
456 
457 inline const char *EnumNameMeshShardType(MeshShardType e) {
458  if (::flatbuffers::IsOutRange(e, MeshShardType::Manual, MeshShardType::Devices)) return "";
459  const size_t index = static_cast<size_t>(e);
460  return EnumNamesMeshShardType()[index];
461 }
462 
463 enum class DistributedTensorConfig : uint8_t {
464  NONE = 0,
465  ReplicateTensor = 1,
466  ShardTensor = 2,
467  ShardTensor2D = 3,
468  AllGatherTensor = 4,
469  MIN = NONE,
470  MAX = AllGatherTensor
471 };
472 
473 inline const DistributedTensorConfig (&EnumValuesDistributedTensorConfig())[5] {
474  static const DistributedTensorConfig values[] = {
475  DistributedTensorConfig::NONE,
476  DistributedTensorConfig::ReplicateTensor,
477  DistributedTensorConfig::ShardTensor,
478  DistributedTensorConfig::ShardTensor2D,
479  DistributedTensorConfig::AllGatherTensor
480  };
481  return values;
482 }
483 
484 inline const char * const *EnumNamesDistributedTensorConfig() {
485  static const char * const names[6] = {
486  "NONE",
487  "ReplicateTensor",
488  "ShardTensor",
489  "ShardTensor2D",
490  "AllGatherTensor",
491  nullptr
492  };
493  return names;
494 }
495 
496 inline const char *EnumNameDistributedTensorConfig(DistributedTensorConfig e) {
497  if (::flatbuffers::IsOutRange(e, DistributedTensorConfig::NONE, DistributedTensorConfig::AllGatherTensor)) return "";
498  const size_t index = static_cast<size_t>(e);
499  return EnumNamesDistributedTensorConfig()[index];
500 }
501 
502 template<typename T> struct DistributedTensorConfigTraits {
503  static const DistributedTensorConfig enum_value = DistributedTensorConfig::NONE;
504 };
505 
506 template<> struct DistributedTensorConfigTraits<tt::target::ReplicateTensor> {
507  static const DistributedTensorConfig enum_value = DistributedTensorConfig::ReplicateTensor;
508 };
509 
510 template<> struct DistributedTensorConfigTraits<tt::target::ShardTensor> {
511  static const DistributedTensorConfig enum_value = DistributedTensorConfig::ShardTensor;
512 };
513 
514 template<> struct DistributedTensorConfigTraits<tt::target::ShardTensor2D> {
515  static const DistributedTensorConfig enum_value = DistributedTensorConfig::ShardTensor2D;
516 };
517 
518 template<> struct DistributedTensorConfigTraits<tt::target::AllGatherTensor> {
519  static const DistributedTensorConfig enum_value = DistributedTensorConfig::AllGatherTensor;
520 };
521 
522 bool VerifyDistributedTensorConfig(::flatbuffers::Verifier &verifier, const void *obj, DistributedTensorConfig type);
523 bool VerifyDistributedTensorConfigVector(::flatbuffers::Verifier &verifier, const ::flatbuffers::Vector<::flatbuffers::Offset<void>> *values, const ::flatbuffers::Vector<DistributedTensorConfig> *types);
524 
525 enum class CPURole : uint8_t {
526  Host = 0,
527  Device = 1,
528  MIN = Host,
529  MAX = Device
530 };
531 
532 inline const CPURole (&EnumValuesCPURole())[2] {
533  static const CPURole values[] = {
534  CPURole::Host,
535  CPURole::Device
536  };
537  return values;
538 }
539 
540 inline const char * const *EnumNamesCPURole() {
541  static const char * const names[3] = {
542  "Host",
543  "Device",
544  nullptr
545  };
546  return names;
547 }
548 
549 inline const char *EnumNameCPURole(CPURole e) {
550  if (::flatbuffers::IsOutRange(e, CPURole::Host, CPURole::Device)) return "";
551  const size_t index = static_cast<size_t>(e);
552  return EnumNamesCPURole()[index];
553 }
554 
555 enum class MathFidelity : uint8_t {
556  LoFi = 0,
557  HiFi2 = 2,
558  HiFi3 = 3,
559  HiFi4 = 4,
560  MIN = LoFi,
561  MAX = HiFi4
562 };
563 
564 inline const MathFidelity (&EnumValuesMathFidelity())[4] {
565  static const MathFidelity values[] = {
566  MathFidelity::LoFi,
567  MathFidelity::HiFi2,
568  MathFidelity::HiFi3,
569  MathFidelity::HiFi4
570  };
571  return values;
572 }
573 
574 inline const char * const *EnumNamesMathFidelity() {
575  static const char * const names[6] = {
576  "LoFi",
577  "",
578  "HiFi2",
579  "HiFi3",
580  "HiFi4",
581  nullptr
582  };
583  return names;
584 }
585 
586 inline const char *EnumNameMathFidelity(MathFidelity e) {
587  if (::flatbuffers::IsOutRange(e, MathFidelity::LoFi, MathFidelity::HiFi4)) return "";
588  const size_t index = static_cast<size_t>(e);
589  return EnumNamesMathFidelity()[index];
590 }
591 
592 FLATBUFFERS_MANUALLY_ALIGNED_STRUCT(4) Dim2d FLATBUFFERS_FINAL_CLASS {
593  private:
594  int32_t y_;
595  int32_t x_;
596 
597  public:
598  struct Traits;
599  Dim2d()
600  : y_(0),
601  x_(0) {
602  }
603  Dim2d(int32_t _y, int32_t _x)
604  : y_(::flatbuffers::EndianScalar(_y)),
605  x_(::flatbuffers::EndianScalar(_x)) {
606  }
607  int32_t y() const {
608  return ::flatbuffers::EndianScalar(y_);
609  }
610  int32_t x() const {
611  return ::flatbuffers::EndianScalar(x_);
612  }
613 };
614 FLATBUFFERS_STRUCT_END(Dim2d, 8);
615 
616 struct Dim2d::Traits {
617  using type = Dim2d;
618 };
619 
620 FLATBUFFERS_MANUALLY_ALIGNED_STRUCT(4) Dim2dRange FLATBUFFERS_FINAL_CLASS {
621  private:
622  tt::target::Dim2d loc_;
623  tt::target::Dim2d size_;
624 
625  public:
626  struct Traits;
627  Dim2dRange()
628  : loc_(),
629  size_() {
630  }
631  Dim2dRange(const tt::target::Dim2d &_loc, const tt::target::Dim2d &_size)
632  : loc_(_loc),
633  size_(_size) {
634  }
635  const tt::target::Dim2d &loc() const {
636  return loc_;
637  }
638  const tt::target::Dim2d &size() const {
639  return size_;
640  }
641 };
642 FLATBUFFERS_STRUCT_END(Dim2dRange, 16);
643 
644 struct Dim2dRange::Traits {
645  using type = Dim2dRange;
646 };
647 
648 FLATBUFFERS_MANUALLY_ALIGNED_STRUCT(4) ChipCoord FLATBUFFERS_FINAL_CLASS {
649  private:
650  uint32_t rack_;
651  uint32_t shelf_;
652  uint32_t y_;
653  uint32_t x_;
654 
655  public:
656  struct Traits;
657  ChipCoord()
658  : rack_(0),
659  shelf_(0),
660  y_(0),
661  x_(0) {
662  }
663  ChipCoord(uint32_t _rack, uint32_t _shelf, uint32_t _y, uint32_t _x)
664  : rack_(::flatbuffers::EndianScalar(_rack)),
665  shelf_(::flatbuffers::EndianScalar(_shelf)),
666  y_(::flatbuffers::EndianScalar(_y)),
667  x_(::flatbuffers::EndianScalar(_x)) {
668  }
669  uint32_t rack() const {
670  return ::flatbuffers::EndianScalar(rack_);
671  }
672  uint32_t shelf() const {
673  return ::flatbuffers::EndianScalar(shelf_);
674  }
675  uint32_t y() const {
676  return ::flatbuffers::EndianScalar(y_);
677  }
678  uint32_t x() const {
679  return ::flatbuffers::EndianScalar(x_);
680  }
681 };
682 FLATBUFFERS_STRUCT_END(ChipCoord, 16);
683 
684 struct ChipCoord::Traits {
685  using type = ChipCoord;
686 };
687 
688 FLATBUFFERS_MANUALLY_ALIGNED_STRUCT(4) ChipChannel FLATBUFFERS_FINAL_CLASS {
689  private:
690  uint32_t device_id0_;
691  tt::target::Dim2d ethernet_core_coord0_;
692  uint32_t device_id1_;
693  tt::target::Dim2d ethernet_core_coord1_;
694 
695  public:
696  struct Traits;
697  ChipChannel()
698  : device_id0_(0),
699  ethernet_core_coord0_(),
700  device_id1_(0),
701  ethernet_core_coord1_() {
702  }
703  ChipChannel(uint32_t _device_id0, const tt::target::Dim2d &_ethernet_core_coord0, uint32_t _device_id1, const tt::target::Dim2d &_ethernet_core_coord1)
704  : device_id0_(::flatbuffers::EndianScalar(_device_id0)),
705  ethernet_core_coord0_(_ethernet_core_coord0),
706  device_id1_(::flatbuffers::EndianScalar(_device_id1)),
707  ethernet_core_coord1_(_ethernet_core_coord1) {
708  }
709  uint32_t device_id0() const {
710  return ::flatbuffers::EndianScalar(device_id0_);
711  }
712  const tt::target::Dim2d &ethernet_core_coord0() const {
713  return ethernet_core_coord0_;
714  }
715  uint32_t device_id1() const {
716  return ::flatbuffers::EndianScalar(device_id1_);
717  }
718  const tt::target::Dim2d &ethernet_core_coord1() const {
719  return ethernet_core_coord1_;
720  }
721 };
722 FLATBUFFERS_STRUCT_END(ChipChannel, 24);
723 
724 struct ChipChannel::Traits {
725  using type = ChipChannel;
726 };
727 
728 struct ShardSpec FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
729  typedef ShardSpecBuilder Builder;
730  struct Traits;
731  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
732  VT_SHARD_SHAPE = 4
733  };
734  const ::flatbuffers::Vector<int64_t> *shard_shape() const {
735  return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_SHARD_SHAPE);
736  }
737  bool Verify(::flatbuffers::Verifier &verifier) const {
738  return VerifyTableStart(verifier) &&
739  VerifyOffset(verifier, VT_SHARD_SHAPE) &&
740  verifier.VerifyVector(shard_shape()) &&
741  verifier.EndTable();
742  }
743 };
744 
745 struct ShardSpecBuilder {
746  typedef ShardSpec Table;
747  ::flatbuffers::FlatBufferBuilder &fbb_;
748  ::flatbuffers::uoffset_t start_;
749  void add_shard_shape(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> shard_shape) {
750  fbb_.AddOffset(ShardSpec::VT_SHARD_SHAPE, shard_shape);
751  }
752  explicit ShardSpecBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
753  : fbb_(_fbb) {
754  start_ = fbb_.StartTable();
755  }
756  ::flatbuffers::Offset<ShardSpec> Finish() {
757  const auto end = fbb_.EndTable(start_);
758  auto o = ::flatbuffers::Offset<ShardSpec>(end);
759  return o;
760  }
761 };
762 
763 inline ::flatbuffers::Offset<ShardSpec> CreateShardSpec(
764  ::flatbuffers::FlatBufferBuilder &_fbb,
765  ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> shard_shape = 0) {
766  ShardSpecBuilder builder_(_fbb);
767  builder_.add_shard_shape(shard_shape);
768  return builder_.Finish();
769 }
770 
771 struct ShardSpec::Traits {
772  using type = ShardSpec;
773  static auto constexpr Create = CreateShardSpec;
774 };
775 
776 inline ::flatbuffers::Offset<ShardSpec> CreateShardSpecDirect(
777  ::flatbuffers::FlatBufferBuilder &_fbb,
778  const std::vector<int64_t> *shard_shape = nullptr) {
779  auto shard_shape__ = shard_shape ? _fbb.CreateVector<int64_t>(*shard_shape) : 0;
780  return tt::target::CreateShardSpec(
781  _fbb,
782  shard_shape__);
783 }
784 
785 struct MemoryConfigDesc FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
786  typedef MemoryConfigDescBuilder Builder;
787  struct Traits;
788  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
789  VT_TENSOR_MEMORY_LAYOUT = 4,
790  VT_BUFFER_TYPE = 6,
791  VT_SHARD_SPEC = 8
792  };
793  tt::target::TensorMemoryLayout tensor_memory_layout() const {
794  return static_cast<tt::target::TensorMemoryLayout>(GetField<uint16_t>(VT_TENSOR_MEMORY_LAYOUT, 0));
795  }
796  tt::target::BufferType buffer_type() const {
797  return static_cast<tt::target::BufferType>(GetField<uint16_t>(VT_BUFFER_TYPE, 0));
798  }
799  const tt::target::ShardSpec *shard_spec() const {
800  return GetPointer<const tt::target::ShardSpec *>(VT_SHARD_SPEC);
801  }
802  bool Verify(::flatbuffers::Verifier &verifier) const {
803  return VerifyTableStart(verifier) &&
804  VerifyField<uint16_t>(verifier, VT_TENSOR_MEMORY_LAYOUT, 2) &&
805  VerifyField<uint16_t>(verifier, VT_BUFFER_TYPE, 2) &&
806  VerifyOffset(verifier, VT_SHARD_SPEC) &&
807  verifier.VerifyTable(shard_spec()) &&
808  verifier.EndTable();
809  }
810 };
811 
812 struct MemoryConfigDescBuilder {
813  typedef MemoryConfigDesc Table;
814  ::flatbuffers::FlatBufferBuilder &fbb_;
815  ::flatbuffers::uoffset_t start_;
816  void add_tensor_memory_layout(tt::target::TensorMemoryLayout tensor_memory_layout) {
817  fbb_.AddElement<uint16_t>(MemoryConfigDesc::VT_TENSOR_MEMORY_LAYOUT, static_cast<uint16_t>(tensor_memory_layout), 0);
818  }
819  void add_buffer_type(tt::target::BufferType buffer_type) {
820  fbb_.AddElement<uint16_t>(MemoryConfigDesc::VT_BUFFER_TYPE, static_cast<uint16_t>(buffer_type), 0);
821  }
822  void add_shard_spec(::flatbuffers::Offset<tt::target::ShardSpec> shard_spec) {
823  fbb_.AddOffset(MemoryConfigDesc::VT_SHARD_SPEC, shard_spec);
824  }
825  explicit MemoryConfigDescBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
826  : fbb_(_fbb) {
827  start_ = fbb_.StartTable();
828  }
829  ::flatbuffers::Offset<MemoryConfigDesc> Finish() {
830  const auto end = fbb_.EndTable(start_);
831  auto o = ::flatbuffers::Offset<MemoryConfigDesc>(end);
832  return o;
833  }
834 };
835 
836 inline ::flatbuffers::Offset<MemoryConfigDesc> CreateMemoryConfigDesc(
837  ::flatbuffers::FlatBufferBuilder &_fbb,
838  tt::target::TensorMemoryLayout tensor_memory_layout = tt::target::TensorMemoryLayout::None,
839  tt::target::BufferType buffer_type = tt::target::BufferType::DRAM,
840  ::flatbuffers::Offset<tt::target::ShardSpec> shard_spec = 0) {
841  MemoryConfigDescBuilder builder_(_fbb);
842  builder_.add_shard_spec(shard_spec);
843  builder_.add_buffer_type(buffer_type);
844  builder_.add_tensor_memory_layout(tensor_memory_layout);
845  return builder_.Finish();
846 }
847 
848 struct MemoryConfigDesc::Traits {
849  using type = MemoryConfigDesc;
850  static auto constexpr Create = CreateMemoryConfigDesc;
851 };
852 
853 struct ReplicateTensor FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
854  typedef ReplicateTensorBuilder Builder;
855  struct Traits;
856  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
857  VT_REPLICATION_FACTOR = 4
858  };
859  uint32_t replication_factor() const {
860  return GetField<uint32_t>(VT_REPLICATION_FACTOR, 0);
861  }
862  bool Verify(::flatbuffers::Verifier &verifier) const {
863  return VerifyTableStart(verifier) &&
864  VerifyField<uint32_t>(verifier, VT_REPLICATION_FACTOR, 4) &&
865  verifier.EndTable();
866  }
867 };
868 
869 struct ReplicateTensorBuilder {
870  typedef ReplicateTensor Table;
871  ::flatbuffers::FlatBufferBuilder &fbb_;
872  ::flatbuffers::uoffset_t start_;
873  void add_replication_factor(uint32_t replication_factor) {
874  fbb_.AddElement<uint32_t>(ReplicateTensor::VT_REPLICATION_FACTOR, replication_factor, 0);
875  }
876  explicit ReplicateTensorBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
877  : fbb_(_fbb) {
878  start_ = fbb_.StartTable();
879  }
880  ::flatbuffers::Offset<ReplicateTensor> Finish() {
881  const auto end = fbb_.EndTable(start_);
882  auto o = ::flatbuffers::Offset<ReplicateTensor>(end);
883  return o;
884  }
885 };
886 
887 inline ::flatbuffers::Offset<ReplicateTensor> CreateReplicateTensor(
888  ::flatbuffers::FlatBufferBuilder &_fbb,
889  uint32_t replication_factor = 0) {
890  ReplicateTensorBuilder builder_(_fbb);
891  builder_.add_replication_factor(replication_factor);
892  return builder_.Finish();
893 }
894 
895 struct ReplicateTensor::Traits {
896  using type = ReplicateTensor;
897  static auto constexpr Create = CreateReplicateTensor;
898 };
899 
900 struct ShardTensor FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
901  typedef ShardTensorBuilder Builder;
902  struct Traits;
903  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
904  VT_SHARD_DIM = 4
905  };
906  uint32_t shard_dim() const {
907  return GetField<uint32_t>(VT_SHARD_DIM, 0);
908  }
909  bool Verify(::flatbuffers::Verifier &verifier) const {
910  return VerifyTableStart(verifier) &&
911  VerifyField<uint32_t>(verifier, VT_SHARD_DIM, 4) &&
912  verifier.EndTable();
913  }
914 };
915 
916 struct ShardTensorBuilder {
917  typedef ShardTensor Table;
918  ::flatbuffers::FlatBufferBuilder &fbb_;
919  ::flatbuffers::uoffset_t start_;
920  void add_shard_dim(uint32_t shard_dim) {
921  fbb_.AddElement<uint32_t>(ShardTensor::VT_SHARD_DIM, shard_dim, 0);
922  }
923  explicit ShardTensorBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
924  : fbb_(_fbb) {
925  start_ = fbb_.StartTable();
926  }
927  ::flatbuffers::Offset<ShardTensor> Finish() {
928  const auto end = fbb_.EndTable(start_);
929  auto o = ::flatbuffers::Offset<ShardTensor>(end);
930  return o;
931  }
932 };
933 
934 inline ::flatbuffers::Offset<ShardTensor> CreateShardTensor(
935  ::flatbuffers::FlatBufferBuilder &_fbb,
936  uint32_t shard_dim = 0) {
937  ShardTensorBuilder builder_(_fbb);
938  builder_.add_shard_dim(shard_dim);
939  return builder_.Finish();
940 }
941 
942 struct ShardTensor::Traits {
943  using type = ShardTensor;
944  static auto constexpr Create = CreateShardTensor;
945 };
946 
947 struct ShardTensor2D FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
948  typedef ShardTensor2DBuilder Builder;
949  struct Traits;
950  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
951  VT_SHARD_MESH = 4
952  };
953  const tt::target::Dim2d *shard_mesh() const {
954  return GetStruct<const tt::target::Dim2d *>(VT_SHARD_MESH);
955  }
956  bool Verify(::flatbuffers::Verifier &verifier) const {
957  return VerifyTableStart(verifier) &&
958  VerifyField<tt::target::Dim2d>(verifier, VT_SHARD_MESH, 4) &&
959  verifier.EndTable();
960  }
961 };
962 
963 struct ShardTensor2DBuilder {
964  typedef ShardTensor2D Table;
965  ::flatbuffers::FlatBufferBuilder &fbb_;
966  ::flatbuffers::uoffset_t start_;
967  void add_shard_mesh(const tt::target::Dim2d *shard_mesh) {
968  fbb_.AddStruct(ShardTensor2D::VT_SHARD_MESH, shard_mesh);
969  }
970  explicit ShardTensor2DBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
971  : fbb_(_fbb) {
972  start_ = fbb_.StartTable();
973  }
974  ::flatbuffers::Offset<ShardTensor2D> Finish() {
975  const auto end = fbb_.EndTable(start_);
976  auto o = ::flatbuffers::Offset<ShardTensor2D>(end);
977  return o;
978  }
979 };
980 
981 inline ::flatbuffers::Offset<ShardTensor2D> CreateShardTensor2D(
982  ::flatbuffers::FlatBufferBuilder &_fbb,
983  const tt::target::Dim2d *shard_mesh = nullptr) {
984  ShardTensor2DBuilder builder_(_fbb);
985  builder_.add_shard_mesh(shard_mesh);
986  return builder_.Finish();
987 }
988 
989 struct ShardTensor2D::Traits {
990  using type = ShardTensor2D;
991  static auto constexpr Create = CreateShardTensor2D;
992 };
993 
994 struct AllGatherTensor FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
995  typedef AllGatherTensorBuilder Builder;
996  struct Traits;
997  bool Verify(::flatbuffers::Verifier &verifier) const {
998  return VerifyTableStart(verifier) &&
999  verifier.EndTable();
1000  }
1001 };
1002 
1003 struct AllGatherTensorBuilder {
1004  typedef AllGatherTensor Table;
1005  ::flatbuffers::FlatBufferBuilder &fbb_;
1006  ::flatbuffers::uoffset_t start_;
1007  explicit AllGatherTensorBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
1008  : fbb_(_fbb) {
1009  start_ = fbb_.StartTable();
1010  }
1011  ::flatbuffers::Offset<AllGatherTensor> Finish() {
1012  const auto end = fbb_.EndTable(start_);
1013  auto o = ::flatbuffers::Offset<AllGatherTensor>(end);
1014  return o;
1015  }
1016 };
1017 
1018 inline ::flatbuffers::Offset<AllGatherTensor> CreateAllGatherTensor(
1019  ::flatbuffers::FlatBufferBuilder &_fbb) {
1020  AllGatherTensorBuilder builder_(_fbb);
1021  return builder_.Finish();
1022 }
1023 
1024 struct AllGatherTensor::Traits {
1025  using type = AllGatherTensor;
1026  static auto constexpr Create = CreateAllGatherTensor;
1027 };
1028 
1029 struct DistributionStrategy FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
1030  typedef DistributionStrategyBuilder Builder;
1031  struct Traits;
1032  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1033  VT_STRATEGY_TYPE = 4,
1034  VT_STRATEGY = 6
1035  };
1036  tt::target::DistributedTensorConfig strategy_type() const {
1037  return static_cast<tt::target::DistributedTensorConfig>(GetField<uint8_t>(VT_STRATEGY_TYPE, 0));
1038  }
1039  const void *strategy() const {
1040  return GetPointer<const void *>(VT_STRATEGY);
1041  }
1042  template<typename T> const T *strategy_as() const;
1043  const tt::target::ReplicateTensor *strategy_as_ReplicateTensor() const {
1044  return strategy_type() == tt::target::DistributedTensorConfig::ReplicateTensor ? static_cast<const tt::target::ReplicateTensor *>(strategy()) : nullptr;
1045  }
1046  const tt::target::ShardTensor *strategy_as_ShardTensor() const {
1047  return strategy_type() == tt::target::DistributedTensorConfig::ShardTensor ? static_cast<const tt::target::ShardTensor *>(strategy()) : nullptr;
1048  }
1049  const tt::target::ShardTensor2D *strategy_as_ShardTensor2D() const {
1050  return strategy_type() == tt::target::DistributedTensorConfig::ShardTensor2D ? static_cast<const tt::target::ShardTensor2D *>(strategy()) : nullptr;
1051  }
1052  const tt::target::AllGatherTensor *strategy_as_AllGatherTensor() const {
1053  return strategy_type() == tt::target::DistributedTensorConfig::AllGatherTensor ? static_cast<const tt::target::AllGatherTensor *>(strategy()) : nullptr;
1054  }
1055  bool Verify(::flatbuffers::Verifier &verifier) const {
1056  return VerifyTableStart(verifier) &&
1057  VerifyField<uint8_t>(verifier, VT_STRATEGY_TYPE, 1) &&
1058  VerifyOffset(verifier, VT_STRATEGY) &&
1059  VerifyDistributedTensorConfig(verifier, strategy(), strategy_type()) &&
1060  verifier.EndTable();
1061  }
1062 };
1063 
1064 template<> inline const tt::target::ReplicateTensor *DistributionStrategy::strategy_as<tt::target::ReplicateTensor>() const {
1065  return strategy_as_ReplicateTensor();
1066 }
1067 
1068 template<> inline const tt::target::ShardTensor *DistributionStrategy::strategy_as<tt::target::ShardTensor>() const {
1069  return strategy_as_ShardTensor();
1070 }
1071 
1072 template<> inline const tt::target::ShardTensor2D *DistributionStrategy::strategy_as<tt::target::ShardTensor2D>() const {
1073  return strategy_as_ShardTensor2D();
1074 }
1075 
1076 template<> inline const tt::target::AllGatherTensor *DistributionStrategy::strategy_as<tt::target::AllGatherTensor>() const {
1077  return strategy_as_AllGatherTensor();
1078 }
1079 
1080 struct DistributionStrategyBuilder {
1081  typedef DistributionStrategy Table;
1082  ::flatbuffers::FlatBufferBuilder &fbb_;
1083  ::flatbuffers::uoffset_t start_;
1084  void add_strategy_type(tt::target::DistributedTensorConfig strategy_type) {
1085  fbb_.AddElement<uint8_t>(DistributionStrategy::VT_STRATEGY_TYPE, static_cast<uint8_t>(strategy_type), 0);
1086  }
1087  void add_strategy(::flatbuffers::Offset<void> strategy) {
1088  fbb_.AddOffset(DistributionStrategy::VT_STRATEGY, strategy);
1089  }
1090  explicit DistributionStrategyBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
1091  : fbb_(_fbb) {
1092  start_ = fbb_.StartTable();
1093  }
1094  ::flatbuffers::Offset<DistributionStrategy> Finish() {
1095  const auto end = fbb_.EndTable(start_);
1096  auto o = ::flatbuffers::Offset<DistributionStrategy>(end);
1097  return o;
1098  }
1099 };
1100 
1101 inline ::flatbuffers::Offset<DistributionStrategy> CreateDistributionStrategy(
1102  ::flatbuffers::FlatBufferBuilder &_fbb,
1103  tt::target::DistributedTensorConfig strategy_type = tt::target::DistributedTensorConfig::NONE,
1104  ::flatbuffers::Offset<void> strategy = 0) {
1105  DistributionStrategyBuilder builder_(_fbb);
1106  builder_.add_strategy(strategy);
1107  builder_.add_strategy_type(strategy_type);
1108  return builder_.Finish();
1109 }
1110 
1111 struct DistributionStrategy::Traits {
1112  using type = DistributionStrategy;
1113  static auto constexpr Create = CreateDistributionStrategy;
1114 };
1115 
1116 struct MemoryDesc FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
1117  typedef MemoryDescBuilder Builder;
1118  struct Traits;
1119  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1120  VT_SHAPE = 4,
1121  VT_TILE_SHAPE = 6,
1122  VT_DATA_TYPE = 8,
1123  VT_MEMORY_SPACE = 10,
1124  VT_MEMORY_LAYOUT = 12,
1125  VT_SIZE = 14
1126  };
1127  const ::flatbuffers::Vector<int32_t> *shape() const {
1128  return GetPointer<const ::flatbuffers::Vector<int32_t> *>(VT_SHAPE);
1129  }
1130  const tt::target::Dim2d *tile_shape() const {
1131  return GetStruct<const tt::target::Dim2d *>(VT_TILE_SHAPE);
1132  }
1133  tt::target::DataType data_type() const {
1134  return static_cast<tt::target::DataType>(GetField<uint16_t>(VT_DATA_TYPE, 0));
1135  }
1136  tt::target::MemorySpace memory_space() const {
1137  return static_cast<tt::target::MemorySpace>(GetField<uint16_t>(VT_MEMORY_SPACE, 0));
1138  }
1139  tt::target::TensorMemoryLayout memory_layout() const {
1140  return static_cast<tt::target::TensorMemoryLayout>(GetField<uint16_t>(VT_MEMORY_LAYOUT, 0));
1141  }
1142  uint64_t size() const {
1143  return GetField<uint64_t>(VT_SIZE, 0);
1144  }
1145  bool Verify(::flatbuffers::Verifier &verifier) const {
1146  return VerifyTableStart(verifier) &&
1147  VerifyOffset(verifier, VT_SHAPE) &&
1148  verifier.VerifyVector(shape()) &&
1149  VerifyField<tt::target::Dim2d>(verifier, VT_TILE_SHAPE, 4) &&
1150  VerifyField<uint16_t>(verifier, VT_DATA_TYPE, 2) &&
1151  VerifyField<uint16_t>(verifier, VT_MEMORY_SPACE, 2) &&
1152  VerifyField<uint16_t>(verifier, VT_MEMORY_LAYOUT, 2) &&
1153  VerifyField<uint64_t>(verifier, VT_SIZE, 8) &&
1154  verifier.EndTable();
1155  }
1156 };
1157 
1158 struct MemoryDescBuilder {
1159  typedef MemoryDesc Table;
1160  ::flatbuffers::FlatBufferBuilder &fbb_;
1161  ::flatbuffers::uoffset_t start_;
1162  void add_shape(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> shape) {
1163  fbb_.AddOffset(MemoryDesc::VT_SHAPE, shape);
1164  }
1165  void add_tile_shape(const tt::target::Dim2d *tile_shape) {
1166  fbb_.AddStruct(MemoryDesc::VT_TILE_SHAPE, tile_shape);
1167  }
1168  void add_data_type(tt::target::DataType data_type) {
1169  fbb_.AddElement<uint16_t>(MemoryDesc::VT_DATA_TYPE, static_cast<uint16_t>(data_type), 0);
1170  }
1171  void add_memory_space(tt::target::MemorySpace memory_space) {
1172  fbb_.AddElement<uint16_t>(MemoryDesc::VT_MEMORY_SPACE, static_cast<uint16_t>(memory_space), 0);
1173  }
1174  void add_memory_layout(tt::target::TensorMemoryLayout memory_layout) {
1175  fbb_.AddElement<uint16_t>(MemoryDesc::VT_MEMORY_LAYOUT, static_cast<uint16_t>(memory_layout), 0);
1176  }
1177  void add_size(uint64_t size) {
1178  fbb_.AddElement<uint64_t>(MemoryDesc::VT_SIZE, size, 0);
1179  }
1180  explicit MemoryDescBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
1181  : fbb_(_fbb) {
1182  start_ = fbb_.StartTable();
1183  }
1184  ::flatbuffers::Offset<MemoryDesc> Finish() {
1185  const auto end = fbb_.EndTable(start_);
1186  auto o = ::flatbuffers::Offset<MemoryDesc>(end);
1187  return o;
1188  }
1189 };
1190 
1191 inline ::flatbuffers::Offset<MemoryDesc> CreateMemoryDesc(
1192  ::flatbuffers::FlatBufferBuilder &_fbb,
1193  ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> shape = 0,
1194  const tt::target::Dim2d *tile_shape = nullptr,
1195  tt::target::DataType data_type = tt::target::DataType::Float32,
1196  tt::target::MemorySpace memory_space = tt::target::MemorySpace::System,
1197  tt::target::TensorMemoryLayout memory_layout = tt::target::TensorMemoryLayout::None,
1198  uint64_t size = 0) {
1199  MemoryDescBuilder builder_(_fbb);
1200  builder_.add_size(size);
1201  builder_.add_tile_shape(tile_shape);
1202  builder_.add_shape(shape);
1203  builder_.add_memory_layout(memory_layout);
1204  builder_.add_memory_space(memory_space);
1205  builder_.add_data_type(data_type);
1206  return builder_.Finish();
1207 }
1208 
1209 struct MemoryDesc::Traits {
1210  using type = MemoryDesc;
1211  static auto constexpr Create = CreateMemoryDesc;
1212 };
1213 
1214 inline ::flatbuffers::Offset<MemoryDesc> CreateMemoryDescDirect(
1215  ::flatbuffers::FlatBufferBuilder &_fbb,
1216  const std::vector<int32_t> *shape = nullptr,
1217  const tt::target::Dim2d *tile_shape = nullptr,
1218  tt::target::DataType data_type = tt::target::DataType::Float32,
1219  tt::target::MemorySpace memory_space = tt::target::MemorySpace::System,
1220  tt::target::TensorMemoryLayout memory_layout = tt::target::TensorMemoryLayout::None,
1221  uint64_t size = 0) {
1222  auto shape__ = shape ? _fbb.CreateVector<int32_t>(*shape) : 0;
1223  return tt::target::CreateMemoryDesc(
1224  _fbb,
1225  shape__,
1226  tile_shape,
1227  data_type,
1228  memory_space,
1229  memory_layout,
1230  size);
1231 }
1232 
1233 struct LayoutDesc FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
1234  typedef LayoutDescBuilder Builder;
1235  struct Traits;
1236  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1237  VT_STRIDE = 4,
1238  VT_OOB_VAL = 6,
1239  VT_CORE_RANGE_SET = 8,
1240  VT_MEMORY_DESC = 10,
1241  VT_STRATEGY = 12
1242  };
1243  const ::flatbuffers::Vector<int32_t> *stride() const {
1244  return GetPointer<const ::flatbuffers::Vector<int32_t> *>(VT_STRIDE);
1245  }
1246  tt::target::OOBVal oob_val() const {
1247  return static_cast<tt::target::OOBVal>(GetField<uint16_t>(VT_OOB_VAL, 0));
1248  }
1249  const ::flatbuffers::Vector<const tt::target::Dim2dRange *> *core_range_set() const {
1250  return GetPointer<const ::flatbuffers::Vector<const tt::target::Dim2dRange *> *>(VT_CORE_RANGE_SET);
1251  }
1252  const tt::target::MemoryDesc *memory_desc() const {
1253  return GetPointer<const tt::target::MemoryDesc *>(VT_MEMORY_DESC);
1254  }
1255  const tt::target::DistributionStrategy *strategy() const {
1256  return GetPointer<const tt::target::DistributionStrategy *>(VT_STRATEGY);
1257  }
1258  bool Verify(::flatbuffers::Verifier &verifier) const {
1259  return VerifyTableStart(verifier) &&
1260  VerifyOffset(verifier, VT_STRIDE) &&
1261  verifier.VerifyVector(stride()) &&
1262  VerifyField<uint16_t>(verifier, VT_OOB_VAL, 2) &&
1263  VerifyOffset(verifier, VT_CORE_RANGE_SET) &&
1264  verifier.VerifyVector(core_range_set()) &&
1265  VerifyOffset(verifier, VT_MEMORY_DESC) &&
1266  verifier.VerifyTable(memory_desc()) &&
1267  VerifyOffset(verifier, VT_STRATEGY) &&
1268  verifier.VerifyTable(strategy()) &&
1269  verifier.EndTable();
1270  }
1271 };
1272 
1273 struct LayoutDescBuilder {
1274  typedef LayoutDesc Table;
1275  ::flatbuffers::FlatBufferBuilder &fbb_;
1276  ::flatbuffers::uoffset_t start_;
1277  void add_stride(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> stride) {
1278  fbb_.AddOffset(LayoutDesc::VT_STRIDE, stride);
1279  }
1280  void add_oob_val(tt::target::OOBVal oob_val) {
1281  fbb_.AddElement<uint16_t>(LayoutDesc::VT_OOB_VAL, static_cast<uint16_t>(oob_val), 0);
1282  }
1283  void add_core_range_set(::flatbuffers::Offset<::flatbuffers::Vector<const tt::target::Dim2dRange *>> core_range_set) {
1284  fbb_.AddOffset(LayoutDesc::VT_CORE_RANGE_SET, core_range_set);
1285  }
1286  void add_memory_desc(::flatbuffers::Offset<tt::target::MemoryDesc> memory_desc) {
1287  fbb_.AddOffset(LayoutDesc::VT_MEMORY_DESC, memory_desc);
1288  }
1289  void add_strategy(::flatbuffers::Offset<tt::target::DistributionStrategy> strategy) {
1290  fbb_.AddOffset(LayoutDesc::VT_STRATEGY, strategy);
1291  }
1292  explicit LayoutDescBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
1293  : fbb_(_fbb) {
1294  start_ = fbb_.StartTable();
1295  }
1296  ::flatbuffers::Offset<LayoutDesc> Finish() {
1297  const auto end = fbb_.EndTable(start_);
1298  auto o = ::flatbuffers::Offset<LayoutDesc>(end);
1299  return o;
1300  }
1301 };
1302 
1303 inline ::flatbuffers::Offset<LayoutDesc> CreateLayoutDesc(
1304  ::flatbuffers::FlatBufferBuilder &_fbb,
1305  ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> stride = 0,
1306  tt::target::OOBVal oob_val = tt::target::OOBVal::Undef,
1307  ::flatbuffers::Offset<::flatbuffers::Vector<const tt::target::Dim2dRange *>> core_range_set = 0,
1308  ::flatbuffers::Offset<tt::target::MemoryDesc> memory_desc = 0,
1309  ::flatbuffers::Offset<tt::target::DistributionStrategy> strategy = 0) {
1310  LayoutDescBuilder builder_(_fbb);
1311  builder_.add_strategy(strategy);
1312  builder_.add_memory_desc(memory_desc);
1313  builder_.add_core_range_set(core_range_set);
1314  builder_.add_stride(stride);
1315  builder_.add_oob_val(oob_val);
1316  return builder_.Finish();
1317 }
1318 
1319 struct LayoutDesc::Traits {
1320  using type = LayoutDesc;
1321  static auto constexpr Create = CreateLayoutDesc;
1322 };
1323 
1324 inline ::flatbuffers::Offset<LayoutDesc> CreateLayoutDescDirect(
1325  ::flatbuffers::FlatBufferBuilder &_fbb,
1326  const std::vector<int32_t> *stride = nullptr,
1327  tt::target::OOBVal oob_val = tt::target::OOBVal::Undef,
1328  const std::vector<tt::target::Dim2dRange> *core_range_set = nullptr,
1329  ::flatbuffers::Offset<tt::target::MemoryDesc> memory_desc = 0,
1330  ::flatbuffers::Offset<tt::target::DistributionStrategy> strategy = 0) {
1331  auto stride__ = stride ? _fbb.CreateVector<int32_t>(*stride) : 0;
1332  auto core_range_set__ = core_range_set ? _fbb.CreateVectorOfStructs<tt::target::Dim2dRange>(*core_range_set) : 0;
1333  return tt::target::CreateLayoutDesc(
1334  _fbb,
1335  stride__,
1336  oob_val,
1337  core_range_set__,
1338  memory_desc,
1339  strategy);
1340 }
1341 
1342 struct TensorDesc FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
1343  typedef TensorDescBuilder Builder;
1344  struct Traits;
1345  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1346  VT_SHAPE = 4,
1347  VT_LAYOUT = 6
1348  };
1349  const ::flatbuffers::Vector<int32_t> *shape() const {
1350  return GetPointer<const ::flatbuffers::Vector<int32_t> *>(VT_SHAPE);
1351  }
1352  const tt::target::LayoutDesc *layout() const {
1353  return GetPointer<const tt::target::LayoutDesc *>(VT_LAYOUT);
1354  }
1355  bool Verify(::flatbuffers::Verifier &verifier) const {
1356  return VerifyTableStart(verifier) &&
1357  VerifyOffset(verifier, VT_SHAPE) &&
1358  verifier.VerifyVector(shape()) &&
1359  VerifyOffset(verifier, VT_LAYOUT) &&
1360  verifier.VerifyTable(layout()) &&
1361  verifier.EndTable();
1362  }
1363 };
1364 
1365 struct TensorDescBuilder {
1366  typedef TensorDesc Table;
1367  ::flatbuffers::FlatBufferBuilder &fbb_;
1368  ::flatbuffers::uoffset_t start_;
1369  void add_shape(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> shape) {
1370  fbb_.AddOffset(TensorDesc::VT_SHAPE, shape);
1371  }
1372  void add_layout(::flatbuffers::Offset<tt::target::LayoutDesc> layout) {
1373  fbb_.AddOffset(TensorDesc::VT_LAYOUT, layout);
1374  }
1375  explicit TensorDescBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
1376  : fbb_(_fbb) {
1377  start_ = fbb_.StartTable();
1378  }
1379  ::flatbuffers::Offset<TensorDesc> Finish() {
1380  const auto end = fbb_.EndTable(start_);
1381  auto o = ::flatbuffers::Offset<TensorDesc>(end);
1382  return o;
1383  }
1384 };
1385 
1386 inline ::flatbuffers::Offset<TensorDesc> CreateTensorDesc(
1387  ::flatbuffers::FlatBufferBuilder &_fbb,
1388  ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> shape = 0,
1389  ::flatbuffers::Offset<tt::target::LayoutDesc> layout = 0) {
1390  TensorDescBuilder builder_(_fbb);
1391  builder_.add_layout(layout);
1392  builder_.add_shape(shape);
1393  return builder_.Finish();
1394 }
1395 
1396 struct TensorDesc::Traits {
1397  using type = TensorDesc;
1398  static auto constexpr Create = CreateTensorDesc;
1399 };
1400 
1401 inline ::flatbuffers::Offset<TensorDesc> CreateTensorDescDirect(
1402  ::flatbuffers::FlatBufferBuilder &_fbb,
1403  const std::vector<int32_t> *shape = nullptr,
1404  ::flatbuffers::Offset<tt::target::LayoutDesc> layout = 0) {
1405  auto shape__ = shape ? _fbb.CreateVector<int32_t>(*shape) : 0;
1406  return tt::target::CreateTensorDesc(
1407  _fbb,
1408  shape__,
1409  layout);
1410 }
1411 
1412 struct CBDesc FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
1413  typedef CBDescBuilder Builder;
1414  struct Traits;
1415  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1416  VT_PORT = 4,
1417  VT_MEMORY_DESC = 6,
1418  VT_PAGE_SIZE = 8,
1419  VT_NUM_BUFFERS = 10
1420  };
1421  uint32_t port() const {
1422  return GetField<uint32_t>(VT_PORT, 0);
1423  }
1424  const tt::target::MemoryDesc *memory_desc() const {
1425  return GetPointer<const tt::target::MemoryDesc *>(VT_MEMORY_DESC);
1426  }
1427  uint64_t page_size() const {
1428  return GetField<uint64_t>(VT_PAGE_SIZE, 0);
1429  }
1430  uint64_t num_buffers() const {
1431  return GetField<uint64_t>(VT_NUM_BUFFERS, 0);
1432  }
1433  bool Verify(::flatbuffers::Verifier &verifier) const {
1434  return VerifyTableStart(verifier) &&
1435  VerifyField<uint32_t>(verifier, VT_PORT, 4) &&
1436  VerifyOffset(verifier, VT_MEMORY_DESC) &&
1437  verifier.VerifyTable(memory_desc()) &&
1438  VerifyField<uint64_t>(verifier, VT_PAGE_SIZE, 8) &&
1439  VerifyField<uint64_t>(verifier, VT_NUM_BUFFERS, 8) &&
1440  verifier.EndTable();
1441  }
1442 };
1443 
1444 struct CBDescBuilder {
1445  typedef CBDesc Table;
1446  ::flatbuffers::FlatBufferBuilder &fbb_;
1447  ::flatbuffers::uoffset_t start_;
1448  void add_port(uint32_t port) {
1449  fbb_.AddElement<uint32_t>(CBDesc::VT_PORT, port, 0);
1450  }
1451  void add_memory_desc(::flatbuffers::Offset<tt::target::MemoryDesc> memory_desc) {
1452  fbb_.AddOffset(CBDesc::VT_MEMORY_DESC, memory_desc);
1453  }
1454  void add_page_size(uint64_t page_size) {
1455  fbb_.AddElement<uint64_t>(CBDesc::VT_PAGE_SIZE, page_size, 0);
1456  }
1457  void add_num_buffers(uint64_t num_buffers) {
1458  fbb_.AddElement<uint64_t>(CBDesc::VT_NUM_BUFFERS, num_buffers, 0);
1459  }
1460  explicit CBDescBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
1461  : fbb_(_fbb) {
1462  start_ = fbb_.StartTable();
1463  }
1464  ::flatbuffers::Offset<CBDesc> Finish() {
1465  const auto end = fbb_.EndTable(start_);
1466  auto o = ::flatbuffers::Offset<CBDesc>(end);
1467  return o;
1468  }
1469 };
1470 
1471 inline ::flatbuffers::Offset<CBDesc> CreateCBDesc(
1472  ::flatbuffers::FlatBufferBuilder &_fbb,
1473  uint32_t port = 0,
1474  ::flatbuffers::Offset<tt::target::MemoryDesc> memory_desc = 0,
1475  uint64_t page_size = 0,
1476  uint64_t num_buffers = 0) {
1477  CBDescBuilder builder_(_fbb);
1478  builder_.add_num_buffers(num_buffers);
1479  builder_.add_page_size(page_size);
1480  builder_.add_memory_desc(memory_desc);
1481  builder_.add_port(port);
1482  return builder_.Finish();
1483 }
1484 
1485 struct CBDesc::Traits {
1486  using type = CBDesc;
1487  static auto constexpr Create = CreateCBDesc;
1488 };
1489 
1490 struct TensorRef FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
1491  typedef TensorRefBuilder Builder;
1492  struct Traits;
1493  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1494  VT_GLOBAL_ID = 4,
1495  VT_ADDRESS = 6,
1496  VT_SIZE = 8,
1497  VT_DESC = 10
1498  };
1499  uint32_t global_id() const {
1500  return GetField<uint32_t>(VT_GLOBAL_ID, 0);
1501  }
1502  uint64_t address() const {
1503  return GetField<uint64_t>(VT_ADDRESS, 0);
1504  }
1505  uint64_t size() const {
1506  return GetField<uint64_t>(VT_SIZE, 0);
1507  }
1508  const tt::target::TensorDesc *desc() const {
1509  return GetPointer<const tt::target::TensorDesc *>(VT_DESC);
1510  }
1511  bool Verify(::flatbuffers::Verifier &verifier) const {
1512  return VerifyTableStart(verifier) &&
1513  VerifyField<uint32_t>(verifier, VT_GLOBAL_ID, 4) &&
1514  VerifyField<uint64_t>(verifier, VT_ADDRESS, 8) &&
1515  VerifyField<uint64_t>(verifier, VT_SIZE, 8) &&
1516  VerifyOffset(verifier, VT_DESC) &&
1517  verifier.VerifyTable(desc()) &&
1518  verifier.EndTable();
1519  }
1520 };
1521 
1522 struct TensorRefBuilder {
1523  typedef TensorRef Table;
1524  ::flatbuffers::FlatBufferBuilder &fbb_;
1525  ::flatbuffers::uoffset_t start_;
1526  void add_global_id(uint32_t global_id) {
1527  fbb_.AddElement<uint32_t>(TensorRef::VT_GLOBAL_ID, global_id, 0);
1528  }
1529  void add_address(uint64_t address) {
1530  fbb_.AddElement<uint64_t>(TensorRef::VT_ADDRESS, address, 0);
1531  }
1532  void add_size(uint64_t size) {
1533  fbb_.AddElement<uint64_t>(TensorRef::VT_SIZE, size, 0);
1534  }
1535  void add_desc(::flatbuffers::Offset<tt::target::TensorDesc> desc) {
1536  fbb_.AddOffset(TensorRef::VT_DESC, desc);
1537  }
1538  explicit TensorRefBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
1539  : fbb_(_fbb) {
1540  start_ = fbb_.StartTable();
1541  }
1542  ::flatbuffers::Offset<TensorRef> Finish() {
1543  const auto end = fbb_.EndTable(start_);
1544  auto o = ::flatbuffers::Offset<TensorRef>(end);
1545  return o;
1546  }
1547 };
1548 
1549 inline ::flatbuffers::Offset<TensorRef> CreateTensorRef(
1550  ::flatbuffers::FlatBufferBuilder &_fbb,
1551  uint32_t global_id = 0,
1552  uint64_t address = 0,
1553  uint64_t size = 0,
1554  ::flatbuffers::Offset<tt::target::TensorDesc> desc = 0) {
1555  TensorRefBuilder builder_(_fbb);
1556  builder_.add_size(size);
1557  builder_.add_address(address);
1558  builder_.add_desc(desc);
1559  builder_.add_global_id(global_id);
1560  return builder_.Finish();
1561 }
1562 
1563 struct TensorRef::Traits {
1564  using type = TensorRef;
1565  static auto constexpr Create = CreateTensorRef;
1566 };
1567 
1568 struct CBRef FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
1569  typedef CBRefBuilder Builder;
1570  struct Traits;
1571  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1572  VT_GLOBAL_ID = 4,
1573  VT_TENSOR_REF = 6,
1574  VT_ADDRESS = 8,
1575  VT_DESC = 10
1576  };
1577  uint32_t global_id() const {
1578  return GetField<uint32_t>(VT_GLOBAL_ID, 0);
1579  }
1580  const tt::target::TensorRef *tensor_ref() const {
1581  return GetPointer<const tt::target::TensorRef *>(VT_TENSOR_REF);
1582  }
1583  uint64_t address() const {
1584  return GetField<uint64_t>(VT_ADDRESS, 0);
1585  }
1586  const tt::target::CBDesc *desc() const {
1587  return GetPointer<const tt::target::CBDesc *>(VT_DESC);
1588  }
1589  bool Verify(::flatbuffers::Verifier &verifier) const {
1590  return VerifyTableStart(verifier) &&
1591  VerifyField<uint32_t>(verifier, VT_GLOBAL_ID, 4) &&
1592  VerifyOffset(verifier, VT_TENSOR_REF) &&
1593  verifier.VerifyTable(tensor_ref()) &&
1594  VerifyField<uint64_t>(verifier, VT_ADDRESS, 8) &&
1595  VerifyOffset(verifier, VT_DESC) &&
1596  verifier.VerifyTable(desc()) &&
1597  verifier.EndTable();
1598  }
1599 };
1600 
1601 struct CBRefBuilder {
1602  typedef CBRef Table;
1603  ::flatbuffers::FlatBufferBuilder &fbb_;
1604  ::flatbuffers::uoffset_t start_;
1605  void add_global_id(uint32_t global_id) {
1606  fbb_.AddElement<uint32_t>(CBRef::VT_GLOBAL_ID, global_id, 0);
1607  }
1608  void add_tensor_ref(::flatbuffers::Offset<tt::target::TensorRef> tensor_ref) {
1609  fbb_.AddOffset(CBRef::VT_TENSOR_REF, tensor_ref);
1610  }
1611  void add_address(uint64_t address) {
1612  fbb_.AddElement<uint64_t>(CBRef::VT_ADDRESS, address, 0);
1613  }
1614  void add_desc(::flatbuffers::Offset<tt::target::CBDesc> desc) {
1615  fbb_.AddOffset(CBRef::VT_DESC, desc);
1616  }
1617  explicit CBRefBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
1618  : fbb_(_fbb) {
1619  start_ = fbb_.StartTable();
1620  }
1621  ::flatbuffers::Offset<CBRef> Finish() {
1622  const auto end = fbb_.EndTable(start_);
1623  auto o = ::flatbuffers::Offset<CBRef>(end);
1624  return o;
1625  }
1626 };
1627 
1628 inline ::flatbuffers::Offset<CBRef> CreateCBRef(
1629  ::flatbuffers::FlatBufferBuilder &_fbb,
1630  uint32_t global_id = 0,
1631  ::flatbuffers::Offset<tt::target::TensorRef> tensor_ref = 0,
1632  uint64_t address = 0,
1633  ::flatbuffers::Offset<tt::target::CBDesc> desc = 0) {
1634  CBRefBuilder builder_(_fbb);
1635  builder_.add_address(address);
1636  builder_.add_desc(desc);
1637  builder_.add_tensor_ref(tensor_ref);
1638  builder_.add_global_id(global_id);
1639  return builder_.Finish();
1640 }
1641 
1642 struct CBRef::Traits {
1643  using type = CBRef;
1644  static auto constexpr Create = CreateCBRef;
1645 };
1646 
1647 struct ChipDesc FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
1648  typedef ChipDescBuilder Builder;
1649  struct Traits;
1650  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1651  VT_ARCH = 4,
1652  VT_GRID_SIZE = 6,
1653  VT_L1_SIZE = 8,
1654  VT_NUM_DRAM_CHANNELS = 10,
1655  VT_DRAM_CHANNEL_SIZE = 12,
1656  VT_NOC_L1_ADDRESS_ALIGN_BYTES = 14,
1657  VT_PCIE_ADDRESS_ALIGN_BYTES = 16,
1658  VT_NOC_DRAM_ADDRESS_ALIGN_BYTES = 18,
1659  VT_L1_UNRESERVED_BASE = 20,
1660  VT_ERISC_L1_UNRESERVED_BASE = 22,
1661  VT_DRAM_UNRESERVED_BASE = 24,
1662  VT_DRAM_UNRESERVED_END = 26,
1663  VT_PHYSICAL_CORES = 28,
1664  VT_SUPPORTED_DATA_TYPES = 30,
1665  VT_SUPPORTED_TILE_SIZES = 32,
1666  VT_NUM_CBS = 34
1667  };
1668  tt::target::Arch arch() const {
1669  return static_cast<tt::target::Arch>(GetField<uint32_t>(VT_ARCH, 0));
1670  }
1671  const tt::target::Dim2d *grid_size() const {
1672  return GetStruct<const tt::target::Dim2d *>(VT_GRID_SIZE);
1673  }
1674  uint64_t l1_size() const {
1675  return GetField<uint64_t>(VT_L1_SIZE, 0);
1676  }
1677  uint32_t num_dram_channels() const {
1678  return GetField<uint32_t>(VT_NUM_DRAM_CHANNELS, 0);
1679  }
1680  uint64_t dram_channel_size() const {
1681  return GetField<uint64_t>(VT_DRAM_CHANNEL_SIZE, 0);
1682  }
1683  uint32_t noc_l1_address_align_bytes() const {
1684  return GetField<uint32_t>(VT_NOC_L1_ADDRESS_ALIGN_BYTES, 0);
1685  }
1686  uint32_t pcie_address_align_bytes() const {
1687  return GetField<uint32_t>(VT_PCIE_ADDRESS_ALIGN_BYTES, 0);
1688  }
1689  uint32_t noc_dram_address_align_bytes() const {
1690  return GetField<uint32_t>(VT_NOC_DRAM_ADDRESS_ALIGN_BYTES, 0);
1691  }
1692  uint32_t l1_unreserved_base() const {
1693  return GetField<uint32_t>(VT_L1_UNRESERVED_BASE, 0);
1694  }
1695  uint32_t erisc_l1_unreserved_base() const {
1696  return GetField<uint32_t>(VT_ERISC_L1_UNRESERVED_BASE, 0);
1697  }
1698  uint32_t dram_unreserved_base() const {
1699  return GetField<uint32_t>(VT_DRAM_UNRESERVED_BASE, 0);
1700  }
1701  uint32_t dram_unreserved_end() const {
1702  return GetField<uint32_t>(VT_DRAM_UNRESERVED_END, 0);
1703  }
1704  const tt::target::ChipPhysicalCores *physical_cores() const {
1705  return GetPointer<const tt::target::ChipPhysicalCores *>(VT_PHYSICAL_CORES);
1706  }
1707  const ::flatbuffers::Vector<tt::target::DataType> *supported_data_types() const {
1708  return GetPointer<const ::flatbuffers::Vector<tt::target::DataType> *>(VT_SUPPORTED_DATA_TYPES);
1709  }
1710  const ::flatbuffers::Vector<const tt::target::Dim2d *> *supported_tile_sizes() const {
1711  return GetPointer<const ::flatbuffers::Vector<const tt::target::Dim2d *> *>(VT_SUPPORTED_TILE_SIZES);
1712  }
1713  uint32_t num_cbs() const {
1714  return GetField<uint32_t>(VT_NUM_CBS, 0);
1715  }
1716  bool Verify(::flatbuffers::Verifier &verifier) const {
1717  return VerifyTableStart(verifier) &&
1718  VerifyField<uint32_t>(verifier, VT_ARCH, 4) &&
1719  VerifyField<tt::target::Dim2d>(verifier, VT_GRID_SIZE, 4) &&
1720  VerifyField<uint64_t>(verifier, VT_L1_SIZE, 8) &&
1721  VerifyField<uint32_t>(verifier, VT_NUM_DRAM_CHANNELS, 4) &&
1722  VerifyField<uint64_t>(verifier, VT_DRAM_CHANNEL_SIZE, 8) &&
1723  VerifyField<uint32_t>(verifier, VT_NOC_L1_ADDRESS_ALIGN_BYTES, 4) &&
1724  VerifyField<uint32_t>(verifier, VT_PCIE_ADDRESS_ALIGN_BYTES, 4) &&
1725  VerifyField<uint32_t>(verifier, VT_NOC_DRAM_ADDRESS_ALIGN_BYTES, 4) &&
1726  VerifyField<uint32_t>(verifier, VT_L1_UNRESERVED_BASE, 4) &&
1727  VerifyField<uint32_t>(verifier, VT_ERISC_L1_UNRESERVED_BASE, 4) &&
1728  VerifyField<uint32_t>(verifier, VT_DRAM_UNRESERVED_BASE, 4) &&
1729  VerifyField<uint32_t>(verifier, VT_DRAM_UNRESERVED_END, 4) &&
1730  VerifyOffset(verifier, VT_PHYSICAL_CORES) &&
1731  verifier.VerifyTable(physical_cores()) &&
1732  VerifyOffset(verifier, VT_SUPPORTED_DATA_TYPES) &&
1733  verifier.VerifyVector(supported_data_types()) &&
1734  VerifyOffset(verifier, VT_SUPPORTED_TILE_SIZES) &&
1735  verifier.VerifyVector(supported_tile_sizes()) &&
1736  VerifyField<uint32_t>(verifier, VT_NUM_CBS, 4) &&
1737  verifier.EndTable();
1738  }
1739 };
1740 
1741 struct ChipDescBuilder {
1742  typedef ChipDesc Table;
1743  ::flatbuffers::FlatBufferBuilder &fbb_;
1744  ::flatbuffers::uoffset_t start_;
1745  void add_arch(tt::target::Arch arch) {
1746  fbb_.AddElement<uint32_t>(ChipDesc::VT_ARCH, static_cast<uint32_t>(arch), 0);
1747  }
1748  void add_grid_size(const tt::target::Dim2d *grid_size) {
1749  fbb_.AddStruct(ChipDesc::VT_GRID_SIZE, grid_size);
1750  }
1751  void add_l1_size(uint64_t l1_size) {
1752  fbb_.AddElement<uint64_t>(ChipDesc::VT_L1_SIZE, l1_size, 0);
1753  }
1754  void add_num_dram_channels(uint32_t num_dram_channels) {
1755  fbb_.AddElement<uint32_t>(ChipDesc::VT_NUM_DRAM_CHANNELS, num_dram_channels, 0);
1756  }
1757  void add_dram_channel_size(uint64_t dram_channel_size) {
1758  fbb_.AddElement<uint64_t>(ChipDesc::VT_DRAM_CHANNEL_SIZE, dram_channel_size, 0);
1759  }
1760  void add_noc_l1_address_align_bytes(uint32_t noc_l1_address_align_bytes) {
1761  fbb_.AddElement<uint32_t>(ChipDesc::VT_NOC_L1_ADDRESS_ALIGN_BYTES, noc_l1_address_align_bytes, 0);
1762  }
1763  void add_pcie_address_align_bytes(uint32_t pcie_address_align_bytes) {
1764  fbb_.AddElement<uint32_t>(ChipDesc::VT_PCIE_ADDRESS_ALIGN_BYTES, pcie_address_align_bytes, 0);
1765  }
1766  void add_noc_dram_address_align_bytes(uint32_t noc_dram_address_align_bytes) {
1767  fbb_.AddElement<uint32_t>(ChipDesc::VT_NOC_DRAM_ADDRESS_ALIGN_BYTES, noc_dram_address_align_bytes, 0);
1768  }
1769  void add_l1_unreserved_base(uint32_t l1_unreserved_base) {
1770  fbb_.AddElement<uint32_t>(ChipDesc::VT_L1_UNRESERVED_BASE, l1_unreserved_base, 0);
1771  }
1772  void add_erisc_l1_unreserved_base(uint32_t erisc_l1_unreserved_base) {
1773  fbb_.AddElement<uint32_t>(ChipDesc::VT_ERISC_L1_UNRESERVED_BASE, erisc_l1_unreserved_base, 0);
1774  }
1775  void add_dram_unreserved_base(uint32_t dram_unreserved_base) {
1776  fbb_.AddElement<uint32_t>(ChipDesc::VT_DRAM_UNRESERVED_BASE, dram_unreserved_base, 0);
1777  }
1778  void add_dram_unreserved_end(uint32_t dram_unreserved_end) {
1779  fbb_.AddElement<uint32_t>(ChipDesc::VT_DRAM_UNRESERVED_END, dram_unreserved_end, 0);
1780  }
1781  void add_physical_cores(::flatbuffers::Offset<tt::target::ChipPhysicalCores> physical_cores) {
1782  fbb_.AddOffset(ChipDesc::VT_PHYSICAL_CORES, physical_cores);
1783  }
1784  void add_supported_data_types(::flatbuffers::Offset<::flatbuffers::Vector<tt::target::DataType>> supported_data_types) {
1785  fbb_.AddOffset(ChipDesc::VT_SUPPORTED_DATA_TYPES, supported_data_types);
1786  }
1787  void add_supported_tile_sizes(::flatbuffers::Offset<::flatbuffers::Vector<const tt::target::Dim2d *>> supported_tile_sizes) {
1788  fbb_.AddOffset(ChipDesc::VT_SUPPORTED_TILE_SIZES, supported_tile_sizes);
1789  }
1790  void add_num_cbs(uint32_t num_cbs) {
1791  fbb_.AddElement<uint32_t>(ChipDesc::VT_NUM_CBS, num_cbs, 0);
1792  }
1793  explicit ChipDescBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
1794  : fbb_(_fbb) {
1795  start_ = fbb_.StartTable();
1796  }
1797  ::flatbuffers::Offset<ChipDesc> Finish() {
1798  const auto end = fbb_.EndTable(start_);
1799  auto o = ::flatbuffers::Offset<ChipDesc>(end);
1800  return o;
1801  }
1802 };
1803 
1804 inline ::flatbuffers::Offset<ChipDesc> CreateChipDesc(
1805  ::flatbuffers::FlatBufferBuilder &_fbb,
1806  tt::target::Arch arch = tt::target::Arch::Grayskull,
1807  const tt::target::Dim2d *grid_size = nullptr,
1808  uint64_t l1_size = 0,
1809  uint32_t num_dram_channels = 0,
1810  uint64_t dram_channel_size = 0,
1811  uint32_t noc_l1_address_align_bytes = 0,
1812  uint32_t pcie_address_align_bytes = 0,
1813  uint32_t noc_dram_address_align_bytes = 0,
1814  uint32_t l1_unreserved_base = 0,
1815  uint32_t erisc_l1_unreserved_base = 0,
1816  uint32_t dram_unreserved_base = 0,
1817  uint32_t dram_unreserved_end = 0,
1818  ::flatbuffers::Offset<tt::target::ChipPhysicalCores> physical_cores = 0,
1819  ::flatbuffers::Offset<::flatbuffers::Vector<tt::target::DataType>> supported_data_types = 0,
1820  ::flatbuffers::Offset<::flatbuffers::Vector<const tt::target::Dim2d *>> supported_tile_sizes = 0,
1821  uint32_t num_cbs = 0) {
1822  ChipDescBuilder builder_(_fbb);
1823  builder_.add_dram_channel_size(dram_channel_size);
1824  builder_.add_l1_size(l1_size);
1825  builder_.add_num_cbs(num_cbs);
1826  builder_.add_supported_tile_sizes(supported_tile_sizes);
1827  builder_.add_supported_data_types(supported_data_types);
1828  builder_.add_physical_cores(physical_cores);
1829  builder_.add_dram_unreserved_end(dram_unreserved_end);
1830  builder_.add_dram_unreserved_base(dram_unreserved_base);
1831  builder_.add_erisc_l1_unreserved_base(erisc_l1_unreserved_base);
1832  builder_.add_l1_unreserved_base(l1_unreserved_base);
1833  builder_.add_noc_dram_address_align_bytes(noc_dram_address_align_bytes);
1834  builder_.add_pcie_address_align_bytes(pcie_address_align_bytes);
1835  builder_.add_noc_l1_address_align_bytes(noc_l1_address_align_bytes);
1836  builder_.add_num_dram_channels(num_dram_channels);
1837  builder_.add_grid_size(grid_size);
1838  builder_.add_arch(arch);
1839  return builder_.Finish();
1840 }
1841 
1842 struct ChipDesc::Traits {
1843  using type = ChipDesc;
1844  static auto constexpr Create = CreateChipDesc;
1845 };
1846 
1847 inline ::flatbuffers::Offset<ChipDesc> CreateChipDescDirect(
1848  ::flatbuffers::FlatBufferBuilder &_fbb,
1849  tt::target::Arch arch = tt::target::Arch::Grayskull,
1850  const tt::target::Dim2d *grid_size = nullptr,
1851  uint64_t l1_size = 0,
1852  uint32_t num_dram_channels = 0,
1853  uint64_t dram_channel_size = 0,
1854  uint32_t noc_l1_address_align_bytes = 0,
1855  uint32_t pcie_address_align_bytes = 0,
1856  uint32_t noc_dram_address_align_bytes = 0,
1857  uint32_t l1_unreserved_base = 0,
1858  uint32_t erisc_l1_unreserved_base = 0,
1859  uint32_t dram_unreserved_base = 0,
1860  uint32_t dram_unreserved_end = 0,
1861  ::flatbuffers::Offset<tt::target::ChipPhysicalCores> physical_cores = 0,
1862  const std::vector<tt::target::DataType> *supported_data_types = nullptr,
1863  const std::vector<tt::target::Dim2d> *supported_tile_sizes = nullptr,
1864  uint32_t num_cbs = 0) {
1865  auto supported_data_types__ = supported_data_types ? _fbb.CreateVector<tt::target::DataType>(*supported_data_types) : 0;
1866  auto supported_tile_sizes__ = supported_tile_sizes ? _fbb.CreateVectorOfStructs<tt::target::Dim2d>(*supported_tile_sizes) : 0;
1867  return tt::target::CreateChipDesc(
1868  _fbb,
1869  arch,
1870  grid_size,
1871  l1_size,
1872  num_dram_channels,
1873  dram_channel_size,
1874  noc_l1_address_align_bytes,
1875  pcie_address_align_bytes,
1876  noc_dram_address_align_bytes,
1877  l1_unreserved_base,
1878  erisc_l1_unreserved_base,
1879  dram_unreserved_base,
1880  dram_unreserved_end,
1881  physical_cores,
1882  supported_data_types__,
1883  supported_tile_sizes__,
1884  num_cbs);
1885 }
1886 
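Editorial example, not part of the generated header: a minimal sketch of calling CreateChipDescDirect. All field values are illustrative placeholders, the Dim2d values are left default-constructed to avoid assuming the struct's field order, and buildChipDescExample is a hypothetical helper name.

#include <vector>
#include "types_generated.h"

inline ::flatbuffers::Offset<tt::target::ChipDesc>
buildChipDescExample(::flatbuffers::FlatBufferBuilder &fbb) {
  // Placeholder values only; real code would fill these from queried hardware data.
  tt::target::Dim2d grid{};                       // grid dimensions left at defaults
  std::vector<tt::target::DataType> dtypes = {tt::target::DataType::Float32,
                                              tt::target::DataType::BFloat16};
  std::vector<tt::target::Dim2d> tile_sizes(1);   // one default-constructed tile shape
  return tt::target::CreateChipDescDirect(
      fbb, tt::target::Arch::Wormhole_b0, &grid,
      /*l1_size=*/1u << 20, /*num_dram_channels=*/8,
      /*dram_channel_size=*/1ull << 30,
      /*noc_l1_address_align_bytes=*/16, /*pcie_address_align_bytes=*/32,
      /*noc_dram_address_align_bytes=*/32, /*l1_unreserved_base=*/0,
      /*erisc_l1_unreserved_base=*/0, /*dram_unreserved_base=*/0,
      /*dram_unreserved_end=*/0, /*physical_cores=*/0,
      &dtypes, &tile_sizes, /*num_cbs=*/32);
}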
1887 struct ChipPhysicalCores FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
1888  typedef ChipPhysicalCoresBuilder Builder;
1889  struct Traits;
1890  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1891  VT_WORKER = 4,
1892  VT_DRAM = 6,
1893  VT_ETH = 8,
1894  VT_ETH_INACTIVE = 10
1895  };
1896  const ::flatbuffers::Vector<const tt::target::Dim2d *> *worker() const {
1897  return GetPointer<const ::flatbuffers::Vector<const tt::target::Dim2d *> *>(VT_WORKER);
1898  }
1899  const ::flatbuffers::Vector<const tt::target::Dim2d *> *dram() const {
1900  return GetPointer<const ::flatbuffers::Vector<const tt::target::Dim2d *> *>(VT_DRAM);
1901  }
1902  const ::flatbuffers::Vector<const tt::target::Dim2d *> *eth() const {
1903  return GetPointer<const ::flatbuffers::Vector<const tt::target::Dim2d *> *>(VT_ETH);
1904  }
1905  const ::flatbuffers::Vector<const tt::target::Dim2d *> *eth_inactive() const {
1906  return GetPointer<const ::flatbuffers::Vector<const tt::target::Dim2d *> *>(VT_ETH_INACTIVE);
1907  }
1908  bool Verify(::flatbuffers::Verifier &verifier) const {
1909  return VerifyTableStart(verifier) &&
1910  VerifyOffset(verifier, VT_WORKER) &&
1911  verifier.VerifyVector(worker()) &&
1912  VerifyOffset(verifier, VT_DRAM) &&
1913  verifier.VerifyVector(dram()) &&
1914  VerifyOffset(verifier, VT_ETH) &&
1915  verifier.VerifyVector(eth()) &&
1916  VerifyOffset(verifier, VT_ETH_INACTIVE) &&
1917  verifier.VerifyVector(eth_inactive()) &&
1918  verifier.EndTable();
1919  }
1920 };
1921 
1922 struct ChipPhysicalCoresBuilder {
1923  typedef ChipPhysicalCores Table;
1924  ::flatbuffers::FlatBufferBuilder &fbb_;
1925  ::flatbuffers::uoffset_t start_;
1926  void add_worker(::flatbuffers::Offset<::flatbuffers::Vector<const tt::target::Dim2d *>> worker) {
1927  fbb_.AddOffset(ChipPhysicalCores::VT_WORKER, worker);
1928  }
1929  void add_dram(::flatbuffers::Offset<::flatbuffers::Vector<const tt::target::Dim2d *>> dram) {
1930  fbb_.AddOffset(ChipPhysicalCores::VT_DRAM, dram);
1931  }
1932  void add_eth(::flatbuffers::Offset<::flatbuffers::Vector<const tt::target::Dim2d *>> eth) {
1933  fbb_.AddOffset(ChipPhysicalCores::VT_ETH, eth);
1934  }
1935  void add_eth_inactive(::flatbuffers::Offset<::flatbuffers::Vector<const tt::target::Dim2d *>> eth_inactive) {
1936  fbb_.AddOffset(ChipPhysicalCores::VT_ETH_INACTIVE, eth_inactive);
1937  }
1938  explicit ChipPhysicalCoresBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
1939  : fbb_(_fbb) {
1940  start_ = fbb_.StartTable();
1941  }
1942  ::flatbuffers::Offset<ChipPhysicalCores> Finish() {
1943  const auto end = fbb_.EndTable(start_);
1944  auto o = ::flatbuffers::Offset<ChipPhysicalCores>(end);
1945  return o;
1946  }
1947 };
1948 
1949 inline ::flatbuffers::Offset<ChipPhysicalCores> CreateChipPhysicalCores(
1950  ::flatbuffers::FlatBufferBuilder &_fbb,
1951  ::flatbuffers::Offset<::flatbuffers::Vector<const tt::target::Dim2d *>> worker = 0,
1952  ::flatbuffers::Offset<::flatbuffers::Vector<const tt::target::Dim2d *>> dram = 0,
1953  ::flatbuffers::Offset<::flatbuffers::Vector<const tt::target::Dim2d *>> eth = 0,
1954  ::flatbuffers::Offset<::flatbuffers::Vector<const tt::target::Dim2d *>> eth_inactive = 0) {
1955  ChipPhysicalCoresBuilder builder_(_fbb);
1956  builder_.add_eth_inactive(eth_inactive);
1957  builder_.add_eth(eth);
1958  builder_.add_dram(dram);
1959  builder_.add_worker(worker);
1960  return builder_.Finish();
1961 }
1962 
1963 struct ChipPhysicalCores::Traits {
1964  using type = ChipPhysicalCores;
1965  static auto constexpr Create = CreateChipPhysicalCores;
1966 };
1967 
1968 inline ::flatbuffers::Offset<ChipPhysicalCores> CreateChipPhysicalCoresDirect(
1969  ::flatbuffers::FlatBufferBuilder &_fbb,
1970  const std::vector<tt::target::Dim2d> *worker = nullptr,
1971  const std::vector<tt::target::Dim2d> *dram = nullptr,
1972  const std::vector<tt::target::Dim2d> *eth = nullptr,
1973  const std::vector<tt::target::Dim2d> *eth_inactive = nullptr) {
1974  auto worker__ = worker ? _fbb.CreateVectorOfStructs<tt::target::Dim2d>(*worker) : 0;
1975  auto dram__ = dram ? _fbb.CreateVectorOfStructs<tt::target::Dim2d>(*dram) : 0;
1976  auto eth__ = eth ? _fbb.CreateVectorOfStructs<tt::target::Dim2d>(*eth) : 0;
1977  auto eth_inactive__ = eth_inactive ? _fbb.CreateVectorOfStructs<tt::target::Dim2d>(*eth_inactive) : 0;
1978  return tt::target::CreateChipPhysicalCores(
1979  _fbb,
1980  worker__,
1981  dram__,
1982  eth__,
1983  eth_inactive__);
1984 }
1985 
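Editorial sketch (same includes as the earlier example): the *Direct overload above serializes plain std::vector<Dim2d> core coordinates through CreateVectorOfStructs, and a nullptr field is simply omitted from the table. The coordinate counts below are placeholders and buildPhysicalCoresExample is a hypothetical name.

inline ::flatbuffers::Offset<tt::target::ChipPhysicalCores>
buildPhysicalCoresExample(::flatbuffers::FlatBufferBuilder &fbb) {
  std::vector<tt::target::Dim2d> worker(8);  // default-constructed placeholder coords
  std::vector<tt::target::Dim2d> dram(2);
  std::vector<tt::target::Dim2d> eth(1);
  return tt::target::CreateChipPhysicalCoresDirect(
      fbb, &worker, &dram, &eth, /*eth_inactive=*/nullptr);
}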
1986 struct CPUDesc FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
1987  typedef CPUDescBuilder Builder;
1988  struct Traits;
1989  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1990  VT_ROLE = 4,
1991  VT_TARGET_TRIPLE = 6
1992  };
1993  tt::target::CPURole role() const {
1994  return static_cast<tt::target::CPURole>(GetField<uint8_t>(VT_ROLE, 0));
1995  }
1996  const ::flatbuffers::String *target_triple() const {
1997  return GetPointer<const ::flatbuffers::String *>(VT_TARGET_TRIPLE);
1998  }
1999  bool Verify(::flatbuffers::Verifier &verifier) const {
2000  return VerifyTableStart(verifier) &&
2001  VerifyField<uint8_t>(verifier, VT_ROLE, 1) &&
2002  VerifyOffset(verifier, VT_TARGET_TRIPLE) &&
2003  verifier.VerifyString(target_triple()) &&
2004  verifier.EndTable();
2005  }
2006 };
2007 
2008 struct CPUDescBuilder {
2009  typedef CPUDesc Table;
2010  ::flatbuffers::FlatBufferBuilder &fbb_;
2011  ::flatbuffers::uoffset_t start_;
2012  void add_role(tt::target::CPURole role) {
2013  fbb_.AddElement<uint8_t>(CPUDesc::VT_ROLE, static_cast<uint8_t>(role), 0);
2014  }
2015  void add_target_triple(::flatbuffers::Offset<::flatbuffers::String> target_triple) {
2016  fbb_.AddOffset(CPUDesc::VT_TARGET_TRIPLE, target_triple);
2017  }
2018  explicit CPUDescBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
2019  : fbb_(_fbb) {
2020  start_ = fbb_.StartTable();
2021  }
2022  ::flatbuffers::Offset<CPUDesc> Finish() {
2023  const auto end = fbb_.EndTable(start_);
2024  auto o = ::flatbuffers::Offset<CPUDesc>(end);
2025  return o;
2026  }
2027 };
2028 
2029 inline ::flatbuffers::Offset<CPUDesc> CreateCPUDesc(
2030  ::flatbuffers::FlatBufferBuilder &_fbb,
2031  tt::target::CPURole role = tt::target::CPURole::Host,
2032  ::flatbuffers::Offset<::flatbuffers::String> target_triple = 0) {
2033  CPUDescBuilder builder_(_fbb);
2034  builder_.add_target_triple(target_triple);
2035  builder_.add_role(role);
2036  return builder_.Finish();
2037 }
2038 
2039 struct CPUDesc::Traits {
2040  using type = CPUDesc;
2041  static auto constexpr Create = CreateCPUDesc;
2042 };
2043 
2044 inline ::flatbuffers::Offset<CPUDesc> CreateCPUDescDirect(
2045  ::flatbuffers::FlatBufferBuilder &_fbb,
2046  tt::target::CPURole role = tt::target::CPURole::Host,
2047  const char *target_triple = nullptr) {
2048  auto target_triple__ = target_triple ? _fbb.CreateString(target_triple) : 0;
2049  return tt::target::CreateCPUDesc(
2050  _fbb,
2051  role,
2052  target_triple__);
2053 }
2054 
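Editorial sketch: CreateCPUDescDirect copies the target triple into the buffer via CreateString, so a plain C string suffices. The triple below is only an example value and buildCPUDescExample is a hypothetical name.

inline ::flatbuffers::Offset<tt::target::CPUDesc>
buildCPUDescExample(::flatbuffers::FlatBufferBuilder &fbb) {
  return tt::target::CreateCPUDescDirect(fbb, tt::target::CPURole::Host,
                                         "x86_64-pc-linux-gnu");
}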
2055 struct SystemDesc FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
2056  typedef SystemDescBuilder Builder;
2057  struct Traits;
2058  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
2059  VT_CPU_DESCS = 4,
2060  VT_CHIP_DESCS = 6,
2061  VT_CHIP_DESC_INDICES = 8,
2062  VT_CHIP_CAPABILITIES = 10,
2063  VT_CHIP_COORDS = 12,
2064  VT_CHIP_CHANNELS = 14
2065  };
2066  const ::flatbuffers::Vector<::flatbuffers::Offset<tt::target::CPUDesc>> *cpu_descs() const {
2067  return GetPointer<const ::flatbuffers::Vector<::flatbuffers::Offset<tt::target::CPUDesc>> *>(VT_CPU_DESCS);
2068  }
2069  const ::flatbuffers::Vector<::flatbuffers::Offset<tt::target::ChipDesc>> *chip_descs() const {
2070  return GetPointer<const ::flatbuffers::Vector<::flatbuffers::Offset<tt::target::ChipDesc>> *>(VT_CHIP_DESCS);
2071  }
2072  const ::flatbuffers::Vector<uint32_t> *chip_desc_indices() const {
2073  return GetPointer<const ::flatbuffers::Vector<uint32_t> *>(VT_CHIP_DESC_INDICES);
2074  }
2075  const ::flatbuffers::Vector<tt::target::ChipCapability> *chip_capabilities() const {
2076  return GetPointer<const ::flatbuffers::Vector<tt::target::ChipCapability> *>(VT_CHIP_CAPABILITIES);
2077  }
2078  const ::flatbuffers::Vector<const tt::target::ChipCoord *> *chip_coords() const {
2079  return GetPointer<const ::flatbuffers::Vector<const tt::target::ChipCoord *> *>(VT_CHIP_COORDS);
2080  }
2081  const ::flatbuffers::Vector<const tt::target::ChipChannel *> *chip_channels() const {
2082  return GetPointer<const ::flatbuffers::Vector<const tt::target::ChipChannel *> *>(VT_CHIP_CHANNELS);
2083  }
2084  bool Verify(::flatbuffers::Verifier &verifier) const {
2085  return VerifyTableStart(verifier) &&
2086  VerifyOffset(verifier, VT_CPU_DESCS) &&
2087  verifier.VerifyVector(cpu_descs()) &&
2088  verifier.VerifyVectorOfTables(cpu_descs()) &&
2089  VerifyOffset(verifier, VT_CHIP_DESCS) &&
2090  verifier.VerifyVector(chip_descs()) &&
2091  verifier.VerifyVectorOfTables(chip_descs()) &&
2092  VerifyOffset(verifier, VT_CHIP_DESC_INDICES) &&
2093  verifier.VerifyVector(chip_desc_indices()) &&
2094  VerifyOffset(verifier, VT_CHIP_CAPABILITIES) &&
2095  verifier.VerifyVector(chip_capabilities()) &&
2096  VerifyOffset(verifier, VT_CHIP_COORDS) &&
2097  verifier.VerifyVector(chip_coords()) &&
2098  VerifyOffset(verifier, VT_CHIP_CHANNELS) &&
2099  verifier.VerifyVector(chip_channels()) &&
2100  verifier.EndTable();
2101  }
2102 };
2103 
2104 struct SystemDescBuilder {
2105  typedef SystemDesc Table;
2106  ::flatbuffers::FlatBufferBuilder &fbb_;
2107  ::flatbuffers::uoffset_t start_;
2108  void add_cpu_descs(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<tt::target::CPUDesc>>> cpu_descs) {
2109  fbb_.AddOffset(SystemDesc::VT_CPU_DESCS, cpu_descs);
2110  }
2111  void add_chip_descs(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<tt::target::ChipDesc>>> chip_descs) {
2112  fbb_.AddOffset(SystemDesc::VT_CHIP_DESCS, chip_descs);
2113  }
2114  void add_chip_desc_indices(::flatbuffers::Offset<::flatbuffers::Vector<uint32_t>> chip_desc_indices) {
2115  fbb_.AddOffset(SystemDesc::VT_CHIP_DESC_INDICES, chip_desc_indices);
2116  }
2117  void add_chip_capabilities(::flatbuffers::Offset<::flatbuffers::Vector<tt::target::ChipCapability>> chip_capabilities) {
2118  fbb_.AddOffset(SystemDesc::VT_CHIP_CAPABILITIES, chip_capabilities);
2119  }
2120  void add_chip_coords(::flatbuffers::Offset<::flatbuffers::Vector<const tt::target::ChipCoord *>> chip_coords) {
2121  fbb_.AddOffset(SystemDesc::VT_CHIP_COORDS, chip_coords);
2122  }
2123  void add_chip_channels(::flatbuffers::Offset<::flatbuffers::Vector<const tt::target::ChipChannel *>> chip_channels) {
2124  fbb_.AddOffset(SystemDesc::VT_CHIP_CHANNELS, chip_channels);
2125  }
2126  explicit SystemDescBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
2127  : fbb_(_fbb) {
2128  start_ = fbb_.StartTable();
2129  }
2130  ::flatbuffers::Offset<SystemDesc> Finish() {
2131  const auto end = fbb_.EndTable(start_);
2132  auto o = ::flatbuffers::Offset<SystemDesc>(end);
2133  return o;
2134  }
2135 };
2136 
2137 inline ::flatbuffers::Offset<SystemDesc> CreateSystemDesc(
2138  ::flatbuffers::FlatBufferBuilder &_fbb,
2139  ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<tt::target::CPUDesc>>> cpu_descs = 0,
2140  ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<tt::target::ChipDesc>>> chip_descs = 0,
2141  ::flatbuffers::Offset<::flatbuffers::Vector<uint32_t>> chip_desc_indices = 0,
2142  ::flatbuffers::Offset<::flatbuffers::Vector<tt::target::ChipCapability>> chip_capabilities = 0,
2143  ::flatbuffers::Offset<::flatbuffers::Vector<const tt::target::ChipCoord *>> chip_coords = 0,
2144  ::flatbuffers::Offset<::flatbuffers::Vector<const tt::target::ChipChannel *>> chip_channels = 0) {
2145  SystemDescBuilder builder_(_fbb);
2146  builder_.add_chip_channels(chip_channels);
2147  builder_.add_chip_coords(chip_coords);
2148  builder_.add_chip_capabilities(chip_capabilities);
2149  builder_.add_chip_desc_indices(chip_desc_indices);
2150  builder_.add_chip_descs(chip_descs);
2151  builder_.add_cpu_descs(cpu_descs);
2152  return builder_.Finish();
2153 }
2154 
2155 struct SystemDesc::Traits {
2156  using type = SystemDesc;
2157  static auto constexpr Create = CreateSystemDesc;
2158 };
2159 
2160 inline ::flatbuffers::Offset<SystemDesc> CreateSystemDescDirect(
2161  ::flatbuffers::FlatBufferBuilder &_fbb,
2162  const std::vector<::flatbuffers::Offset<tt::target::CPUDesc>> *cpu_descs = nullptr,
2163  const std::vector<::flatbuffers::Offset<tt::target::ChipDesc>> *chip_descs = nullptr,
2164  const std::vector<uint32_t> *chip_desc_indices = nullptr,
2165  const std::vector<tt::target::ChipCapability> *chip_capabilities = nullptr,
2166  const std::vector<tt::target::ChipCoord> *chip_coords = nullptr,
2167  const std::vector<tt::target::ChipChannel> *chip_channels = nullptr) {
2168  auto cpu_descs__ = cpu_descs ? _fbb.CreateVector<::flatbuffers::Offset<tt::target::CPUDesc>>(*cpu_descs) : 0;
2169  auto chip_descs__ = chip_descs ? _fbb.CreateVector<::flatbuffers::Offset<tt::target::ChipDesc>>(*chip_descs) : 0;
2170  auto chip_desc_indices__ = chip_desc_indices ? _fbb.CreateVector<uint32_t>(*chip_desc_indices) : 0;
2171  auto chip_capabilities__ = chip_capabilities ? _fbb.CreateVector<tt::target::ChipCapability>(*chip_capabilities) : 0;
2172  auto chip_coords__ = chip_coords ? _fbb.CreateVectorOfStructs<tt::target::ChipCoord>(*chip_coords) : 0;
2173  auto chip_channels__ = chip_channels ? _fbb.CreateVectorOfStructs<tt::target::ChipChannel>(*chip_channels) : 0;
2174  return tt::target::CreateSystemDesc(
2175  _fbb,
2176  cpu_descs__,
2177  chip_descs__,
2178  chip_desc_indices__,
2179  chip_capabilities__,
2180  chip_coords__,
2181  chip_channels__);
2182 }
2183 
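Editorial sketch: the parallel vectors chip_descs / chip_desc_indices / chip_capabilities describe one entry per chip. This minimal single-chip example reuses the hypothetical helpers sketched above and leaves the optional coordinate and channel vectors unset.

inline ::flatbuffers::Offset<tt::target::SystemDesc>
buildSystemDescExample(::flatbuffers::FlatBufferBuilder &fbb) {
  std::vector<::flatbuffers::Offset<tt::target::CPUDesc>> cpu_descs = {
      buildCPUDescExample(fbb)};
  std::vector<::flatbuffers::Offset<tt::target::ChipDesc>> chip_descs = {
      buildChipDescExample(fbb)};
  std::vector<uint32_t> chip_desc_indices = {0};         // chip 0 uses chip_descs[0]
  std::vector<tt::target::ChipCapability> capabilities;  // left empty in this sketch
  return tt::target::CreateSystemDescDirect(
      fbb, &cpu_descs, &chip_descs, &chip_desc_indices, &capabilities,
      /*chip_coords=*/nullptr, /*chip_channels=*/nullptr);
}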
2184 struct DeviceRef FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
2185  typedef DeviceRefBuilder Builder;
2186  struct Traits;
2187  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
2188  VT_GLOBAL_ID = 4
2189  };
2190  uint32_t global_id() const {
2191  return GetField<uint32_t>(VT_GLOBAL_ID, 0);
2192  }
2193  bool Verify(::flatbuffers::Verifier &verifier) const {
2194  return VerifyTableStart(verifier) &&
2195  VerifyField<uint32_t>(verifier, VT_GLOBAL_ID, 4) &&
2196  verifier.EndTable();
2197  }
2198 };
2199 
2200 struct DeviceRefBuilder {
2201  typedef DeviceRef Table;
2202  ::flatbuffers::FlatBufferBuilder &fbb_;
2203  ::flatbuffers::uoffset_t start_;
2204  void add_global_id(uint32_t global_id) {
2205  fbb_.AddElement<uint32_t>(DeviceRef::VT_GLOBAL_ID, global_id, 0);
2206  }
2207  explicit DeviceRefBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
2208  : fbb_(_fbb) {
2209  start_ = fbb_.StartTable();
2210  }
2211  ::flatbuffers::Offset<DeviceRef> Finish() {
2212  const auto end = fbb_.EndTable(start_);
2213  auto o = ::flatbuffers::Offset<DeviceRef>(end);
2214  return o;
2215  }
2216 };
2217 
2218 inline ::flatbuffers::Offset<DeviceRef> CreateDeviceRef(
2219  ::flatbuffers::FlatBufferBuilder &_fbb,
2220  uint32_t global_id = 0) {
2221  DeviceRefBuilder builder_(_fbb);
2222  builder_.add_global_id(global_id);
2223  return builder_.Finish();
2224 }
2225 
2226 struct DeviceRef::Traits {
2227  using type = DeviceRef;
2228  static auto constexpr Create = CreateDeviceRef;
2229 };
2230 
2231 struct EventRef FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
2232  typedef EventRefBuilder Builder;
2233  struct Traits;
2234  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
2235  VT_GLOBAL_ID = 4
2236  };
2237  uint32_t global_id() const {
2238  return GetField<uint32_t>(VT_GLOBAL_ID, 0);
2239  }
2240  bool Verify(::flatbuffers::Verifier &verifier) const {
2241  return VerifyTableStart(verifier) &&
2242  VerifyField<uint32_t>(verifier, VT_GLOBAL_ID, 4) &&
2243  verifier.EndTable();
2244  }
2245 };
2246 
2247 struct EventRefBuilder {
2248  typedef EventRef Table;
2249  ::flatbuffers::FlatBufferBuilder &fbb_;
2250  ::flatbuffers::uoffset_t start_;
2251  void add_global_id(uint32_t global_id) {
2252  fbb_.AddElement<uint32_t>(EventRef::VT_GLOBAL_ID, global_id, 0);
2253  }
2254  explicit EventRefBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
2255  : fbb_(_fbb) {
2256  start_ = fbb_.StartTable();
2257  }
2258  ::flatbuffers::Offset<EventRef> Finish() {
2259  const auto end = fbb_.EndTable(start_);
2260  auto o = ::flatbuffers::Offset<EventRef>(end);
2261  return o;
2262  }
2263 };
2264 
2265 inline ::flatbuffers::Offset<EventRef> CreateEventRef(
2266  ::flatbuffers::FlatBufferBuilder &_fbb,
2267  uint32_t global_id = 0) {
2268  EventRefBuilder builder_(_fbb);
2269  builder_.add_global_id(global_id);
2270  return builder_.Finish();
2271 }
2272 
2273 struct EventRef::Traits {
2274  using type = EventRef;
2275  static auto constexpr Create = CreateEventRef;
2276 };
2277 
2278 inline bool VerifyDistributedTensorConfig(::flatbuffers::Verifier &verifier, const void *obj, DistributedTensorConfig type) {
2279  switch (type) {
2280  case DistributedTensorConfig::NONE: {
2281  return true;
2282  }
2283  case DistributedTensorConfig::ReplicateTensor: {
2284  auto ptr = reinterpret_cast<const tt::target::ReplicateTensor *>(obj);
2285  return verifier.VerifyTable(ptr);
2286  }
2287  case DistributedTensorConfig::ShardTensor: {
2288  auto ptr = reinterpret_cast<const tt::target::ShardTensor *>(obj);
2289  return verifier.VerifyTable(ptr);
2290  }
2291  case DistributedTensorConfig::ShardTensor2D: {
2292  auto ptr = reinterpret_cast<const tt::target::ShardTensor2D *>(obj);
2293  return verifier.VerifyTable(ptr);
2294  }
2295  case DistributedTensorConfig::AllGatherTensor: {
2296  auto ptr = reinterpret_cast<const tt::target::AllGatherTensor *>(obj);
2297  return verifier.VerifyTable(ptr);
2298  }
2299  default: return true;
2300  }
2301 }
2302 
2303 inline bool VerifyDistributedTensorConfigVector(::flatbuffers::Verifier &verifier, const ::flatbuffers::Vector<::flatbuffers::Offset<void>> *values, const ::flatbuffers::Vector<DistributedTensorConfig> *types) {
2304  if (!values || !types) return !values && !types;
2305  if (values->size() != types->size()) return false;
2306  for (::flatbuffers::uoffset_t i = 0; i < values->size(); ++i) {
2307  if (!VerifyDistributedTensorConfig(
2308  verifier, values->Get(i), types->GetEnum<DistributedTensorConfig>(i))) {
2309  return false;
2310  }
2311  }
2312  return true;
2313 }
2314 
2315 } // namespace target
2316 } // namespace tt
2317 
2318 #endif // FLATBUFFERS_GENERATED_TYPES_TT_TARGET_H_
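Editorial note: VerifyDistributedTensorConfig dispatches on the union tag and verifies the table that the untyped pointer actually refers to, and the vector variant applies the same check element-wise to parallel type/value vectors. A minimal sketch of the single-value form, assuming a DistributionStrategy table obtained elsewhere (verifyStrategyUnionExample is a hypothetical name):

inline bool verifyStrategyUnionExample(
    ::flatbuffers::Verifier &verifier,
    const tt::target::DistributionStrategy *dist) {
  // strategy_type() is the union tag; strategy() is the untyped table pointer.
  return tt::target::VerifyDistributedTensorConfig(
      verifier, dist->strategy(), dist->strategy_type());
}

The generated DistributionStrategy::Verify already performs this check internally; calling the helper directly is mainly useful when union members are stored in parallel vectors, which is what VerifyDistributedTensorConfigVector does above.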