Lines Matching defs:memref
56 // Returns the load op count for 'memref'.
57 unsigned Node::getLoadOpCount(Value memref) const {
60 if (memref == cast<AffineReadOpInterface>(loadOp).getMemRef())
66 // Returns the store op count for 'memref'.
67 unsigned Node::getStoreOpCount(Value memref) const {
70 if (memref == cast<AffineWriteOpInterface>(storeOp).getMemRef())
76 // Returns all store ops in 'storeOps' which access 'memref'.
77 void Node::getStoreOpsForMemref(Value memref,
80 if (memref == cast<AffineWriteOpInterface>(storeOp).getMemRef())
85 // Returns all load ops in 'loadOps' which access 'memref'.
86 void Node::getLoadOpsForMemref(Value memref,
89 if (memref == cast<AffineReadOpInterface>(loadOp).getMemRef())
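Taken together, these four helpers are simple linear scans over a node's load and store op lists, comparing each op's memref against the queried value. A minimal standalone sketch of the same logic in plain C++ (the `MemRef`/`Op` types here are hypothetical stand-ins for `mlir::Value` and the affine read/write op interfaces, not MLIR's actual classes):

```cpp
#include <algorithm>
#include <cassert>
#include <vector>

// Hypothetical stand-ins for mlir::Value and for affine load/store ops.
using MemRef = int;
struct Op { MemRef memref; };

struct Node {
  std::vector<Op> loads;
  std::vector<Op> stores;

  // Analog of Node::getLoadOpCount: how many loads read 'memref'.
  unsigned getLoadOpCount(MemRef memref) const {
    return static_cast<unsigned>(std::count_if(
        loads.begin(), loads.end(),
        [&](const Op &op) { return op.memref == memref; }));
  }

  // Analog of Node::getStoreOpCount: how many stores write 'memref'.
  unsigned getStoreOpCount(MemRef memref) const {
    return static_cast<unsigned>(std::count_if(
        stores.begin(), stores.end(),
        [&](const Op &op) { return op.memref == memref; }));
  }

  // Analog of Node::getStoreOpsForMemref: collect the stores to 'memref'.
  void getStoreOpsForMemref(MemRef memref, std::vector<Op> *storeOps) const {
    for (const Op &op : stores)
      if (op.memref == memref)
        storeOps->push_back(op);
  }
};

int main() {
  Node node{/*loads=*/{{0}, {1}, {0}}, /*stores=*/{{0}}};
  assert(node.getLoadOpCount(0) == 2);
  assert(node.getStoreOpCount(0) == 1);
  std::vector<Op> stores;
  node.getStoreOpsForMemref(0, &stores);
  assert(stores.size() == 1);
}
```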
103 auto memref = cast<AffineWriteOpInterface>(storeOp).getMemRef();
104 if (loadMemrefs.count(memref) > 0)
105 loadAndStoreMemrefSet->insert(memref);
113 // Map from a memref to the set of ids of the nodes that have ops accessing
114 // the memref.
131 auto memref = cast<AffineReadOpInterface>(opInst).getMemRef();
132 memrefAccesses[memref].insert(node.id);
136 auto memref = cast<AffineWriteOpInterface>(opInst).getMemRef();
137 memrefAccesses[memref].insert(node.id);
145 auto memref = cast<AffineReadOpInterface>(op).getMemRef();
146 memrefAccesses[memref].insert(node.id);
152 auto memref = cast<AffineWriteOpInterface>(op).getMemRef();
153 memrefAccesses[memref].insert(node.id);
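The lines above populate two related structures while the graph is built: a set of memrefs that a node both loads and stores, and a map from each memref to the ids of the nodes that access it (the map later drives edge creation). A hedged sketch of both computations, again with toy types standing in for `Value` and the graph node:

```cpp
#include <cassert>
#include <map>
#include <set>
#include <vector>

using MemRef = int;

struct Node {
  unsigned id;
  std::vector<MemRef> loads;   // memrefs read by this node's load ops
  std::vector<MemRef> stores;  // memrefs written by this node's store ops
};

// Analog of the load-and-store set: memrefs both loaded and stored.
std::set<MemRef> getLoadAndStoreMemrefSet(const Node &node) {
  std::set<MemRef> loadMemrefs(node.loads.begin(), node.loads.end());
  std::set<MemRef> result;
  for (MemRef memref : node.stores)
    if (loadMemrefs.count(memref) > 0)  // stored memref also loaded
      result.insert(memref);
  return result;
}

// Analog of the memrefAccesses map: memref -> ids of nodes accessing it.
std::map<MemRef, std::set<unsigned>>
buildMemrefAccesses(const std::vector<Node> &nodes) {
  std::map<MemRef, std::set<unsigned>> memrefAccesses;
  for (const Node &node : nodes) {
    for (MemRef memref : node.loads)
      memrefAccesses[memref].insert(node.id);  // record reading node
    for (MemRef memref : node.stores)
      memrefAccesses[memref].insert(node.id);  // record writing node
  }
  return memrefAccesses;
}

int main() {
  std::vector<Node> nodes = {{0, /*loads=*/{7}, /*stores=*/{7}},
                             {1, /*loads=*/{7}, /*stores=*/{8}}};
  assert(getLoadAndStoreMemrefSet(nodes[0]).count(7) == 1);
  auto accesses = buildMemrefAccesses(nodes);
  assert(accesses[7].size() == 2 && accesses[8].size() == 1);
}
```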
216 // Walk memref access lists and add graph edges between dependent nodes.
279 // Returns true if node 'id' writes to any memref which escapes (or is an
284 auto memref = cast<AffineWriteOpInterface>(storeOpInst).getMemRef();
285 auto *op = memref.getDefiningOp();
286 // Return true if 'memref' is a block argument.
289   // Return true if any use of 'memref' does not dereference it in an affine
291 for (auto *user : memref.getUsers())
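The escape check has two arms: a memref with no defining op is a block argument (e.g. a function argument) and is conservatively treated as escaping, and otherwise every user is inspected to see whether it dereferences the memref through an affine load/store. A minimal sketch of that decision, with a hypothetical `User` enum in place of real operations:

```cpp
#include <cassert>
#include <vector>

// Hypothetical user kinds; only affine loads/stores dereference the
// memref in an affine way.
enum class User { AffineLoad, AffineStore, Call, Return };

struct MemRef {
  bool hasDefiningOp;        // false => block argument (e.g. function arg)
  std::vector<User> users;
};

// Analog of the escape test: true if 'memref' is a block argument or has
// any user that does not dereference it via an affine load/store.
bool writesToEscapingMemref(const MemRef &memref) {
  if (!memref.hasDefiningOp)  // block argument: visible to the caller
    return true;
  for (User user : memref.users)
    if (user != User::AffineLoad && user != User::AffineStore)
      return true;            // e.g. passed to a call or returned: escapes
  return false;
}

int main() {
  assert(writesToEscapingMemref({false, {}}));                  // block arg
  assert(writesToEscapingMemref({true, {User::Call}}));         // escapes via call
  assert(!writesToEscapingMemref({true, {User::AffineLoad}}));  // local only
}
```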
386 // Returns the input edge count for node 'id' and 'memref' from src nodes
387 // which access 'memref' with a store operation.
389 Value memref) {
393 if (inEdge.value == memref) {
395 // Only count in edges from 'srcNode' if 'srcNode' accesses 'memref'
396 if (srcNode->getStoreOpCount(memref) > 0)
402 // Returns the output edge count for node 'id' and 'memref' (if non-null),
404 unsigned MemRefDependenceGraph::getOutEdgeCount(unsigned id, Value memref) {
408 if (!memref || outEdge.value == memref)
417     // By the definition of an edge, if the edge value is a non-memref value,
503 // private memref.
511 // Add edge from 'inEdge.id' to 'dstId' if it's not a private memref.
531 // replaced by a private memref). These edges could come from nodes
583 // memref dependence.
591 // memref dependence.
598 // Calls 'callback' for each edge in 'edges' which carries a memref
603 // Skip if 'edge' is not a memref dependence edge.
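These edge queries share one shape: walk a node's in- or out-edge list, filter by the memref the edge carries (skipping non-memref dependence edges), and optionally consult the source node's store ops. A standalone sketch of the three patterns, with hypothetical names for the helpers whose declarations are not shown above:

```cpp
#include <cassert>
#include <functional>
#include <map>
#include <optional>
#include <vector>

using MemRef = int;

struct Edge {
  unsigned id;                  // the other endpoint's node id
  std::optional<MemRef> value;  // memref carried; nullopt for non-memref deps
};

struct Graph {
  std::map<unsigned, std::vector<Edge>> inEdges, outEdges;
  // Which memrefs each node stores to (stand-in for getStoreOpCount > 0).
  std::map<unsigned, std::vector<MemRef>> nodeStores;

  bool nodeStoresTo(unsigned id, MemRef memref) const {
    auto it = nodeStores.find(id);
    if (it == nodeStores.end())
      return false;
    for (MemRef m : it->second)
      if (m == memref)
        return true;
    return false;
  }

  // Analog of the input-edge count: in edges on 'memref' whose source node
  // also stores to 'memref'.
  unsigned getIncomingMemRefAccesses(unsigned id, MemRef memref) const {
    auto it = inEdges.find(id);
    if (it == inEdges.end())
      return 0;
    unsigned count = 0;
    for (const Edge &inEdge : it->second)
      if (inEdge.value == memref && nodeStoresTo(inEdge.id, memref))
        ++count;
    return count;
  }

  // Analog of getOutEdgeCount: out edges, optionally filtered by 'memref'.
  unsigned getOutEdgeCount(unsigned id,
                           std::optional<MemRef> memref = std::nullopt) const {
    auto it = outEdges.find(id);
    if (it == outEdges.end())
      return 0;
    unsigned count = 0;
    for (const Edge &outEdge : it->second)
      if (!memref || outEdge.value == memref)
        ++count;
    return count;
  }

  // Analog of the edge-walking helper: skip non-memref dependence edges.
  void forEachMemRefEdge(const std::vector<Edge> &edges,
                         const std::function<void(const Edge &)> &callback) {
    for (const Edge &edge : edges)
      if (edge.value)  // skip if the edge is not a memref dependence edge
        callback(edge);
  }
};

int main() {
  Graph g;
  g.inEdges[1] = {{0, 7}, {2, 7}, {3, std::nullopt}};
  g.outEdges[0] = {{1, 7}};
  g.nodeStores[0] = {7};  // node 0 stores to memref 7; node 2 does not
  assert(g.getIncomingMemRefAccesses(1, 7) == 1);
  assert(g.getOutEdgeCount(0) == 1 && g.getOutEdgeCount(0, 8) == 0);
}
```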
939 return cast<MemRefType>(memref.getType()).getRank();
945 auto memRefType = cast<MemRefType>(memref.getType());
950 assert(rank == cst.getNumDimVars() && "inconsistent memref region");
953 // memref dimension with static size added to guard against potential
966 // Find a constant upper bound on the extent of this memref region along each
981 // memref's dim size if the latter has a constant size along this dim.
1006 auto memRefType = cast<MemRefType>(memref.getType());
1009 assert(rank == cst.getNumDimVars() && "inconsistent memref region");
1023 assert(memref == other.memref);
1027 /// Computes the memory region accessed by this memref with the region
1030 // For example, the memref region for this load operation at loopDepth = 1 will
1039 // region: {memref = %A, write = false, {%i <= m0 <= %i + 7} }
1042 // TODO: extend this to any other memref dereferencing ops
1051 memref = access.memref;
1066 // A 0-d memref has a 0-d region.
1148 // symbolic variables - so that the ones corresponding to the memref
1149 // dimensions are the dimensional variables for the memref region.
1153 // this memref region is symbolic.
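The example region above is worth making concrete: both bounds on m0 share the same symbolic part (%i), so the extent along that dimension is the constant 8 regardless of %i. A small sketch of that constant-extent computation, representing each affine bound as a symbol coefficient plus a constant; this is a one-symbol simplification of what the region's constraint system derives, not MLIR's actual API:

```cpp
#include <cassert>
#include <optional>

// A bound of the form: symCoeff * %sym + constant (one-symbol simplification).
struct AffineBound {
  int symCoeff;
  int constant;
};

// Constant extent of [lb, ub] if the symbolic parts cancel, else nullopt.
// Mirrors finding a constant upper bound on the region's extent per dim.
std::optional<int> constantExtent(AffineBound lb, AffineBound ub) {
  if (lb.symCoeff != ub.symCoeff)
    return std::nullopt;  // extent still depends on the symbol
  return ub.constant - lb.constant + 1;
}

int main() {
  // Region {%i <= m0 <= %i + 7}: lb = %i + 0, ub = %i + 7.
  AffineBound lb{1, 0}, ub{1, 7};
  assert(constantExtent(lb, ub) == 8);  // 8 elements along this dimension
}
```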
1176 // Add upper/lower bounds for each memref dimension with static size
1178 // TODO: Support dynamic memref dimensions.
1180 auto memRefType = cast<MemRefType>(memref.getType());
1216 auto memRefType = cast<MemRefType>(memref.getType());
1224 // Indices for the original memref being DMAed from/to.
1241 /// Returns the size of memref data in bytes if it's statically shaped,
1242 /// std::nullopt otherwise. If the element of the memref has vector type, takes
1295 // Check for overflow: d_i >= memref dim size.
1300 << "memref out of upper bound access along dimension #" << (r + 1);
1311 << "memref out of lower bound access along dimension #" << (r + 1);
1430 if (srcAccess.memref != dstAccess.memref)
1821  // Constructs MemRefAccess, populating it with the memref, its indices and
1825 memref = loadOp.getMemRef();
1833 memref = storeOp.getMemRef();
1839 return cast<MemRefType>(memref.getType()).getRank();
1859 /// time) when considering the memref, the affine maps and their respective
1865 if (memref != rhs.memref)
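A MemRefAccess packs the accessed memref together with the access function taken from the load or store, and equality compares the memref first before the access maps. A toy version, with plain index vectors standing in for the affine maps:

```cpp
#include <cassert>
#include <vector>

using MemRef = int;

// Toy analog of MemRefAccess: the memref plus its access indices.
struct MemRefAccess {
  MemRef memref;
  std::vector<int> indices;  // stand-in for the affine access maps

  unsigned getRank() const { return static_cast<unsigned>(indices.size()); }

  bool operator==(const MemRefAccess &rhs) const {
    if (memref != rhs.memref)  // mirrors the early memref comparison
      return false;
    return indices == rhs.indices;
  }
};

int main() {
  MemRefAccess a{7, {0, 1}}, b{7, {0, 1}}, c{8, {0, 1}};
  assert(a == b);
  assert(!(a == c));  // accesses to different memrefs are never equal
}
```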
1922 // Compute the memref region symbolic in any IVs enclosing this block.
1930 auto [it, inserted] = regions.try_emplace(region->memref);
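The last lines accumulate one region per memref for a block: try_emplace inserts a fresh entry the first time a memref is seen, and subsequent accesses fold into the existing entry. A sketch of that accumulation pattern using std::map::try_emplace, where the interval-based `Region` is a hypothetical stand-in for MLIR's MemRefRegion and its bounding-box union:

```cpp
#include <algorithm>
#include <cassert>
#include <map>
#include <vector>

using MemRef = int;

// Hypothetical 1-D region: an inclusive index interval.
struct Region {
  long lb, ub;
  void unionWith(const Region &other) {
    lb = std::min(lb, other.lb);
    ub = std::max(ub, other.ub);
  }
};

int main() {
  std::vector<std::pair<MemRef, Region>> accesses = {
      {7, {0, 3}}, {7, {8, 15}}, {9, {2, 2}}};

  std::map<MemRef, Region> regions;
  for (const auto &[memref, region] : accesses) {
    // Mirrors regions.try_emplace(region->memref): insert on first sight...
    auto [it, inserted] = regions.try_emplace(memref);
    if (inserted)
      it->second = region;           // first access: take the region as-is
    else
      it->second.unionWith(region);  // ...otherwise union with the existing one.
  }
  assert(regions[7].lb == 0 && regions[7].ub == 15);
  assert(regions.size() == 2);
}
```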