Jlm
LocalAliasAnalysis.cpp
Go to the documentation of this file.
1 /*
2  * Copyright 2025 Håvard Krogstie <krogstie.havard@gmail.com>
3  * See COPYING for terms of redistribution.
4  */
5 
7 
12 #include <jlm/llvm/ir/Trace.hpp>
13 #include <jlm/llvm/ir/types.hpp>
14 #include <jlm/rvsdg/gamma.hpp>
15 #include <jlm/rvsdg/lambda.hpp>
16 #include <jlm/rvsdg/MatchType.hpp>
17 #include <jlm/rvsdg/theta.hpp>
18 
19 #include <numeric>
20 #include <queue>
21 
22 namespace jlm::llvm::aa
23 {
24 
26 
27 LocalAliasAnalysis::~LocalAliasAnalysis() noexcept = default;
28 
29 std::string
31 {
32  return "LocalAA";
33 }
34 
46 {
48  std::optional<int64_t> Offset;
49 };
50 
55 {
61  std::unordered_map<const rvsdg::Output *, std::optional<int64_t>> AllTracedOutputs;
62 
68  std::unordered_map<const rvsdg::Output *, std::optional<int64_t>> TopOrigins;
69 };
70 
72 LocalAliasAnalysis::Query(const rvsdg::Output & p1, size_t s1, const rvsdg::Output & p2, size_t s2)
73 {
74  const auto & p1Norm = llvm::traceOutput(p1);
75  const auto & p2Norm = llvm::traceOutput(p2);
76 
77  // If the two pointers are the same value, they must alias
78  if (&p1Norm == &p2Norm)
79  return MustAlias;
80 
81  // Trace through GEP operations to get closer to the origins of the pointers
82  // Only trace through GEPs where the offset is known at compile time,
83  // to avoid giving up on MustAlias prematurely
84  const auto p1Traced = TracePointerOriginPrecise(p1Norm);
85  const auto p2Traced = TracePointerOriginPrecise(p2Norm);
86  JLM_ASSERT(p1Traced.Offset.has_value() && p2Traced.Offset.has_value());
87 
88  if (p1Traced.BasePointer == p2Traced.BasePointer)
89  {
90  // The pointers share base, but may have different offsets
91  // p1 = base + p1Offset
92  // p2 = base + p2Offset
93 
94  return QueryOffsets(p1Traced.Offset, s1, p2Traced.Offset, s2);
95  }
96 
97  // Keep tracing back to all sources
98  TraceCollection p1TraceCollection;
99  TraceCollection p2TraceCollection;
100 
101  // If tracing reaches too many possible outputs, it may give up
102  if (!TraceAllPointerOrigins(p1Traced, p1TraceCollection))
103  return MayAlias;
104 
105  if (!TraceAllPointerOrigins(p2Traced, p2TraceCollection))
106  return MayAlias;
107 
108  // Removes top origins that can not possibly be valid targets due to being too small.
109  // If p1 + s1 is outside the range of a top origin, then p1 can not target it
110  RemoveTopOriginsWithRemainingSizeBelow(p1TraceCollection, s1);
111  RemoveTopOriginsWithRemainingSizeBelow(p2TraceCollection, s2);
112 
113  // If each trace collection has only one top origin, check if they have the same base pointer
114  if (p1TraceCollection.TopOrigins.size() == 1 && p2TraceCollection.TopOrigins.size() == 1)
115  {
116  const auto & [p1Base, p1Offset] = *p1TraceCollection.TopOrigins.begin();
117  const auto & [p2Base, p2Offset] = *p2TraceCollection.TopOrigins.begin();
118  if (p1Base == p2Base)
119  return QueryOffsets(p1Offset, s1, p2Offset, s2);
120  }
121 
122  // From this point on we give up on MustAlias
123 
124  // Since we only have inbound GEPs, a pointer p = b + 12 must point at least 12 bytes into
125  // the memory region it points to
126  auto minimumP1OffsetFromStart = GetMinimumOffsetFromStart(p1TraceCollection);
127  auto minimumP2OffsetFromStart = GetMinimumOffsetFromStart(p2TraceCollection);
128 
129  // In case the trace collections contain unknown offsets, also try using the
130  // precise p1Traced and p2Traced, which always have a known offset
131  if (*p1Traced.Offset > 0)
132  minimumP1OffsetFromStart =
133  std::max(minimumP1OffsetFromStart, static_cast<size_t>(*p1Traced.Offset));
134  if (*p2Traced.Offset > 0)
135  minimumP2OffsetFromStart =
136  std::max(minimumP2OffsetFromStart, static_cast<size_t>(*p2Traced.Offset));
137 
138  // Since we have given up on MustAlias, we can remove some targets even if they are valid.
139  // Even if p1 might point to an 4-byte int foo,
140  // if p2 is an 8-byte operation, or p2 is at least 4 bytes into its target,
141  // we can safely discard that p1 might target foo.
142  RemoveTopOriginsSmallerThanSize(p2TraceCollection, minimumP1OffsetFromStart + s1);
143  RemoveTopOriginsSmallerThanSize(p1TraceCollection, minimumP2OffsetFromStart + s2);
144 
145  // If we know that p2 is at least 12 bytes into the memory region it targets,
146  // then any use of p1 where p1 + s1 is within the first 12 bytes of its memory region can be
147  // ignored.
148  RemoveTopOriginsWithinTheFirstNBytes(p1TraceCollection, s1, minimumP2OffsetFromStart);
149  RemoveTopOriginsWithinTheFirstNBytes(p2TraceCollection, s2, minimumP1OffsetFromStart);
150 
151  // Any direct overlap in the collections' top sets means there is a possibility of aliasing
152  if (DoTraceCollectionsOverlap(p1TraceCollection, s1, p2TraceCollection, s2))
153  return MayAlias;
154 
155  // Even if there is no direct overlap in the trace collections, the pointers may still alias
156  // Take for example the top sets { ALLOCA[a]+40, ALLOCA[b] } and { ALLOCA[c], o4+20 }
157  // o4 is some output that can not be traced further, but it is also not original.
158  // It is possible for o4 to be a pointer to ALLOCA[a]+20, in which case there is aliasing.
159 
160  // We already know that there is no direct overlap in the top origin sets,
161  // so if both trace collections only contain original pointers, there is NoAlias.
162  const bool p1AllTopsOriginal = HasOnlyOriginalTopOrigins(p1TraceCollection);
163  const bool p2AllTopsOriginal = HasOnlyOriginalTopOrigins(p2TraceCollection);
164 
165  if (p1AllTopsOriginal && p2AllTopsOriginal)
166  return NoAlias;
167 
168  // If one of the pointers has a top origin set containing only fully traceable ALLOCAs,
169  // it is not possible for the other pointer to target any of them,
170  // as they would already be explicitly included in its top origin set.
171  const bool p1OnlyTraceable = HasOnlyFullyTraceableTopOrigins(p1TraceCollection);
172  const bool p2OnlyTraceable = HasOnlyFullyTraceableTopOrigins(p2TraceCollection);
173 
174  if (p1OnlyTraceable || p2OnlyTraceable)
175  return NoAlias;
176 
177  return MayAlias;
178 }
179 
189 static std::optional<int64_t>
191  const rvsdg::SimpleNode & gepNode,
192  size_t inputIndex,
193  const rvsdg::Type & type)
194 {
195  // If we have no more input index values, we are not offsetting into the type
196  if (inputIndex >= gepNode.ninputs())
197  return 0;
198 
199  // GEP input 0 is the pointer being offset
200  // GEP input 1 is the number of whole types
201  // Intra-type offsets start at input 2 and beyond
202  JLM_ASSERT(inputIndex >= 2);
203 
204  auto & gepInput = *gepNode.input(inputIndex)->origin();
205  auto indexingValue = tryGetConstantSignedInteger(gepInput);
206 
207  // Any unknown indexing value means the GEP offset is unknown overall
208  if (!indexingValue.has_value())
209  return std::nullopt;
210 
211  if (auto array = dynamic_cast<const ArrayType *>(&type))
212  {
213  const auto & elementType = array->GetElementType();
214  int64_t offset = *indexingValue * GetTypeAllocSize(*elementType);
215 
216  // Get the offset into the element type as well, if any
217  const auto subOffset = CalculateIntraTypeGepOffset(gepNode, inputIndex + 1, *elementType);
218  if (subOffset.has_value())
219  return offset + *subOffset;
220 
221  return std::nullopt;
222  }
223  if (auto strct = dynamic_cast<const StructType *>(&type))
224  {
225  if (*indexingValue < 0 || static_cast<size_t>(*indexingValue) >= strct->numElements())
226  throw std::logic_error("Struct type has fewer fields than requested by GEP");
227 
228  const auto & fieldType = strct->getElementType(*indexingValue);
229  int64_t offset = strct->GetFieldOffset(*indexingValue);
230 
231  const auto subOffset = CalculateIntraTypeGepOffset(gepNode, inputIndex + 1, *fieldType);
232  if (subOffset.has_value())
233  return offset + *subOffset;
234 
235  return std::nullopt;
236  }
237 
238  JLM_UNREACHABLE("Unknown GEP type");
239 }
240 
241 std::optional<int64_t>
243 {
244  const auto gep = util::assertedCast<const GetElementPtrOperation>(&gepNode.GetOperation());
245 
246  // The pointee type. Gets updated by the loop below if the GEP has multiple levels of offsets
247  const auto & pointeeType = gep->GetPointeeType();
248 
249  const auto & wholeTypeIndexingOrigin = *gepNode.input(1)->origin();
250  const auto wholeTypeIndexing = tryGetConstantSignedInteger(wholeTypeIndexingOrigin);
251 
252  if (!wholeTypeIndexing.has_value())
253  return std::nullopt;
254 
255  int64_t offset = *wholeTypeIndexing * GetTypeAllocSize(pointeeType);
256 
257  // In addition to offsetting by whole types, a GEP can also offset within a type
258  const auto subOffset = CalculateIntraTypeGepOffset(gepNode, 2, pointeeType);
259  if (!subOffset.has_value())
260  return std::nullopt;
261 
262  return offset + *subOffset;
263 }
264 
267 {
268  // The original pointer p is always equal to base + byte offset
269  const rvsdg::Output * base = &p;
270  int64_t offset = 0;
271 
272  while (true)
273  {
274  // Use normalization function to get past all trivially invariant operations
275  base = &llvm::traceOutput(*base);
276 
277  if (const auto [node, gep] =
278  rvsdg::TryGetSimpleNodeAndOptionalOp<GetElementPtrOperation>(*base);
279  gep)
280  {
281  auto calculatedOffset = CalculateGepOffset(*node);
282 
283  // Only trace through GEPs with statically known offsets
284  if (!calculatedOffset.has_value())
285  break;
286 
287  base = node->input(0)->origin();
288  offset += *calculatedOffset;
289  }
290 
291  // We were not able to trace further
292  break;
293  }
294 
295  return TracedPointerOrigin{ base, offset };
296 }
297 
300  std::optional<int64_t> offset1,
301  size_t s1,
302  std::optional<int64_t> offset2,
303  size_t s2)
304 {
305  // If either offset is unknown, return MayAlias
306  if (!offset1.has_value() || !offset2.has_value())
307  return MayAlias;
308 
309  auto difference = *offset2 - *offset1;
310  if (difference == 0)
311  return MustAlias;
312 
313  // p2 starts at or after p1+s1
314  if (difference >= 0 && static_cast<size_t>(difference) >= s1)
315  return NoAlias;
316 
317  // p1 starts at or after p2+s2
318  if (difference <= 0 && static_cast<size_t>(-difference) >= s2)
319  return NoAlias;
320 
321  // We have a partial alias
322  return MayAlias;
323 }
324 
325 bool
327 {
328  if (traceCollection.AllTracedOutputs.size() >= MaxTraceCollectionSize)
329  return false;
330 
331  // Normalize the pointer first, to avoid tracing trivial temporary outputs
333 
334  auto it = traceCollection.AllTracedOutputs.find(p.BasePointer);
335  if (it != traceCollection.AllTracedOutputs.end())
336  {
337  // If the base pointer has already been traced with an unknown offset, we have nothing to add
338  if (!it->second.has_value())
339  return true;
340 
341  // The offset used for the base pointer the last time it was traced
342  const auto prevOffset = *it->second;
343 
344  // If we are visiting the same base pointer again with the same offset, we have nothing to add
345  if (p.Offset.has_value() && *p.Offset == prevOffset)
346  return true;
347 
348  // We have different offsets to last time, collapse to unknown offset
349  p.Offset = std::nullopt;
350  }
351 
352  traceCollection.AllTracedOutputs[p.BasePointer] = p.Offset;
353 
354  // If it is a GEP, we can trace through it, but possibly lose precise offset information
355  if (const auto [node, gep] =
356  rvsdg::TryGetSimpleNodeAndOptionalOp<GetElementPtrOperation>(*p.BasePointer);
357  gep)
358  {
359  // Update the base pointer and offset to represent the other side of the GEP
360  p.BasePointer = node->input(0)->origin();
361 
362  // If we have precisely tracked the offset so far, try updating it with the GEPs offset
363  if (p.Offset.has_value())
364  {
365  const auto gepOffset = CalculateGepOffset(*node);
366  if (gepOffset.has_value())
367  p.Offset = *p.Offset + *gepOffset;
368  else
369  p.Offset = std::nullopt;
370  }
371 
372  return TraceAllPointerOrigins(p, traceCollection);
373  }
374 
375  // If the node is a \ref SelectOperation, trace through both possible inputs
376  if (const auto [node, select] =
377  rvsdg::TryGetSimpleNodeAndOptionalOp<SelectOperation>(*p.BasePointer);
378  select)
379  {
380  auto leftTrace = p;
381  leftTrace.BasePointer = node->input(1)->origin();
382  auto rightTrace = p;
383  rightTrace.BasePointer = node->input(2)->origin();
384 
385  return TraceAllPointerOrigins(leftTrace, traceCollection)
386  && TraceAllPointerOrigins(rightTrace, traceCollection);
387  }
388 
389  // If we reach undef nodes, do not include them in the TopOrigins
390  if (rvsdg::IsOwnerNodeOperation<UndefValueOperation>(*p.BasePointer))
391  {
392  return true;
393  }
394 
395  // Trace into gamma nodes
396  if (auto gamma = rvsdg::TryGetOwnerNode<rvsdg::GammaNode>(*p.BasePointer))
397  {
398  auto exitVar = gamma->MapOutputExitVar(*p.BasePointer);
399  for (auto result : exitVar.branchResult)
400  {
401  TracedPointerOrigin inside = { result->origin(), p.Offset };
402 
403  // If tracing gives up, we give up
404  if (!TraceAllPointerOrigins(inside, traceCollection))
405  return false;
406  }
407 
408  return true;
409  }
410 
411  // Trace into theta nodes
412  if (auto theta = rvsdg::TryGetOwnerNode<rvsdg::ThetaNode>(*p.BasePointer))
413  {
414  auto loopVar = theta->MapOutputLoopVar(*p.BasePointer);
415 
416  // Invariant loop variables should already have been handled by normalization
418 
419  TracedPointerOrigin inside = { loopVar.post->origin(), p.Offset };
420  return TraceAllPointerOrigins(inside, traceCollection);
421  }
422 
423  // We could not trace further, add p as a TopOrigin
424  traceCollection.TopOrigins[p.BasePointer] = p.Offset;
425  return true;
426 }
427 
428 bool
430 {
431  // Each GraphImport represents a unique object
432  if (dynamic_cast<const LlvmGraphImport *>(&pointer))
433  return true;
434 
435  if (rvsdg::TryGetOwnerNode<rvsdg::DeltaNode>(pointer))
436  return true;
437 
438  if (rvsdg::TryGetOwnerNode<rvsdg::LambdaNode>(pointer))
439  return true;
440 
441  // Is pointer the output of one of the nodes
442  if (const auto node = rvsdg::TryGetOwnerNode<rvsdg::SimpleNode>(pointer))
443  {
444  if (is<AllocaOperation>(node->GetOperation()))
445  return true;
446 
447  if (is<MallocOperation>(node->GetOperation()))
448  return true;
449  }
450 
451  return false;
452 }
453 
454 bool
456 {
457  for (auto [output, offset] : traces.TopOrigins)
458  {
459  if (!IsOriginalOrigin(*output))
460  return false;
461  }
462  return true;
463 }
464 
465 std::optional<size_t>
467 {
468  if (auto delta = rvsdg::TryGetOwnerNode<rvsdg::DeltaNode>(pointer))
469  return GetTypeAllocSize(*delta->GetOperation().Type());
470  if (auto import = dynamic_cast<const LlvmGraphImport *>(&pointer))
471  {
472  auto size = GetTypeAllocSize(*import->ValueType());
473  // Workaround for imported incomplete types appearing to have size 0 in the LLVM IR
474  if (size == 0)
475  return std::nullopt;
476 
477  return size;
478  }
479  if (const auto [node, allocaOp] = rvsdg::TryGetSimpleNodeAndOptionalOp<AllocaOperation>(pointer);
480  allocaOp)
481  {
482  const auto elementCount = tryGetConstantSignedInteger(*node->input(0)->origin());
483  if (elementCount.has_value())
484  return *elementCount * GetTypeAllocSize(*allocaOp->ValueType());
485  }
486  if (const auto [node, mallocOp] = rvsdg::TryGetSimpleNodeAndOptionalOp<MallocOperation>(pointer);
487  mallocOp)
488  {
489  const auto mallocSize =
491  if (mallocSize.has_value())
492  return *mallocSize;
493  }
494 
495  return std::nullopt;
496 }
497 
498 std::optional<size_t>
500 {
501  const auto totalSize = GetOriginalOriginSize(*trace.BasePointer);
502  if (!totalSize.has_value())
503  return std::nullopt;
504 
505  if (!trace.Offset.has_value())
506  return *totalSize;
507 
508  // Avoid wrap-around by truncating remaining size to min 0
509  if (*trace.Offset > 0 && static_cast<size_t>(*trace.Offset) > *totalSize)
510  return 0;
511 
512  return *totalSize - *trace.Offset;
513 }
514 
515 void
517 {
518  auto it = traces.TopOrigins.begin();
519  while (it != traces.TopOrigins.end())
520  {
521  const auto remainingSize = GetRemainingSize({ it->first, it->second });
522  if (remainingSize.has_value())
523  {
524  // This top origin leaves too little room, and can be fully removed
525  if (*remainingSize < s)
526  {
527  it = traces.TopOrigins.erase(it);
528  continue;
529  }
530 
531  // If a top origin is exactly large enough for s, any unknown offset must be 0
532  if (*remainingSize == s && !it->second.has_value())
533  it->second = 0;
534  }
535  it++;
536  }
537 }
538 
539 size_t
541 {
542  std::optional<size_t> minimumOffset;
543  for (auto [output, offset] : traces.TopOrigins)
544  {
545  // If one of the possible targets has an unknown offset, just use the access size
546  if (!offset.has_value())
547  return 0;
548 
549  if (*offset < 0)
550  return 0;
551 
552  if (minimumOffset.has_value())
553  minimumOffset = std::min(*minimumOffset, static_cast<size_t>(*offset));
554  else
555  minimumOffset = offset;
556  }
557 
558  if (minimumOffset.has_value())
559  return *minimumOffset;
560 
561  // We only get here if the top origins is empty, in which case the return value
562  // does not matter, as the query will return NoAlias anyway.
563  return 0;
564 }
565 
566 void
568 {
569  auto it = traces.TopOrigins.begin();
570  while (it != traces.TopOrigins.end())
571  {
572  auto originSize = GetOriginalOriginSize(*it->first);
573  if (originSize.has_value() && *originSize < s)
574  it = traces.TopOrigins.erase(it);
575  else
576  it++;
577  }
578 }
579 
580 void
582  TraceCollection & traces,
583  size_t s,
584  size_t N)
585 {
586  auto it = traces.TopOrigins.begin();
587  while (it != traces.TopOrigins.end())
588  {
589  const auto offset = it->second;
590 
591  // If the pointer is original, it is also pointing to the beginning of the memory region.
592  // The offset thus tells us exactly which bytes within the memory region we touch.
593  if (IsOriginalOrigin(*it->first) && offset.has_value() && *offset + s <= N)
594  it = traces.TopOrigins.erase(it);
595  else
596  it++;
597  }
598 }
599 
600 bool
602  TraceCollection & tc1,
603  size_t s1,
604  TraceCollection & tc2,
605  size_t s2)
606 {
607  for (auto [p1Origin, p1Offset] : tc1.TopOrigins)
608  {
609  auto p2Find = tc2.TopOrigins.find(p1Origin);
610  if (p2Find == tc2.TopOrigins.end())
611  continue;
612 
613  auto p2Offset = p2Find->second;
614  if (QueryOffsets(p1Offset, s1, p2Offset, s2) != NoAlias)
615  return true;
616  }
617 
618  return false;
619 }
620 
621 bool
623 {
624  // The only original origins that can be fully traced for escaping are ALLOCAs
625  if (!rvsdg::IsOwnerNodeOperation<AllocaOperation>(pointer))
626  return false;
627 
628  // Check if the result for this ALLOCA is already memoized
629  auto it = IsFullyTraceable_.find(&pointer);
630  if (it != IsFullyTraceable_.end())
631  return it->second;
632 
633  // Use a queue to find all users of the ALLOCA's address
634  std::queue<const rvsdg::Output *> qu;
635  std::unordered_set<const rvsdg::Output *> added;
636 
637  const auto Enqueue = [&](const rvsdg::Output & p)
638  {
639  // Only enqueue new outputs
640  auto [_, inserted] = added.insert(&p);
641  if (inserted)
642  qu.push(&p);
643  };
644 
645  Enqueue(pointer);
646  while (!qu.empty())
647  {
648  auto & p = *qu.front();
649  qu.pop();
650 
651  // Handle all inputs that are users of p
652  for (auto & user : p.Users())
653  {
654  if (auto gamma = rvsdg::TryGetOwnerNode<rvsdg::GammaNode>(user))
655  {
656  auto input = gamma->MapInput(user);
657 
658  // A pointer must always be an EntryVar, as the MatchVar has a ControlType
659  auto entry = std::get_if<rvsdg::GammaNode::EntryVar>(&input);
660  JLM_ASSERT(entry);
661 
662  for (auto output : entry->branchArgument)
663  Enqueue(*output);
664 
665  continue;
666  }
667  if (auto gamma = rvsdg::TryGetRegionParentNode<rvsdg::GammaNode>(user))
668  {
669  // user is a gamma result, find the corresponding gamma output
670  auto exitVar = gamma->MapBranchResultExitVar(user);
671  Enqueue(*exitVar.output);
672 
673  continue;
674  }
675 
676  if (auto theta = rvsdg::TryGetOwnerNode<rvsdg::ThetaNode>(user))
677  {
678  auto loopVar = theta->MapInputLoopVar(user);
679 
680  // The loop always runs at least once, so map it to the inside
681  Enqueue(*loopVar.pre);
682 
683  continue;
684  }
685  if (auto theta = rvsdg::TryGetRegionParentNode<rvsdg::ThetaNode>(user))
686  {
687  // user is a theta result, find the corresponding loop variable
688  auto loopVar = theta->MapPostLoopVar(user);
689  Enqueue(*loopVar.pre);
690  Enqueue(*loopVar.output);
691 
692  continue;
693  }
694 
695  if (auto node = rvsdg::TryGetOwnerNode<rvsdg::SimpleNode>(user))
696  {
697  bool do_continue = MatchTypeWithDefault(
698  node->GetOperation(),
699  [&](const IOBarrierOperation &)
700  {
701  // The pointer input must be the node's first input
702  JLM_ASSERT(user.index() == 0);
703  Enqueue(*node->output(0));
704  return true;
705  },
706  [&](const GetElementPtrOperation &)
707  {
708  // The pointer input must be the node's first input
709  JLM_ASSERT(user.index() == 0);
710  Enqueue(*node->output(0));
711  return true;
712  },
713  [&](const SelectOperation &)
714  {
715  // Select operations are fine, if the output is still fully traceable
716  Enqueue(*node->output(0));
717  return true;
718  },
719  [&](const LoadOperation &)
720  {
721  // Loads are always fine
722  return true;
723  },
724  [&](const StoreOperation &)
725  {
726  // Stores are only fine if the pointer itself is not being stored somewhere
727  if (&user == &StoreOperation::AddressInput(*node))
728  return true;
729  else
730  return false;
731  },
732  []()
733  {
734  return false;
735  });
736  if (do_continue)
737  continue;
738  }
739 
740  // We were unable to handle this user, so the original pointer escapes tracing
741  IsFullyTraceable_[&pointer] = false;
742  return false;
743  }
744  }
745 
746  // The entire queue was processed without reaching a single untraceable user of the pointer
747  IsFullyTraceable_[&pointer] = true;
748  return true;
749 }
750 
751 bool
753 {
754  for (auto [topOrigin, _] : traces.TopOrigins)
755  {
756  if (!IsOriginalOriginFullyTraceable(*topOrigin))
757  return false;
758  }
759 
760  return true;
761 }
762 
763 }
static rvsdg::Input & sizeInput(const rvsdg::Node &node)
Definition: operators.hpp:2436
StructType class.
Definition: types.hpp:184
static void RemoveTopOriginsWithinTheFirstNBytes(TraceCollection &traces, size_t s, size_t N)
static bool IsOriginalOrigin(const rvsdg::Output &pointer)
~LocalAliasAnalysis() noexcept override
bool HasOnlyFullyTraceableTopOrigins(TraceCollection &traces)
AliasQueryResponse Query(const rvsdg::Output &p1, size_t s1, const rvsdg::Output &p2, size_t s2) override
std::unordered_map< const rvsdg::Output *, bool > IsFullyTraceable_
static TracedPointerOrigin TracePointerOriginPrecise(const rvsdg::Output &p)
static bool DoTraceCollectionsOverlap(TraceCollection &tc1, size_t s1, TraceCollection &tc2, size_t s2)
static void RemoveTopOriginsSmallerThanSize(TraceCollection &traces, size_t s)
static std::optional< int64_t > CalculateGepOffset(const rvsdg::SimpleNode &gepNode)
static bool HasOnlyOriginalTopOrigins(TraceCollection &traces)
static size_t GetMinimumOffsetFromStart(TraceCollection &traces)
static constexpr size_t MaxTraceCollectionSize
bool IsOriginalOriginFullyTraceable(const rvsdg::Output &pointer)
static void RemoveTopOriginsWithRemainingSizeBelow(TraceCollection &traces, size_t s)
static AliasQueryResponse QueryOffsets(std::optional< int64_t > offset1, size_t s1, std::optional< int64_t > offset2, size_t s2)
static bool TraceAllPointerOrigins(TracedPointerOrigin p, TraceCollection &traceCollection)
static std::optional< size_t > GetOriginalOriginSize(const rvsdg::Output &pointer)
static std::optional< size_t > GetRemainingSize(TracedPointerOrigin trace)
Output * origin() const noexcept
Definition: node.hpp:58
size_t ninputs() const noexcept
Definition: node.hpp:609
const SimpleOperation & GetOperation() const noexcept override
Definition: simple-node.cpp:48
NodeInput * input(size_t index) const noexcept
Definition: simple-node.hpp:82
#define JLM_ASSERT(x)
Definition: common.hpp:16
#define JLM_UNREACHABLE(msg)
Definition: common.hpp:43
static std::optional< int64_t > CalculateIntraTypeGepOffset(const rvsdg::SimpleNode &gepNode, size_t inputIndex, const rvsdg::Type &type)
size_t GetTypeAllocSize(const rvsdg::Type &type)
Definition: types.cpp:473
rvsdg::Output & traceOutput(rvsdg::Output &output)
Definition: Trace.cpp:39
static std::string ToString(const std::vector< MemoryNodeId > &memoryNodeIds)
std::optional< int64_t > tryGetConstantSignedInteger(const rvsdg::Output &output)
Definition: Trace.cpp:46
void MatchTypeWithDefault(T &obj, const Fns &... fns)
Pattern match over subclass type of given object with default handler.
static bool ThetaLoopVarIsInvariant(const ThetaNode::LoopVar &loopVar) noexcept
Definition: theta.hpp:227
static std::string type(const Node *n)
Definition: view.cpp:255
std::unordered_map< const rvsdg::Output *, std::optional< int64_t > > TopOrigins
std::unordered_map< const rvsdg::Output *, std::optional< int64_t > > AllTracedOutputs