xref: /netbsd-src/external/gpl3/gcc/dist/gcc/analyzer/diagnostic-manager.cc (revision b1e838363e3c6fc78a55519254d99869742dd33c)
1 /* Classes for saving, deduplicating, and emitting analyzer diagnostics.
2    Copyright (C) 2019-2022 Free Software Foundation, Inc.
3    Contributed by David Malcolm <dmalcolm@redhat.com>.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15 General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "pretty-print.h"
26 #include "gcc-rich-location.h"
27 #include "gimple-pretty-print.h"
28 #include "function.h"
29 #include "diagnostic-core.h"
30 #include "diagnostic-event-id.h"
31 #include "diagnostic-path.h"
32 #include "alloc-pool.h"
33 #include "fibonacci_heap.h"
34 #include "shortest-paths.h"
35 #include "sbitmap.h"
36 #include "bitmap.h"
37 #include "tristate.h"
38 #include "selftest.h"
39 #include "ordered-hash-map.h"
40 #include "json.h"
41 #include "analyzer/analyzer.h"
42 #include "analyzer/analyzer-logging.h"
43 #include "analyzer/sm.h"
44 #include "analyzer/pending-diagnostic.h"
45 #include "analyzer/diagnostic-manager.h"
46 #include "analyzer/call-string.h"
47 #include "analyzer/program-point.h"
48 #include "analyzer/store.h"
49 #include "analyzer/region-model.h"
50 #include "analyzer/constraint-manager.h"
51 #include "cfg.h"
52 #include "basic-block.h"
53 #include "gimple.h"
54 #include "gimple-iterator.h"
55 #include "cgraph.h"
56 #include "digraph.h"
57 #include "analyzer/supergraph.h"
58 #include "analyzer/program-state.h"
59 #include "analyzer/exploded-graph.h"
60 #include "analyzer/trimmed-graph.h"
61 #include "analyzer/feasible-graph.h"
62 #include "analyzer/checker-path.h"
63 #include "analyzer/reachability.h"
64 
65 #if ENABLE_ANALYZER
66 
67 namespace ana {
68 
69 class feasible_worklist;
70 
71 /* State for finding the shortest feasible exploded_path for a
72    saved_diagnostic.
73    This is shared between all diagnostics, so that we avoid repeating work.  */
74 
75 class epath_finder
76 {
77 public:
epath_finder(const exploded_graph & eg)78   epath_finder (const exploded_graph &eg)
79   : m_eg (eg),
80     m_sep (NULL)
81   {
82     /* This is shared by all diagnostics, but only needed if
83        !flag_analyzer_feasibility.  */
84     if (!flag_analyzer_feasibility)
85       m_sep = new shortest_exploded_paths (eg, eg.get_origin (),
86 					   SPS_FROM_GIVEN_ORIGIN);
87   }
88 
~epath_finder()89   ~epath_finder () { delete m_sep; }
90 
get_logger() const91   logger *get_logger () const { return m_eg.get_logger (); }
92 
93   exploded_path *get_best_epath (const exploded_node *target_enode,
94 				 const char *desc, unsigned diag_idx,
95 				 feasibility_problem **out_problem);
96 
97 private:
98   DISABLE_COPY_AND_ASSIGN(epath_finder);
99 
100   exploded_path *explore_feasible_paths (const exploded_node *target_enode,
101 					 const char *desc, unsigned diag_idx);
102   bool process_worklist_item (feasible_worklist *worklist,
103 			      const trimmed_graph &tg,
104 			      feasible_graph *fg,
105 			      const exploded_node *target_enode,
106 			      unsigned diag_idx,
107 			      exploded_path **out_best_path) const;
108   void dump_trimmed_graph (const exploded_node *target_enode,
109 			   const char *desc, unsigned diag_idx,
110 			   const trimmed_graph &tg,
111 			   const shortest_paths<eg_traits, exploded_path> &sep);
112   void dump_feasible_graph (const exploded_node *target_enode,
113 			    const char *desc, unsigned diag_idx,
114 			    const feasible_graph &fg);
115   void dump_feasible_path (const exploded_node *target_enode,
116 			   unsigned diag_idx,
117 			   const feasible_graph &fg,
118 			   const feasible_node &fnode) const;
119 
120   const exploded_graph &m_eg;
121   shortest_exploded_paths *m_sep;
122 };
123 
124 /* class epath_finder.  */
125 
126 /* Get the "best" exploded_path for reaching ENODE from the origin,
127    returning ownership of it to the caller.
128 
129    Ideally we want to report the shortest feasible path.
130    Return NULL if we could not find a feasible path
131    (when flag_analyzer_feasibility is true).
132 
133    If flag_analyzer_feasibility is false, then simply return the
134    shortest path.
135 
136    Use DESC and DIAG_IDX when logging.
137 
138    Write any feasibility_problem to *OUT_PROBLEM.  */
139 
140 exploded_path *
get_best_epath(const exploded_node * enode,const char * desc,unsigned diag_idx,feasibility_problem ** out_problem)141 epath_finder::get_best_epath (const exploded_node *enode,
142 			      const char *desc, unsigned diag_idx,
143 			      feasibility_problem **out_problem)
144 {
145   logger *logger = get_logger ();
146   LOG_SCOPE (logger);
147 
148   unsigned snode_idx = enode->get_supernode ()->m_index;
149   if (logger)
150     logger->log ("considering %qs at EN: %i, SN: %i (sd: %i)",
151 		 desc, enode->m_index, snode_idx, diag_idx);
152 
153   /* State-merging means that not every path in the egraph corresponds
154      to a feasible one w.r.t. states.
155 
156      We want to find the shortest feasible path from the origin to ENODE
157      in the egraph.  */
158 
159   if (flag_analyzer_feasibility)
160     {
161       /* Attempt to find the shortest feasible path using feasible_graph.  */
162       if (logger)
163 	logger->log ("trying to find shortest feasible path");
164       if (exploded_path *epath = explore_feasible_paths (enode, desc, diag_idx))
165 	{
166 	  if (logger)
167 	    logger->log ("accepting %qs at EN: %i, SN: %i (sd: %i)"
168 			 " with feasible path (length: %i)",
169 			 desc, enode->m_index, snode_idx, diag_idx,
170 			 epath->length ());
171 	  return epath;
172 	}
173       else
174 	{
175 	  if (logger)
176 	    logger->log ("rejecting %qs at EN: %i, SN: %i (sd: %i)"
177 			 " due to not finding feasible path",
178 			 desc, enode->m_index, snode_idx, diag_idx);
179 	  return NULL;
180 	}
181     }
182   else
183     {
184       /* As a crude approximation to shortest feasible path, simply find
185 	 the shortest path, and note whether it is feasible.
186 	 There could be longer feasible paths within the egraph, so this
187 	 approach would lead to diagnostics being falsely rejected
188 	 (PR analyzer/96374).  */
189       if (logger)
190 	logger->log ("trying to find shortest path ignoring feasibility");
191       gcc_assert (m_sep);
192       exploded_path *epath
193 	= new exploded_path (m_sep->get_shortest_path (enode));
194       if (epath->feasible_p (logger, out_problem, m_eg.get_engine (), &m_eg))
195 	{
196 	  if (logger)
197 	    logger->log ("accepting %qs at EN: %i, SN: %i (sn: %i)"
198 			 " with feasible path (length: %i)",
199 			 desc, enode->m_index, snode_idx, diag_idx,
200 			 epath->length ());
201 	}
202       else
203 	{
204 	  if (logger)
205 	    logger->log ("accepting %qs at EN: %i, SN: %i (sn: %i) (length: %i)"
206 			 " despite infeasible path (due to %qs)",
207 			 desc, enode->m_index, snode_idx, diag_idx,
208 			 epath->length (),
209 			 "-fno-analyzer-feasibility");
210 	}
211       return epath;
212     }
213 }
214 
215 /* A class for managing the worklist of feasible_nodes in
216    epath_finder::explore_feasible_paths, prioritizing them
217    so that shorter paths appear earlier in the queue.  */
218 
219 class feasible_worklist
220 {
221 public:
feasible_worklist(const shortest_paths<eg_traits,exploded_path> & sep)222   feasible_worklist (const shortest_paths<eg_traits, exploded_path> &sep)
223   : m_queue (key_t (*this, NULL)),
224     m_sep (sep)
225   {
226   }
227 
take_next()228   feasible_node *take_next () { return m_queue.extract_min (); }
229 
add_node(feasible_node * fnode)230   void add_node (feasible_node *fnode)
231   {
232     m_queue.insert (key_t (*this, fnode), fnode);
233   }
234 
235 private:
236   struct key_t
237   {
key_tana::feasible_worklist::key_t238     key_t (const feasible_worklist &w, feasible_node *fnode)
239     : m_worklist (w), m_fnode (fnode)
240     {}
241 
operator <ana::feasible_worklist::key_t242     bool operator< (const key_t &other) const
243     {
244       return cmp (*this, other) < 0;
245     }
246 
operator ==ana::feasible_worklist::key_t247     bool operator== (const key_t &other) const
248     {
249       return cmp (*this, other) == 0;
250     }
251 
operator >ana::feasible_worklist::key_t252     bool operator> (const key_t &other) const
253     {
254       return !(*this == other || *this < other);
255     }
256 
257   private:
cmpana::feasible_worklist::key_t258     static int cmp (const key_t &ka, const key_t &kb)
259     {
260       /* Choose the node for which if the remaining path were feasible,
261 	 it would be the shortest path (summing the length of the
262 	 known-feasible path so far with that of the remaining
263 	 possibly-feasible path).  */
264       int ca = ka.m_worklist.get_estimated_cost (ka.m_fnode);
265       int cb = kb.m_worklist.get_estimated_cost (kb.m_fnode);
266       return ca - cb;
267     }
268 
269     const feasible_worklist &m_worklist;
270     feasible_node *m_fnode;
271   };
272 
273   /* Get the estimated length of a path involving FNODE from
274      the origin to the target enode.
275      Sum the length of the known-feasible path so far with
276      that of the remaining possibly-feasible path.  */
277 
get_estimated_cost(const feasible_node * fnode) const278   int get_estimated_cost (const feasible_node *fnode) const
279   {
280     unsigned length_so_far = fnode->get_path_length ();
281     int shortest_remaining_path
282       = m_sep.get_shortest_distance (fnode->get_inner_node ());
283 
284     gcc_assert (shortest_remaining_path >= 0);
285     /* This should be true since we're only exploring nodes within
286        the trimmed graph (and we anticipate it being much smaller
287        than this, and thus not overflowing the sum).  */
288     gcc_assert (shortest_remaining_path < INT_MAX);
289 
290     return length_so_far + shortest_remaining_path;
291   }
292 
293   /* Priority queue, backed by a fibonacci_heap.  */
294   typedef fibonacci_heap<key_t, feasible_node> queue_t;
295   queue_t m_queue;
296   const shortest_paths<eg_traits, exploded_path> &m_sep;
297 };
298 
299 /* When we're building the exploded graph we want to simplify
300    overly-complicated symbolic values down to "UNKNOWN" to try to avoid
301    state explosions and unbounded chains of exploration.
302 
303    However, when we're building the feasibility graph for a diagnostic
304    (actually a tree), we don't want UNKNOWN values, as conditions on them
305    are also unknown: we don't want to have a contradiction such as a path
306    where (VAL != 0) and then (VAL == 0) along the same path.
307 
308    Hence this is an RAII class for temporarily disabling complexity-checking
309    in the region_model_manager, for use within
310    epath_finder::explore_feasible_paths.
311 
312    We also disable the creation of unknown_svalue instances during feasibility
313    checking, instead creating unique svalues, to avoid paradoxes in paths.  */
314 
315 class auto_checking_feasibility
316 {
317 public:
auto_checking_feasibility(region_model_manager * mgr)318   auto_checking_feasibility (region_model_manager *mgr) : m_mgr (mgr)
319   {
320     m_mgr->begin_checking_feasibility ();
321   }
~auto_checking_feasibility()322   ~auto_checking_feasibility ()
323   {
324     m_mgr->end_checking_feasibility ();
325   }
326 private:
327   region_model_manager *m_mgr;
328 };
329 
330 /* Attempt to find the shortest feasible path from the origin to
331    TARGET_ENODE by iteratively building a feasible_graph, in which
332    every path to a feasible_node is feasible by construction.
333 
334    We effectively explore the tree of feasible paths in order of shortest
335    path until we either find a feasible path to TARGET_ENODE, or hit
336    a limit and give up.
337 
338    Preliminaries:
339    - Find the shortest path from each node to the TARGET_ENODE (without
340    checking feasibility), so that we can prioritize our worklist.
341    - Construct a trimmed_graph: the subset of nodes/edges that
342    are on a path that eventually reaches TARGET_ENODE.  We will only need
343    to consider these when considering the shortest feasible path.
344 
345    Build a feasible_graph, in which every path to a feasible_node
346    is feasible by construction.
347    We use a worklist to flatten the exploration into an iteration.
348    Starting at the origin, find feasible out-edges within the trimmed graph.
349    At each stage, choose the node for which if the remaining path were feasible,
350    it would be the shortest path (summing the length of the known-feasible path
351    so far with that of the remaining possibly-feasible path).
352    This way, the first feasible path we find to TARGET_ENODE is the shortest.
353    We start by trying the shortest possible path, but if that fails,
354    we explore progressively longer paths, eventually trying iterations through
355    loops.  The exploration is captured in the feasible_graph, which can be
356    dumped as a .dot file to visualize the exploration.  The indices of the
357    feasible_nodes show the order in which they were created.
358 
359    This is something of a brute-force approach, but the trimmed_graph
360    hopefully keeps the complexity manageable.
361 
362    Terminate with failure when the number of infeasible edges exceeds
363    a threshold (--param=analyzer-max-infeasible-edges=).
364    This is guaranteed to eventually lead to terminatation, as
365    we can't keep creating feasible nodes without eventually
366    either reaching an infeasible edge, or reaching the
367    TARGET_ENODE.  Specifically, there can't be a cycle of
368    feasible edges that doesn't reach the target_enode without
369    an out-edge that either fails feasibility or gets closer
370    to the TARGET_ENODE: on each iteration we are either:
371    - effectively getting closer to the TARGET_ENODE (which can't
372      continue forever without reaching the target), or
373    - getting monotonically closer to the termination threshold.  */
374 
375 exploded_path *
explore_feasible_paths(const exploded_node * target_enode,const char * desc,unsigned diag_idx)376 epath_finder::explore_feasible_paths (const exploded_node *target_enode,
377 				      const char *desc, unsigned diag_idx)
378 {
379   logger *logger = get_logger ();
380   LOG_SCOPE (logger);
381 
382   region_model_manager *mgr = m_eg.get_engine ()->get_model_manager ();
383 
384   /* Determine the shortest path to TARGET_ENODE from each node in
385      the exploded graph.  */
386   shortest_paths<eg_traits, exploded_path> sep
387     (m_eg, target_enode, SPS_TO_GIVEN_TARGET);
388 
389   /* Construct a trimmed_graph: the subset of nodes/edges that
390      are on a path that eventually reaches TARGET_ENODE.
391      We only need to consider these when considering the shortest
392      feasible path.  */
393   trimmed_graph tg (m_eg, target_enode);
394 
395   if (flag_dump_analyzer_feasibility)
396     dump_trimmed_graph (target_enode, desc, diag_idx, tg, sep);
397 
398   feasible_graph fg;
399   feasible_worklist worklist (sep);
400 
401   /* Populate the worklist with the origin node.  */
402   {
403     feasibility_state init_state (mgr, m_eg.get_supergraph ());
404     feasible_node *origin = fg.add_node (m_eg.get_origin (), init_state, 0);
405     worklist.add_node (origin);
406   }
407 
408   /* Iteratively explore the tree of feasible paths in order of shortest
409      path until we either find a feasible path to TARGET_ENODE, or hit
410      a limit.  */
411 
412   /* Set this if we find a feasible path to TARGET_ENODE.  */
413   exploded_path *best_path = NULL;
414 
415   {
416     auto_checking_feasibility sentinel (mgr);
417 
418     while (process_worklist_item (&worklist, tg, &fg, target_enode, diag_idx,
419 				  &best_path))
420       {
421 	/* Empty; the work is done within process_worklist_item.  */
422       }
423   }
424 
425   if (logger)
426     {
427       logger->log ("tg for sd: %i:", diag_idx);
428       logger->inc_indent ();
429       tg.log_stats (logger);
430       logger->dec_indent ();
431 
432       logger->log ("fg for sd: %i:", diag_idx);
433       logger->inc_indent ();
434       fg.log_stats (logger);
435       logger->dec_indent ();
436     }
437 
438   /* Dump the feasible_graph.  */
439   if (flag_dump_analyzer_feasibility)
440     dump_feasible_graph (target_enode, desc, diag_idx, fg);
441 
442   return best_path;
443 }
444 
445 /* Process the next item in WORKLIST, potentially adding new items
446    based on feasible out-edges, and extending FG accordingly.
447    Use TG to ignore out-edges that don't lead to TARGET_ENODE.
448    Return true if the worklist processing should continue.
449    Return false if the processing of the worklist should stop
450    (either due to reaching TARGET_ENODE, or hitting a limit).
451    Write to *OUT_BEST_PATH if stopping due to finding a feasible path
452    to TARGET_ENODE.  */
453 
454 bool
process_worklist_item(feasible_worklist * worklist,const trimmed_graph & tg,feasible_graph * fg,const exploded_node * target_enode,unsigned diag_idx,exploded_path ** out_best_path) const455 epath_finder::process_worklist_item (feasible_worklist *worklist,
456 				     const trimmed_graph &tg,
457 				     feasible_graph *fg,
458 				     const exploded_node *target_enode,
459 				     unsigned diag_idx,
460 				     exploded_path **out_best_path) const
461 {
462   logger *logger = get_logger ();
463 
464   feasible_node *fnode = worklist->take_next ();
465   if (!fnode)
466     {
467       if (logger)
468 	logger->log ("drained worklist for sd: %i"
469 		     " without finding feasible path",
470 		     diag_idx);
471       return false;
472     }
473 
474   log_scope s (logger, "fg worklist item",
475 	       "considering FN: %i (EN: %i) for sd: %i",
476 	       fnode->get_index (), fnode->get_inner_node ()->m_index,
477 	       diag_idx);
478 
479   /* Iterate through all out-edges from this item.  */
480   unsigned i;
481   exploded_edge *succ_eedge;
482   FOR_EACH_VEC_ELT (fnode->get_inner_node ()->m_succs, i, succ_eedge)
483     {
484       log_scope s (logger, "edge", "considering edge: EN:%i -> EN:%i",
485 		   succ_eedge->m_src->m_index,
486 		   succ_eedge->m_dest->m_index);
487       /* Reject edges that aren't in the trimmed graph.  */
488       if (!tg.contains_p (succ_eedge))
489 	{
490 	  if (logger)
491 	    logger->log ("rejecting: not in trimmed graph");
492 	  continue;
493 	}
494 
495       feasibility_state succ_state (fnode->get_state ());
496       rejected_constraint *rc = NULL;
497       if (succ_state.maybe_update_for_edge (logger, succ_eedge, &rc))
498 	{
499 	  gcc_assert (rc == NULL);
500 	  feasible_node *succ_fnode
501 	    = fg->add_node (succ_eedge->m_dest,
502 			    succ_state,
503 			    fnode->get_path_length () + 1);
504 	  if (logger)
505 	    logger->log ("accepting as FN: %i", succ_fnode->get_index ());
506 	  fg->add_edge (new feasible_edge (fnode, succ_fnode, succ_eedge));
507 
508 	  /* Have we reached TARGET_ENODE?  */
509 	  if (succ_fnode->get_inner_node () == target_enode)
510 	    {
511 	      if (logger)
512 		logger->log ("success: got feasible path to EN: %i (sd: %i)"
513 			     " (length: %i)",
514 			     target_enode->m_index, diag_idx,
515 			     succ_fnode->get_path_length ());
516 	      *out_best_path = fg->make_epath (succ_fnode);
517 	      if (flag_dump_analyzer_feasibility)
518 		dump_feasible_path (target_enode, diag_idx, *fg, *succ_fnode);
519 
520 	      /* Success: stop the worklist iteration.  */
521 	      return false;
522 	    }
523 	  else
524 	    worklist->add_node (succ_fnode);
525 	}
526       else
527 	{
528 	  if (logger)
529 	    logger->log ("infeasible");
530 	  gcc_assert (rc);
531 	  fg->add_feasibility_problem (fnode,
532 				       succ_eedge,
533 				       rc);
534 
535 	  /* Give up if there have been too many infeasible edges.  */
536 	  if (fg->get_num_infeasible ()
537 	      > (unsigned)param_analyzer_max_infeasible_edges)
538 	    {
539 	      if (logger)
540 		logger->log ("too many infeasible edges (%i); giving up",
541 			     fg->get_num_infeasible ());
542 	      return false;
543 	    }
544 	}
545     }
546 
547   /* Continue the worklist iteration.  */
548   return true;
549 }
550 
551 /* Helper class for epath_finder::dump_trimmed_graph
552    to dump extra per-node information.
553    Use SEP to add the length of the shortest path from each
554    node to the target node to each node's dump.  */
555 
556 class dump_eg_with_shortest_path : public eg_traits::dump_args_t
557 {
558 public:
dump_eg_with_shortest_path(const exploded_graph & eg,const shortest_paths<eg_traits,exploded_path> & sep)559   dump_eg_with_shortest_path
560     (const exploded_graph &eg,
561      const shortest_paths<eg_traits, exploded_path> &sep)
562   : dump_args_t (eg),
563     m_sep (sep)
564   {
565   }
566 
dump_extra_info(const exploded_node * enode,pretty_printer * pp) const567   void dump_extra_info (const exploded_node *enode,
568 			pretty_printer *pp) const FINAL OVERRIDE
569   {
570     pp_printf (pp, "sp: %i", m_sep.get_shortest_path (enode).length ());
571     pp_newline (pp);
572   }
573 
574 private:
575   const shortest_paths<eg_traits, exploded_path> &m_sep;
576 };
577 
578 /* Dump TG to "BASE_NAME.DESC.DIAG_IDX.to-enN.tg.dot",
579    annotating each node with the length of the shortest path
580    from that node to TARGET_ENODE (using SEP).  */
581 
582 void
583 epath_finder::
dump_trimmed_graph(const exploded_node * target_enode,const char * desc,unsigned diag_idx,const trimmed_graph & tg,const shortest_paths<eg_traits,exploded_path> & sep)584 dump_trimmed_graph (const exploded_node *target_enode,
585 		    const char *desc, unsigned diag_idx,
586 		    const trimmed_graph &tg,
587 		    const shortest_paths<eg_traits, exploded_path> &sep)
588 {
589   auto_timevar tv (TV_ANALYZER_DUMP);
590   dump_eg_with_shortest_path inner_args (m_eg, sep);
591   trimmed_graph::dump_args_t args (inner_args);
592   pretty_printer pp;
593   pp_printf (&pp, "%s.%s.%i.to-en%i.tg.dot",
594 	     dump_base_name, desc, diag_idx, target_enode->m_index);
595   char *filename = xstrdup (pp_formatted_text (&pp));
596   tg.dump_dot (filename, NULL, args);
597   free (filename);
598 }
599 
600 /* Dump FG to "BASE_NAME.DESC.DIAG_IDX.to-enN.fg.dot".  */
601 
602 void
dump_feasible_graph(const exploded_node * target_enode,const char * desc,unsigned diag_idx,const feasible_graph & fg)603 epath_finder::dump_feasible_graph (const exploded_node *target_enode,
604 				   const char *desc, unsigned diag_idx,
605 				   const feasible_graph &fg)
606 {
607   auto_timevar tv (TV_ANALYZER_DUMP);
608   exploded_graph::dump_args_t inner_args (m_eg);
609   feasible_graph::dump_args_t args (inner_args);
610   pretty_printer pp;
611   pp_printf (&pp, "%s.%s.%i.to-en%i.fg.dot",
612 	     dump_base_name, desc, diag_idx, target_enode->m_index);
613   char *filename = xstrdup (pp_formatted_text (&pp));
614   fg.dump_dot (filename, NULL, args);
615   free (filename);
616 }
617 
618 /* Dump the path to FNODE to "BASE_NAME.DIAG_IDX.to-enN.fpath.txt".  */
619 
620 void
dump_feasible_path(const exploded_node * target_enode,unsigned diag_idx,const feasible_graph & fg,const feasible_node & fnode) const621 epath_finder::dump_feasible_path (const exploded_node *target_enode,
622 				  unsigned diag_idx,
623 				  const feasible_graph &fg,
624 				  const feasible_node &fnode) const
625 {
626   auto_timevar tv (TV_ANALYZER_DUMP);
627   pretty_printer pp;
628   pp_printf (&pp, "%s.%i.to-en%i.fpath.txt",
629 	     dump_base_name, diag_idx, target_enode->m_index);
630   char *filename = xstrdup (pp_formatted_text (&pp));
631   fg.dump_feasible_path (fnode, filename);
632   free (filename);
633 }
634 
635 /* class saved_diagnostic.  */
636 
637 /* saved_diagnostic's ctor.
638    Take ownership of D and STMT_FINDER.  */
639 
saved_diagnostic(const state_machine * sm,const exploded_node * enode,const supernode * snode,const gimple * stmt,stmt_finder * stmt_finder,tree var,const svalue * sval,state_machine::state_t state,pending_diagnostic * d,unsigned idx)640 saved_diagnostic::saved_diagnostic (const state_machine *sm,
641 				    const exploded_node *enode,
642 				    const supernode *snode, const gimple *stmt,
643 				    stmt_finder *stmt_finder,
644 				    tree var,
645 				    const svalue *sval,
646 				    state_machine::state_t state,
647 				    pending_diagnostic *d,
648 				    unsigned idx)
649 : m_sm (sm), m_enode (enode), m_snode (snode), m_stmt (stmt),
650  /* stmt_finder could be on-stack; we want our own copy that can
651     outlive that.  */
652   m_stmt_finder (stmt_finder ? stmt_finder->clone () : NULL),
653   m_var (var), m_sval (sval), m_state (state),
654   m_d (d), m_trailing_eedge (NULL),
655   m_idx (idx),
656   m_best_epath (NULL), m_problem (NULL),
657   m_notes ()
658 {
659   gcc_assert (m_stmt || m_stmt_finder);
660 
661   /* We must have an enode in order to be able to look for paths
662      through the exploded_graph to this diagnostic.  */
663   gcc_assert (m_enode);
664 }
665 
666 /* saved_diagnostic's dtor.  */
667 
~saved_diagnostic()668 saved_diagnostic::~saved_diagnostic ()
669 {
670   delete m_stmt_finder;
671   delete m_d;
672   delete m_best_epath;
673   delete m_problem;
674 }
675 
676 bool
operator ==(const saved_diagnostic & other) const677 saved_diagnostic::operator== (const saved_diagnostic &other) const
678 {
679   if (m_notes.length () != other.m_notes.length ())
680     return false;
681   for (unsigned i = 0; i < m_notes.length (); i++)
682     if (!m_notes[i]->equal_p (*other.m_notes[i]))
683       return false;
684   return (m_sm == other.m_sm
685 	  /* We don't compare m_enode.  */
686 	  && m_snode == other.m_snode
687 	  && m_stmt == other.m_stmt
688 	  /* We don't compare m_stmt_finder.  */
689 	  && pending_diagnostic::same_tree_p (m_var, other.m_var)
690 	  && m_state == other.m_state
691 	  && m_d->equal_p (*other.m_d)
692 	  && m_trailing_eedge == other.m_trailing_eedge);
693 }
694 
695 /* Add PN to this diagnostic, taking ownership of it.  */
696 
697 void
add_note(pending_note * pn)698 saved_diagnostic::add_note (pending_note *pn)
699 {
700   gcc_assert (pn);
701   m_notes.safe_push (pn);
702 }
703 
704 /* Return a new json::object of the form
705    {"sm": optional str,
706     "enode": int,
707     "snode": int,
708     "sval": optional str,
709     "state": optional str,
710     "path_length": optional int,
711     "pending_diagnostic": str,
712     "idx": int}.  */
713 
714 json::object *
to_json() const715 saved_diagnostic::to_json () const
716 {
717   json::object *sd_obj = new json::object ();
718 
719   if (m_sm)
720     sd_obj->set ("sm", new json::string (m_sm->get_name ()));
721   sd_obj->set ("enode", new json::integer_number (m_enode->m_index));
722   sd_obj->set ("snode", new json::integer_number (m_snode->m_index));
723   if (m_sval)
724     sd_obj->set ("sval", m_sval->to_json ());
725   if (m_state)
726     sd_obj->set ("state", m_state->to_json ());
727   if (m_best_epath)
728     sd_obj->set ("path_length", new json::integer_number (get_epath_length ()));
729   sd_obj->set ("pending_diagnostic", new json::string (m_d->get_kind ()));
730   sd_obj->set ("idx", new json::integer_number (m_idx));
731 
732   /* We're not yet JSONifying the following fields:
733      const gimple *m_stmt;
734      stmt_finder *m_stmt_finder;
735      tree m_var;
736      exploded_edge *m_trailing_eedge;
737      enum status m_status;
738      feasibility_problem *m_problem;
739      auto_delete_vec <pending_note> m_notes;
740   */
741 
742   return sd_obj;
743 }
744 
745 /* Dump this to PP in a form suitable for use as an id in .dot output.  */
746 
747 void
dump_dot_id(pretty_printer * pp) const748 saved_diagnostic::dump_dot_id (pretty_printer *pp) const
749 {
750   pp_printf (pp, "sd_%i", m_idx);
751 }
752 
753 /* Dump this to PP in a form suitable for use as a node in .dot output.  */
754 
755 void
dump_as_dot_node(pretty_printer * pp) const756 saved_diagnostic::dump_as_dot_node (pretty_printer *pp) const
757 {
758   dump_dot_id (pp);
759   pp_printf (pp,
760 	     " [shape=none,margin=0,style=filled,fillcolor=\"red\",label=\"");
761   pp_write_text_to_stream (pp);
762 
763   /* Node label.  */
764   pp_printf (pp, "DIAGNOSTIC: %s (sd: %i)\n",
765 	     m_d->get_kind (), m_idx);
766   if (m_sm)
767     {
768       pp_printf (pp, "sm: %s", m_sm->get_name ());
769       if (m_state)
770 	{
771 	  pp_printf (pp, "; state: ");
772 	  m_state->dump_to_pp (pp);
773 	}
774       pp_newline (pp);
775     }
776   if (m_stmt)
777     {
778       pp_string (pp, "stmt: ");
779       pp_gimple_stmt_1 (pp, m_stmt, 0, (dump_flags_t)0);
780       pp_newline (pp);
781     }
782   if (m_var)
783     pp_printf (pp, "var: %qE\n", m_var);
784   if (m_sval)
785     {
786       pp_string (pp, "sval: ");
787       m_sval->dump_to_pp (pp, true);
788       pp_newline (pp);
789     }
790   if (m_best_epath)
791     pp_printf (pp, "path length: %i\n", get_epath_length ());
792 
793   pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/true);
794   pp_string (pp, "\"];\n\n");
795 
796   /* Show links to duplicates.  */
797   for (auto iter : m_duplicates)
798     {
799       dump_dot_id (pp);
800       pp_string (pp, " -> ");
801       iter->dump_dot_id (pp);
802       pp_string (pp, " [style=\"dotted\" arrowhead=\"none\"];");
803       pp_newline (pp);
804     }
805 }
806 
807 /* Use PF to find the best exploded_path for this saved_diagnostic,
808    and store it in m_best_epath.
809    If m_stmt is still NULL, use m_stmt_finder on the epath to populate
810    m_stmt.
811    Return true if a best path was found.  */
812 
813 bool
calc_best_epath(epath_finder * pf)814 saved_diagnostic::calc_best_epath (epath_finder *pf)
815 {
816   logger *logger = pf->get_logger ();
817   LOG_SCOPE (logger);
818   delete m_best_epath;
819   delete m_problem;
820   m_problem = NULL;
821 
822   m_best_epath = pf->get_best_epath (m_enode, m_d->get_kind (), m_idx,
823 				     &m_problem);
824 
825   /* Handle failure to find a feasible path.  */
826   if (m_best_epath == NULL)
827     return false;
828 
829   gcc_assert (m_best_epath);
830   if (m_stmt == NULL)
831     {
832       gcc_assert (m_stmt_finder);
833       m_stmt = m_stmt_finder->find_stmt (*m_best_epath);
834     }
835   gcc_assert (m_stmt);
836 
837   return true;
838 }
839 
840 unsigned
get_epath_length() const841 saved_diagnostic::get_epath_length () const
842 {
843   gcc_assert (m_best_epath);
844   return m_best_epath->length ();
845 }
846 
847 /* Record that OTHER (and its duplicates) are duplicates
848    of this saved_diagnostic.  */
849 
850 void
add_duplicate(saved_diagnostic * other)851 saved_diagnostic::add_duplicate (saved_diagnostic *other)
852 {
853   gcc_assert (other);
854   m_duplicates.reserve (m_duplicates.length ()
855 			+ other->m_duplicates.length ()
856 			+ 1);
857   m_duplicates.splice (other->m_duplicates);
858   other->m_duplicates.truncate (0);
859   m_duplicates.safe_push (other);
860 }
861 
862 /* Return true if this diagnostic supercedes OTHER, and that OTHER should
863    therefore not be emitted.  */
864 
865 bool
supercedes_p(const saved_diagnostic & other) const866 saved_diagnostic::supercedes_p (const saved_diagnostic &other) const
867 {
868   /* They should be at the same stmt.  */
869   if (m_stmt != other.m_stmt)
870     return false;
871   return m_d->supercedes_p (*other.m_d);
872 }
873 
874 /* Emit any pending notes owned by this diagnostic.  */
875 
876 void
emit_any_notes() const877 saved_diagnostic::emit_any_notes () const
878 {
879   for (auto pn : m_notes)
880     pn->emit ();
881 }
882 
/* State for building a checker_path from a particular exploded_path.
   In particular, this precomputes reachability information: the set of
   source enodes for which a path be found to the diagnostic enode.  */

class path_builder
{
public:
  /* EPATH's final enode is the one at which the diagnostic is to be
     emitted; PROBLEM (possibly NULL) is any feasibility_problem
     associated with SD.  */
  path_builder (const exploded_graph &eg,
		const exploded_path &epath,
		const feasibility_problem *problem,
		const saved_diagnostic &sd)
  : m_eg (eg),
    m_diag_enode (epath.get_final_enode ()),
    m_sd (sd),
    m_reachability (eg, m_diag_enode),
    m_feasibility_problem (problem)
  {}

  /* The enode at which the diagnostic is to be emitted.  */
  const exploded_node *get_diag_node () const { return m_diag_enode; }

  pending_diagnostic *get_pending_diagnostic () const
  {
    return m_sd.m_d;
  }

  /* Return true if a path exists within the exploded_graph from
     SRC_ENODE to the diagnostic enode, using the precomputed
     reachability information.  */
  bool reachable_from_p (const exploded_node *src_enode) const
  {
    return m_reachability.reachable_from_p (src_enode);
  }

  const extrinsic_state &get_ext_state () const { return m_eg.get_ext_state (); }

  const feasibility_problem *get_feasibility_problem () const
  {
    return m_feasibility_problem;
  }

  /* The state machine of the saved diagnostic; NULL for diagnostics
     that aren't associated with one (see the add_diagnostic overload
     that passes NULL).  */
  const state_machine *get_sm () const { return m_sd.m_sm; }

private:
  typedef reachability<eg_traits> enode_reachability;

  const exploded_graph &m_eg;

  /* The enode where the diagnostic occurs.  */
  const exploded_node *m_diag_enode;

  const saved_diagnostic &m_sd;

  /* Precompute all enodes from which the diagnostic is reachable.
     NOTE: declared after m_diag_enode, on which its initialization
     depends; member-init order matters here.  */
  enode_reachability m_reachability;

  const feasibility_problem *m_feasibility_problem;
};
937 
938 /* Determine the emission location for PD at STMT in FUN.  */
939 
940 static location_t
get_emission_location(const gimple * stmt,function * fun,const pending_diagnostic & pd)941 get_emission_location (const gimple *stmt, function *fun,
942 		       const pending_diagnostic &pd)
943 {
944   location_t loc = get_stmt_location (stmt, fun);
945 
946   /* Allow the pending_diagnostic to fix up the location.  */
947   loc = pd.fixup_location (loc);
948 
949   return loc;
950 }
951 
952 /* class diagnostic_manager.  */
953 
/* diagnostic_manager's ctor.  LOGGER (possibly NULL), ENG, and the
   path-pruning VERBOSITY level are stashed for use when saving and
   emitting diagnostics; the disabled-diagnostic counter starts at 0.  */

diagnostic_manager::diagnostic_manager (logger *logger, engine *eng,
					int verbosity)
: log_user (logger), m_eng (eng), m_verbosity (verbosity),
  m_num_disabled_diagnostics (0)
{
}
962 
963 /* Queue pending_diagnostic D at ENODE for later emission.
964    Return true/false signifying if the diagnostic was actually added.
965    Take ownership of D (or delete it).  */
966 
967 bool
add_diagnostic(const state_machine * sm,exploded_node * enode,const supernode * snode,const gimple * stmt,stmt_finder * finder,tree var,const svalue * sval,state_machine::state_t state,pending_diagnostic * d)968 diagnostic_manager::add_diagnostic (const state_machine *sm,
969 				    exploded_node *enode,
970 				    const supernode *snode, const gimple *stmt,
971 				    stmt_finder *finder,
972 				    tree var,
973 				    const svalue *sval,
974 				    state_machine::state_t state,
975 				    pending_diagnostic *d)
976 {
977   LOG_FUNC (get_logger ());
978 
979   /* We must have an enode in order to be able to look for paths
980      through the exploded_graph to the diagnostic.  */
981   gcc_assert (enode);
982 
983   /* If this warning is ultimately going to be rejected by a -Wno-analyzer-*
984      flag, reject it now.
985      We can only do this for diagnostics where we already know the stmt,
986      and thus can determine the emission location.  */
987   if (stmt)
988     {
989       location_t loc = get_emission_location (stmt, snode->m_fun, *d);
990       int option = d->get_controlling_option ();
991       if (!warning_enabled_at (loc, option))
992 	{
993 	  if (get_logger ())
994 	    get_logger ()->log ("rejecting disabled warning %qs",
995 				d->get_kind ());
996 	  delete d;
997 	  m_num_disabled_diagnostics++;
998 	  return false;
999 	}
1000     }
1001 
1002   saved_diagnostic *sd
1003     = new saved_diagnostic (sm, enode, snode, stmt, finder, var, sval,
1004 			    state, d, m_saved_diagnostics.length ());
1005   m_saved_diagnostics.safe_push (sd);
1006   enode->add_diagnostic (sd);
1007   if (get_logger ())
1008     log ("adding saved diagnostic %i at SN %i to EN %i: %qs",
1009 	 sd->get_index (),
1010 	 snode->m_index, enode->m_index, d->get_kind ());
1011   return true;
1012 }
1013 
/* Queue pending_diagnostic D at ENODE for later emission.
   Return true/false signifying if the diagnostic was actually added.
   Take ownership of D (or delete it).
   Convenience overload for diagnostics with no associated state
   machine: delegates with a NULL sm, no tree/svalue, and state 0.  */

bool
diagnostic_manager::add_diagnostic (exploded_node *enode,
				    const supernode *snode, const gimple *stmt,
				    stmt_finder *finder,
				    pending_diagnostic *d)
{
  gcc_assert (enode);
  return add_diagnostic (NULL, enode, snode, stmt, finder, NULL_TREE,
			 NULL, 0, d);
}
1028 
1029 /* Add PN to the most recent saved_diagnostic.  */
1030 
1031 void
add_note(pending_note * pn)1032 diagnostic_manager::add_note (pending_note *pn)
1033 {
1034   LOG_FUNC (get_logger ());
1035   gcc_assert (pn);
1036 
1037   /* Get most recent saved_diagnostic.  */
1038   gcc_assert (m_saved_diagnostics.length () > 0);
1039   saved_diagnostic *sd = m_saved_diagnostics[m_saved_diagnostics.length () - 1];
1040   sd->add_note (pn);
1041 }
1042 
1043 /* Return a new json::object of the form
1044    {"diagnostics"  : [obj for saved_diagnostic]}.  */
1045 
1046 json::object *
to_json() const1047 diagnostic_manager::to_json () const
1048 {
1049   json::object *dm_obj = new json::object ();
1050 
1051   {
1052     json::array *sd_arr = new json::array ();
1053     int i;
1054     saved_diagnostic *sd;
1055     FOR_EACH_VEC_ELT (m_saved_diagnostics, i, sd)
1056       sd_arr->append (sd->to_json ());
1057     dm_obj->set ("diagnostics", sd_arr);
1058   }
1059 
1060   return dm_obj;
1061 }
1062 
/* A class for identifying sets of duplicated pending_diagnostic.

   We want to find the simplest saved_diagnostic amongst those that share a
   dedupe_key.  */

class dedupe_key
{
public:
  dedupe_key (const saved_diagnostic &sd)
  : m_sd (sd), m_stmt (sd.m_stmt)
  {
    gcc_assert (m_stmt);
  }

  /* Hash currently covers only the stmt pointer; keys with the same
     stmt but different saved_diagnostics collide and are resolved by
     operator== below.  */
  hashval_t hash () const
  {
    inchash::hash hstate;
    hstate.add_ptr (m_stmt);
    // TODO: m_sd
    return hstate.end ();
  }
  bool operator== (const dedupe_key &other) const
  {
    return (m_sd == other.m_sd
	    && m_stmt == other.m_stmt);
  }

  location_t get_location () const
  {
    return m_stmt->location;
  }

  /* A qsort comparator for use by dedupe_winners::emit_best
     to sort them into location_t order.  */

  static int
  comparator (const void *p1, const void *p2)
  {
    const dedupe_key *pk1 = *(const dedupe_key * const *)p1;
    const dedupe_key *pk2 = *(const dedupe_key * const *)p2;

    location_t loc1 = pk1->get_location ();
    location_t loc2 = pk2->get_location ();

    /* Primary sort: by location.  NOTE(review): the arguments are
       passed as (loc2, loc1); presumably this yields the intended
       emission order -- confirm against linemap_compare_locations's
       contract in libcpp before changing.  */
    if (int cmp = linemap_compare_locations (line_table, loc2, loc1))
      return cmp;
    /* Tie-break on path length, then on diagnostic kind, so the
       overall ordering is deterministic.  */
    if (int cmp = ((int)pk1->m_sd.get_epath_length ()
		   - (int)pk2->m_sd.get_epath_length ()))
      return cmp;
    if (int cmp = strcmp (pk1->m_sd.m_d->get_kind (),
			  pk2->m_sd.m_d->get_kind ()))
      return cmp;
    return 0;
  }

  const saved_diagnostic &m_sd;
  const gimple *m_stmt;
};
1121 
/* Traits for use by dedupe_winners.
   Keys are pointers, but hashing/equality go through the pointed-to
   dedupe_key (value equality, not pointer identity).  */

class dedupe_hash_map_traits
{
public:
  typedef const dedupe_key *key_type;
  typedef saved_diagnostic *value_type;
  typedef saved_diagnostic *compare_type;

  /* Delegate to the key's own hash function.  */
  static inline hashval_t hash (const key_type &v)
  {
    return v->hash ();
  }
  /* Compare keys by value via dedupe_key::operator==.  */
  static inline bool equal_keys (const key_type &k1, const key_type &k2)
  {
    return *k1 == *k2;
  }
  template <typename T>
  static inline void remove (T &)
  {
    // TODO
  }
  /* Deleted slots are marked with the impossible pointer value 1;
     empty slots with NULL (hence empty_zero_p below).  */
  template <typename T>
  static inline void mark_deleted (T &entry)
  {
    entry.m_key = reinterpret_cast<key_type> (1);
  }
  template <typename T>
  static inline void mark_empty (T &entry)
  {
    entry.m_key = NULL;
  }
  template <typename T>
  static inline bool is_deleted (const T &entry)
  {
    return entry.m_key == reinterpret_cast<key_type> (1);
  }
  template <typename T>
  static inline bool is_empty (const T &entry)
  {
    return entry.m_key == NULL;
  }
  static const bool empty_zero_p = true;
};
1166 
/* A class for deduplicating diagnostics and finding (and emitting) the
   best saved_diagnostic within each partition.  */

class dedupe_winners
{
public:
  ~dedupe_winners ()
  {
    /* Delete all keys, but not the saved_diagnostics (those are owned
       by the diagnostic_manager).  */
    for (map_t::iterator iter = m_map.begin ();
	 iter != m_map.end ();
	 ++iter)
      delete (*iter).first;
  }

  /* Determine an exploded_path for SD using PF and, if it's feasible,
     determine if SD is the best seen so far for its dedupe_key.
     Record the winning SD for each dedupe_key.  */

  void add (logger *logger,
	    epath_finder *pf,
	    saved_diagnostic *sd)
  {
    /* Determine best epath for SD; if no feasible path exists, SD is
       silently dropped from consideration.  */
    if (!sd->calc_best_epath (pf))
      return;

    dedupe_key *key = new dedupe_key (*sd);
    if (saved_diagnostic **slot = m_map.get (key))
      {
	if (logger)
	  logger->log ("already have this dedupe_key");

	saved_diagnostic *cur_best_sd = *slot;

	if (sd->get_epath_length () < cur_best_sd->get_epath_length ())
	  {
	    /* We've got a shorter path for the key; replace
	       the current candidate, marking it as a duplicate of SD.  */
	    if (logger)
	      logger->log ("length %i is better than existing length %i;"
			   " taking over this dedupe_key",
			   sd->get_epath_length (),
			   cur_best_sd->get_epath_length ());
	    sd->add_duplicate (cur_best_sd);
	    *slot = sd;
	  }
	else
	  /* We haven't beaten the current best candidate; add SD
	     as a duplicate of it.  */
	  {
	    if (logger)
	      logger->log ("length %i isn't better than existing length %i;"
			   " dropping this candidate",
			   sd->get_epath_length (),
			   cur_best_sd->get_epath_length ());
	    cur_best_sd->add_duplicate (sd);
	  }
	/* The map retains its originally-inserted key; this freshly
	   built copy is no longer needed.  */
	delete key;
      }
    else
      {
	/* This is the first candidate for this key; the map takes
	   ownership of KEY (freed in the dtor).  */
	m_map.put (key, sd);
	if (logger)
	  logger->log ("first candidate for this dedupe_key");
      }
  }

  /* Handle interactions between the dedupe winners, so that some
     diagnostics can supercede others (of different kinds).

     We want use-after-free to supercede use-of-unitialized-value,
     so that if we have these at the same stmt, we don't emit
     a use-of-uninitialized, just the use-after-free.  */

  void handle_interactions (diagnostic_manager *dm)
  {
    LOG_SCOPE (dm->get_logger ());
    auto_vec<const dedupe_key *> superceded;
    /* O(n^2) over the winners.  NOTE(review): the inner loop also
       pairs OUTER with itself; this presumes supercedes_p is false
       for a diagnostic compared against itself -- confirm in the
       pending_diagnostic subclasses before relying on it.  */
    for (auto outer : m_map)
      {
	const saved_diagnostic *outer_sd = outer.second;
	for (auto inner : m_map)
	  {
	    const saved_diagnostic *inner_sd = inner.second;
	    if (inner_sd->supercedes_p (*outer_sd))
	      {
		superceded.safe_push (outer.first);
		if (dm->get_logger ())
		  dm->log ("sd[%i] \"%s\" superceded by sd[%i] \"%s\"",
			   outer_sd->get_index (), outer_sd->m_d->get_kind (),
			   inner_sd->get_index (), inner_sd->m_d->get_kind ());
		break;
	      }
	  }
      }
    /* Removal is deferred until here so we don't mutate m_map while
       iterating over it above.  */
    for (auto iter : superceded)
      m_map.remove (iter);
  }

 /* Emit the simplest diagnostic within each set.  */

  void emit_best (diagnostic_manager *dm,
		  const exploded_graph &eg)
  {
    LOG_SCOPE (dm->get_logger ());

    /* Get keys into a vec for sorting.  */
    auto_vec<const dedupe_key *> keys (m_map.elements ());
    for (map_t::iterator iter = m_map.begin ();
	 iter != m_map.end ();
	 ++iter)
      keys.quick_push ((*iter).first);

    dm->log ("# keys after de-duplication: %i", keys.length ());

    /* Sort into a good emission order.  */
    keys.qsort (dedupe_key::comparator);

    /* Emit the best saved_diagnostics for each key.  KEY came from
       m_map itself, so the lookup cannot fail.  */
    int i;
    const dedupe_key *key;
    FOR_EACH_VEC_ELT (keys, i, key)
      {
	saved_diagnostic **slot = m_map.get (key);
	gcc_assert (*slot);
	const saved_diagnostic *sd = *slot;
	dm->emit_saved_diagnostic (eg, *sd);
      }
  }

private:
  /* This maps from each dedupe_key to a current best saved_diagnostic.  */

  typedef hash_map<const dedupe_key *, saved_diagnostic *,
		   dedupe_hash_map_traits> map_t;
  map_t m_map;
};
1306 
1307 /* Emit all saved diagnostics.  */
1308 
1309 void
emit_saved_diagnostics(const exploded_graph & eg)1310 diagnostic_manager::emit_saved_diagnostics (const exploded_graph &eg)
1311 {
1312   LOG_SCOPE (get_logger ());
1313   auto_timevar tv (TV_ANALYZER_DIAGNOSTICS);
1314   log ("# saved diagnostics: %i", m_saved_diagnostics.length ());
1315   log ("# disabled diagnostics: %i", m_num_disabled_diagnostics);
1316   if (get_logger ())
1317     {
1318       unsigned i;
1319       saved_diagnostic *sd;
1320       FOR_EACH_VEC_ELT (m_saved_diagnostics, i, sd)
1321 	log ("[%i] sd: %qs at EN: %i, SN: %i",
1322 	     i, sd->m_d->get_kind (), sd->m_enode->m_index,
1323 	     sd->m_snode->m_index);
1324     }
1325 
1326   if (m_saved_diagnostics.length () == 0)
1327     return;
1328 
1329   /* Compute the shortest_paths once, sharing it between all diagnostics.  */
1330   epath_finder pf (eg);
1331 
1332   /* Iterate through all saved diagnostics, adding them to a dedupe_winners
1333      instance.  This partitions the saved diagnostics by dedupe_key,
1334      generating exploded_paths for them, and retaining the best one in each
1335      partition.  */
1336   dedupe_winners best_candidates;
1337 
1338   int i;
1339   saved_diagnostic *sd;
1340   FOR_EACH_VEC_ELT (m_saved_diagnostics, i, sd)
1341     best_candidates.add (get_logger (), &pf, sd);
1342 
1343   best_candidates.handle_interactions (this);
1344 
1345   /* For each dedupe-key, call emit_saved_diagnostic on the "best"
1346      saved_diagnostic.  */
1347   best_candidates.emit_best (this, eg);
1348 }
1349 
1350 /* Given a saved_diagnostic SD with m_best_epath through EG,
1351    create an checker_path of suitable events and use it to call
1352    SD's underlying pending_diagnostic "emit" vfunc to emit a diagnostic.  */
1353 
1354 void
emit_saved_diagnostic(const exploded_graph & eg,const saved_diagnostic & sd)1355 diagnostic_manager::emit_saved_diagnostic (const exploded_graph &eg,
1356 					   const saved_diagnostic &sd)
1357 {
1358   LOG_SCOPE (get_logger ());
1359   log ("sd: %qs at SN: %i", sd.m_d->get_kind (), sd.m_snode->m_index);
1360   log ("num dupes: %i", sd.get_num_dupes ());
1361 
1362   pretty_printer *pp = global_dc->printer->clone ();
1363 
1364   const exploded_path *epath = sd.get_best_epath ();
1365   gcc_assert (epath);
1366 
1367   /* Precompute all enodes from which the diagnostic is reachable.  */
1368   path_builder pb (eg, *epath, sd.get_feasibility_problem (), sd);
1369 
1370   /* This is the diagnostic_path subclass that will be built for
1371      the diagnostic.  */
1372   checker_path emission_path;
1373 
1374   /* Populate emission_path with a full description of EPATH.  */
1375   build_emission_path (pb, *epath, &emission_path);
1376 
1377   /* Now prune it to just cover the most pertinent events.  */
1378   prune_path (&emission_path, sd.m_sm, sd.m_sval, sd.m_state);
1379 
1380   /* Add a final event to the path, covering the diagnostic itself.
1381      We use the final enode from the epath, which might be different from
1382      the sd.m_enode, as the dedupe code doesn't care about enodes, just
1383      snodes.  */
1384   emission_path.add_final_event (sd.m_sm, epath->get_final_enode (), sd.m_stmt,
1385 				 sd.m_var, sd.m_state);
1386 
1387   /* The "final" event might not be final; if the saved_diagnostic has a
1388      trailing eedge stashed, add any events for it.  This is for use
1389      in handling longjmp, to show where a longjmp is rewinding to.  */
1390   if (sd.m_trailing_eedge)
1391     add_events_for_eedge (pb, *sd.m_trailing_eedge, &emission_path, NULL);
1392 
1393   emission_path.prepare_for_emission (sd.m_d);
1394 
1395   location_t loc
1396     = get_emission_location (sd.m_stmt, sd.m_snode->m_fun, *sd.m_d);
1397 
1398   /* Allow the pending_diagnostic to fix up the locations of events.  */
1399   emission_path.fixup_locations (sd.m_d);
1400 
1401   gcc_rich_location rich_loc (loc);
1402   rich_loc.set_path (&emission_path);
1403 
1404   auto_diagnostic_group d;
1405   auto_cfun sentinel (sd.m_snode->m_fun);
1406   if (sd.m_d->emit (&rich_loc))
1407     {
1408       sd.emit_any_notes ();
1409 
1410       unsigned num_dupes = sd.get_num_dupes ();
1411       if (flag_analyzer_show_duplicate_count && num_dupes > 0)
1412 	inform_n (loc, num_dupes,
1413 		  "%i duplicate", "%i duplicates",
1414 		  num_dupes);
1415       if (flag_dump_analyzer_exploded_paths)
1416 	{
1417 	  auto_timevar tv (TV_ANALYZER_DUMP);
1418 	  pretty_printer pp;
1419 	  pp_printf (&pp, "%s.%i.%s.epath.txt",
1420 		     dump_base_name, sd.get_index (), sd.m_d->get_kind ());
1421 	  char *filename = xstrdup (pp_formatted_text (&pp));
1422 	  epath->dump_to_file (filename, eg.get_ext_state ());
1423 	  inform (loc, "exploded path written to %qs", filename);
1424 	  free (filename);
1425 	}
1426     }
1427   delete pp;
1428 }
1429 
/* Emit a "path" of events to EMISSION_PATH describing the exploded path
   EPATH within EG.  */

void
diagnostic_manager::build_emission_path (const path_builder &pb,
					 const exploded_path &epath,
					 checker_path *emission_path) const
{
  LOG_SCOPE (get_logger ());

  /* Ask the pending_diagnostic which regions/svalues it considers
     interesting; this guides which events get added below.  */
  interesting_t interest;
  pb.get_pending_diagnostic ()->mark_interesting_stuff (&interest);

  /* Add region creation events for any globals of interest, at the
     beginning of the path.  */
  {
    for (auto reg : interest.m_region_creation)
      switch (reg->get_memory_space ())
	{
	default:
	  /* Not a global; skip.  Note that this "continue" applies to
	     the enclosing for loop, not the switch.  */
	  continue;
	case MEMSPACE_CODE:
	case MEMSPACE_GLOBALS:
	case MEMSPACE_READONLY_DATA:
	  {
	    /* Attribute the creation event to the source location of
	       the underlying decl, when one is available.  */
	    const region *base_reg = reg->get_base_region ();
	    if (tree decl = base_reg->maybe_get_decl ())
	      if (DECL_P (decl)
		  && DECL_SOURCE_LOCATION (decl) != UNKNOWN_LOCATION)
		{
		  emission_path->add_region_creation_event
		    (reg,
		     DECL_SOURCE_LOCATION (decl),
		     NULL_TREE,
		     0);
		}
	  }
	}
  }

  /* Walk EPATH, adding events as appropriate.  */
  for (unsigned i = 0; i < epath.m_edges.length (); i++)
    {
      const exploded_edge *eedge = epath.m_edges[i];
      add_events_for_eedge (pb, *eedge, emission_path, &interest);
    }
}
1477 
/* Subclass of state_change_visitor that creates state_change_event
   instances.  */

class state_change_event_creator : public state_change_visitor
{
public:
  state_change_event_creator (const path_builder &pb,
			      const exploded_edge &eedge,
			      checker_path *emission_path)
    : m_pb (pb),
      m_eedge (eedge),
      m_emission_path (emission_path)
  {}

  /* Append a state_change_event for a change to SM's global state
     along m_eedge.  Only the state machine associated with the
     diagnostic (m_pb.get_sm ()) is of interest.
     Always returns false so that iteration continues.  */
  bool on_global_state_change (const state_machine &sm,
			       state_machine::state_t src_sm_val,
			       state_machine::state_t dst_sm_val)
    FINAL OVERRIDE
  {
    if (&sm != m_pb.get_sm ())
      return false;
    const exploded_node *src_node = m_eedge.m_src;
    const program_point &src_point = src_node->get_point ();
    const int src_stack_depth = src_point.get_stack_depth ();
    const exploded_node *dst_node = m_eedge.m_dest;
    const gimple *stmt = src_point.get_stmt ();
    const supernode *supernode = src_point.get_supernode ();
    const program_state &dst_state = dst_node->get_state ();

    int stack_depth = src_stack_depth;

    /* Global change: no svalue and no origin (the two NULLs).  */
    m_emission_path->add_event (new state_change_event (supernode,
							stmt,
							stack_depth,
							sm,
							NULL,
							src_sm_val,
							dst_sm_val,
							NULL,
							dst_state));
    return false;
  }

  /* As above, but for a change to the state of a particular SVAL.
     Always returns false so that iteration continues.  */
  bool on_state_change (const state_machine &sm,
			state_machine::state_t src_sm_val,
			state_machine::state_t dst_sm_val,
			const svalue *sval,
			const svalue *dst_origin_sval) FINAL OVERRIDE
  {
    if (&sm != m_pb.get_sm ())
      return false;
    const exploded_node *src_node = m_eedge.m_src;
    const program_point &src_point = src_node->get_point ();
    const int src_stack_depth = src_point.get_stack_depth ();
    const exploded_node *dst_node = m_eedge.m_dest;
    const gimple *stmt = src_point.get_stmt ();
    const supernode *supernode = src_point.get_supernode ();
    const program_state &dst_state = dst_node->get_state ();

    int stack_depth = src_stack_depth;

    /* For a CFG edge, attribute the change to the last stmt of the
       source supernode.  NOTE(review): the supernode and stack_depth
       assignments here restate the values computed above; presumably
       a relic of an earlier scheme -- confirm before simplifying.  */
    if (m_eedge.m_sedge
	&& m_eedge.m_sedge->m_kind == SUPEREDGE_CFG_EDGE)
      {
	supernode = src_point.get_supernode ();
	stmt = supernode->get_last_stmt ();
	stack_depth = src_stack_depth;
      }

    /* Bulletproofing for state changes at calls/returns;
       TODO: is there a better way? */
    if (!stmt)
      return false;

    m_emission_path->add_event (new state_change_event (supernode,
							stmt,
							stack_depth,
							sm,
							sval,
							src_sm_val,
							dst_sm_val,
							dst_origin_sval,
							dst_state));
    return false;
  }

  const path_builder &m_pb;
  const exploded_edge &m_eedge;
  checker_path *m_emission_path;
};
1568 
1569 /* Compare SRC_STATE and DST_STATE (which use EXT_STATE), and call
1570    VISITOR's on_state_change for every sm-state change that occurs
1571    to a tree, and on_global_state_change for every global state change
1572    that occurs.
1573 
1574    This determines the state changes that ought to be reported to
1575    the user: a combination of the effects of changes to sm_state_map
1576    (which maps svalues to sm-states), and of region_model changes
1577    (which map trees to svalues).
1578 
1579    Bail out early and return true if any call to on_global_state_change
1580    or on_state_change returns true, otherwise return false.
1581 
1582    This is split out to make it easier to experiment with changes to
1583    exploded_node granularity (so that we can observe what state changes
1584    lead to state_change_events being emitted).  */
1585 
1586 bool
for_each_state_change(const program_state & src_state,const program_state & dst_state,const extrinsic_state & ext_state,state_change_visitor * visitor)1587 for_each_state_change (const program_state &src_state,
1588 		       const program_state &dst_state,
1589 		       const extrinsic_state &ext_state,
1590 		       state_change_visitor *visitor)
1591 {
1592   gcc_assert (src_state.m_checker_states.length ()
1593 	      == ext_state.get_num_checkers ());
1594   gcc_assert (dst_state.m_checker_states.length ()
1595 	      == ext_state.get_num_checkers ());
1596   for (unsigned i = 0; i < ext_state.get_num_checkers (); i++)
1597     {
1598       const state_machine &sm = ext_state.get_sm (i);
1599       const sm_state_map &src_smap = *src_state.m_checker_states[i];
1600       const sm_state_map &dst_smap = *dst_state.m_checker_states[i];
1601 
1602       /* Add events for any global state changes.  */
1603       if (src_smap.get_global_state () != dst_smap.get_global_state ())
1604 	if (visitor->on_global_state_change (sm,
1605 					     src_smap.get_global_state (),
1606 					     dst_smap.get_global_state ()))
1607 	  return true;
1608 
1609       /* Add events for per-svalue state changes.  */
1610       for (sm_state_map::iterator_t iter = dst_smap.begin ();
1611 	   iter != dst_smap.end ();
1612 	   ++iter)
1613 	{
1614 	  const svalue *sval = (*iter).first;
1615 	  state_machine::state_t dst_sm_val = (*iter).second.m_state;
1616 	  state_machine::state_t src_sm_val
1617 	    = src_smap.get_state (sval, ext_state);
1618 	  if (dst_sm_val != src_sm_val)
1619 	    {
1620 	      const svalue *origin_sval = (*iter).second.m_origin;
1621 	      if (visitor->on_state_change (sm, src_sm_val, dst_sm_val,
1622 					    sval, origin_sval))
1623 		return true;
1624 	    }
1625 	}
1626     }
1627   return false;
1628 }
1629 
1630 /* An sm_context for adding state_change_event on assignments to NULL,
1631    where the default state isn't m_start.  Storing such state in the
1632    sm_state_map would lead to bloat of the exploded_graph, so we want
1633    to leave it as a default state, and inject state change events here
1634    when we have a diagnostic.
1635    Find transitions of constants, for handling on_zero_assignment.  */
1636 
1637 struct null_assignment_sm_context : public sm_context
1638 {
null_assignment_sm_contextana::null_assignment_sm_context1639   null_assignment_sm_context (int sm_idx,
1640 			      const state_machine &sm,
1641 			      const program_state *old_state,
1642 			      const program_state *new_state,
1643 			      const gimple *stmt,
1644 			      const program_point *point,
1645 			      checker_path *emission_path,
1646 			      const extrinsic_state &ext_state)
1647   : sm_context (sm_idx, sm), m_old_state (old_state), m_new_state (new_state),
1648     m_stmt (stmt), m_point (point), m_emission_path (emission_path),
1649     m_ext_state (ext_state)
1650   {
1651   }
1652 
get_fndecl_for_callana::null_assignment_sm_context1653   tree get_fndecl_for_call (const gcall */*call*/) FINAL OVERRIDE
1654   {
1655     return NULL_TREE;
1656   }
1657 
get_stateana::null_assignment_sm_context1658   state_machine::state_t get_state (const gimple *stmt ATTRIBUTE_UNUSED,
1659 				    tree var) FINAL OVERRIDE
1660   {
1661     const svalue *var_old_sval
1662       = m_old_state->m_region_model->get_rvalue (var, NULL);
1663     const sm_state_map *old_smap = m_old_state->m_checker_states[m_sm_idx];
1664 
1665     state_machine::state_t current
1666       = old_smap->get_state (var_old_sval, m_ext_state);
1667 
1668     return current;
1669   }
1670 
get_stateana::null_assignment_sm_context1671   state_machine::state_t get_state (const gimple *stmt ATTRIBUTE_UNUSED,
1672 				    const svalue *sval) FINAL OVERRIDE
1673   {
1674     const sm_state_map *old_smap = m_old_state->m_checker_states[m_sm_idx];
1675     state_machine::state_t current = old_smap->get_state (sval, m_ext_state);
1676     return current;
1677   }
1678 
set_next_stateana::null_assignment_sm_context1679   void set_next_state (const gimple *stmt,
1680 		       tree var,
1681 		       state_machine::state_t to,
1682 		       tree origin ATTRIBUTE_UNUSED) FINAL OVERRIDE
1683   {
1684     state_machine::state_t from = get_state (stmt, var);
1685     if (from != m_sm.get_start_state ())
1686       return;
1687 
1688     const svalue *var_new_sval
1689       = m_new_state->m_region_model->get_rvalue (var, NULL);
1690     const supernode *supernode = m_point->get_supernode ();
1691     int stack_depth = m_point->get_stack_depth ();
1692 
1693     m_emission_path->add_event (new state_change_event (supernode,
1694 							m_stmt,
1695 							stack_depth,
1696 							m_sm,
1697 							var_new_sval,
1698 							from, to,
1699 							NULL,
1700 							*m_new_state));
1701   }
1702 
set_next_stateana::null_assignment_sm_context1703   void set_next_state (const gimple *stmt,
1704 		       const svalue *sval,
1705 		       state_machine::state_t to,
1706 		       tree origin ATTRIBUTE_UNUSED) FINAL OVERRIDE
1707   {
1708     state_machine::state_t from = get_state (stmt, sval);
1709     if (from != m_sm.get_start_state ())
1710       return;
1711 
1712     const supernode *supernode = m_point->get_supernode ();
1713     int stack_depth = m_point->get_stack_depth ();
1714 
1715     m_emission_path->add_event (new state_change_event (supernode,
1716 							m_stmt,
1717 							stack_depth,
1718 							m_sm,
1719 							sval,
1720 							from, to,
1721 							NULL,
1722 							*m_new_state));
1723   }
1724 
warnana::null_assignment_sm_context1725   void warn (const supernode *, const gimple *,
1726 	     tree, pending_diagnostic *d) FINAL OVERRIDE
1727   {
1728     delete d;
1729   }
1730 
get_diagnostic_treeana::null_assignment_sm_context1731   tree get_diagnostic_tree (tree expr) FINAL OVERRIDE
1732   {
1733     return expr;
1734   }
1735 
  /* For an svalue, ask the new state's region model for a
     representative tree for SVAL (may be NULL_TREE).  */
  tree get_diagnostic_tree (const svalue *sval) FINAL OVERRIDE
  {
    return m_new_state->m_region_model->get_representative_tree (sval);
  }
1740 
  /* Global state is not tracked by this context; always report 0.  */
  state_machine::state_t get_global_state () const FINAL OVERRIDE
  {
    return 0;
  }
1745 
  /* Changes to global state are deliberately ignored by this context.  */
  void set_global_state (state_machine::state_t) FINAL OVERRIDE
  {
    /* No-op.  */
  }
1750 
  /* Custom transitions are deliberately ignored by this context.  */
  void on_custom_transition (custom_transition *) FINAL OVERRIDE
  {
  }
1754 
is_zero_assignmentana::null_assignment_sm_context1755   tree is_zero_assignment (const gimple *stmt) FINAL OVERRIDE
1756   {
1757     const gassign *assign_stmt = dyn_cast <const gassign *> (stmt);
1758     if (!assign_stmt)
1759      return NULL_TREE;
1760     if (const svalue *sval
1761 	= m_new_state->m_region_model->get_gassign_result (assign_stmt, NULL))
1762       if (tree cst = sval->maybe_get_constant ())
1763 	if (::zerop(cst))
1764 	  return gimple_assign_lhs (assign_stmt);
1765     return NULL_TREE;
1766   }
1767 
  /* Return the program state as it was before the current stmt.  */
  const program_state *get_old_program_state () const FINAL OVERRIDE
  {
    return m_old_state;
  }
1772 
  /* The program state before the current stmt executed
     (returned by get_old_program_state).  */
  const program_state *m_old_state;
  /* The program state after the current stmt executed; used for
     getting rvalues and representative trees.  */
  const program_state *m_new_state;
  /* The statement being replayed.  */
  const gimple *m_stmt;
  /* The program point of the statement, used for its supernode
     and stack depth.  */
  const program_point *m_point;
  /* The path to which synthesized state_change_events are added.  */
  checker_path *m_emission_path;
  /* The extrinsic state (the set of checkers in use).  */
  const extrinsic_state &m_ext_state;
};
1780 
1781 /* Subroutine of diagnostic_manager::build_emission_path.
1782    Add any events for EEDGE to EMISSION_PATH.  */
1783 
void
diagnostic_manager::add_events_for_eedge (const path_builder &pb,
					  const exploded_edge &eedge,
					  checker_path *emission_path,
					  interesting_t *interest) const
{
  /* Cache the endpoints of EEDGE and their program points/stack depths.  */
  const exploded_node *src_node = eedge.m_src;
  const program_point &src_point = src_node->get_point ();
  const int src_stack_depth = src_point.get_stack_depth ();
  const exploded_node *dst_node = eedge.m_dest;
  const program_point &dst_point = dst_node->get_point ();
  const int dst_stack_depth = dst_point.get_stack_depth ();
  /* Log "EN i -> EN j: SRC_POINT -> DST_POINT" if logging is enabled.  */
  if (get_logger ())
    {
      get_logger ()->start_log_line ();
      pretty_printer *pp = get_logger ()->get_printer ();
      pp_printf (pp, "EN %i -> EN %i: ",
		 eedge.m_src->m_index,
		 eedge.m_dest->m_index);
      src_point.print (pp, format (false));
      pp_string (pp, "-> ");
      dst_point.print (pp, format (false));
      get_logger ()->end_log_line ();
    }
  const program_state &src_state = src_node->get_state ();
  const program_state &dst_state = dst_node->get_state ();

  /* Add state change events for the states that have changed.
     We add these before events for superedges, so that if we have a
     state_change_event due to following an edge, we'll get this sequence
     of events:

      | if (!ptr)
      |    ~
      |    |
      |    (1) assuming 'ptr' is non-NULL  (state_change_event)
      |    (2) following 'false' branch... (start_cfg_edge_event)
     ...
      | do_something (ptr);
      | ~~~~~~~~~~~~~^~~~~
      |              |
      |              (3) ...to here        (end_cfg_edge_event).  */
  state_change_event_creator visitor (pb, eedge, emission_path);
  for_each_state_change (src_state, dst_state, pb.get_ext_state (),
			 &visitor);

  /* Allow non-standard edges to add events, e.g. when rewinding from
     longjmp to a setjmp.  */
  if (eedge.m_custom_info)
    eedge.m_custom_info->add_events_to_path (emission_path, eedge);

  /* Add events for superedges, function entries, and for statements.  */
  switch (dst_point.get_kind ())
    {
    default:
      break;
    case PK_BEFORE_SUPERNODE:
      if (src_point.get_kind () == PK_AFTER_SUPERNODE)
	{
	  if (eedge.m_sedge)
	    add_events_for_superedge (pb, eedge, emission_path);
	}
      /* Add function entry events.  */
      if (dst_point.get_supernode ()->entry_p ())
	{
	  emission_path->add_event
	    (new function_entry_event
	     (dst_point.get_supernode ()->get_start_location (),
	      dst_point.get_fndecl (),
	      dst_stack_depth));
	  /* Create region_creation_events for on-stack regions within
	     this frame.  */
	  if (interest)
	    {
	      unsigned i;
	      const region *reg;
	      /* Only consider interesting regions whose frame belongs
		 to the function being entered, and which have a decl
		 with a known source location.  */
	      FOR_EACH_VEC_ELT (interest->m_region_creation, i, reg)
		if (const frame_region *frame = reg->maybe_get_frame_region ())
		  if (frame->get_fndecl () == dst_point.get_fndecl ())
		    {
		      const region *base_reg = reg->get_base_region ();
		      if (tree decl = base_reg->maybe_get_decl ())
			if (DECL_P (decl)
			    && DECL_SOURCE_LOCATION (decl) != UNKNOWN_LOCATION)
			  {
			    emission_path->add_region_creation_event
			      (reg,
			       DECL_SOURCE_LOCATION (decl),
			       dst_point.get_fndecl (),
			       dst_stack_depth);
			  }
		    }
	    }
	}
      break;
    case PK_BEFORE_STMT:
      {
	const gimple *stmt = dst_point.get_stmt ();
	const gcall *call = dyn_cast <const gcall *> (stmt);
	if (call && is_setjmp_call_p (call))
	  emission_path->add_event
	    (new setjmp_event (stmt->location,
			       dst_node,
			       dst_point.get_fndecl (),
			       dst_stack_depth,
			       call));
	else
	  emission_path->add_event
	    (new statement_event (stmt,
				  dst_point.get_fndecl (),
				  dst_stack_depth, dst_state));

	/* Create state change events for assignment to NULL.
	   Iterate through the stmts in dst_enode, adding state change
	   events for them.  */
	if (dst_state.m_region_model)
	  {
	    /* Replay the assignments on a copy of the state so each
	       checker can observe transitions out of its start state
	       via null_assignment_sm_context.  */
	    program_state iter_state (dst_state);
	    program_point iter_point (dst_point);
	    while (1)
	      {
		const gimple *stmt = iter_point.get_stmt ();
		if (const gassign *assign = dyn_cast<const gassign *> (stmt))
		  {
		    const extrinsic_state &ext_state = pb.get_ext_state ();
		    /* Snapshot the state before applying the assignment,
		       so the sm_context can see both sides.  */
		    program_state old_state (iter_state);
		    iter_state.m_region_model->on_assignment (assign, NULL);
		    for (unsigned i = 0; i < ext_state.get_num_checkers (); i++)
		      {
			const state_machine &sm = ext_state.get_sm (i);
			null_assignment_sm_context sm_ctxt (i, sm,
							    &old_state,
							    &iter_state,
							    stmt,
							    &iter_point,
							    emission_path,
							    pb.get_ext_state ());
			sm.on_stmt (&sm_ctxt, dst_point.get_supernode (), stmt);
			// TODO: what about phi nodes?
		      }
		  }
		iter_point.next_stmt ();
		/* Stop at the end of the supernode, or (when dst_node has
		   multiple successors) on reaching the point of the first
		   successor's destination.  */
		if (iter_point.get_kind () == PK_AFTER_SUPERNODE
		    || (dst_node->m_succs.length () > 1
			&& (iter_point
			    == dst_node->m_succs[0]->m_dest->get_point ())))
		  break;
	      }

	  }
      }
      break;
    }

  /* Look for changes in dynamic extents, which will identify
     the creation of heap-based regions and alloca regions.  */
  if (interest)
    {
      const region_model *src_model = src_state.m_region_model;
      const region_model *dst_model = dst_state.m_region_model;
      if (src_model->get_dynamic_extents ()
	  != dst_model->get_dynamic_extents ())
	{
	  unsigned i;
	  const region *reg;
	  FOR_EACH_VEC_ELT (interest->m_region_creation, i, reg)
	    {
	      const region *base_reg = reg->get_base_region ();
	      const svalue *old_extents
		= src_model->get_dynamic_extents (base_reg);
	      const svalue *new_extents
		= dst_model->get_dynamic_extents (base_reg);
	      /* A region gaining dynamic extents along this edge implies
		 it was created here.  */
	      if (old_extents == NULL && new_extents != NULL)
		switch (base_reg->get_kind ())
		  {
		  default:
		    break;
		  case RK_HEAP_ALLOCATED:
		  case RK_ALLOCA:
		    emission_path->add_region_creation_event
		      (reg,
		       src_point.get_location (),
		       src_point.get_fndecl (),
		       src_stack_depth);
		    break;
		  }
	    }
	}
    }

  /* If the path-builder recorded a feasibility problem at this edge,
     add a custom event describing it.  */
  if (pb.get_feasibility_problem ()
      && &pb.get_feasibility_problem ()->m_eedge == &eedge)
    {
      pretty_printer pp;
      pp_format_decoder (&pp) = default_tree_printer;
      pp_string (&pp,
		 "this path would have been rejected as infeasible"
		 " at this edge: ");
      pb.get_feasibility_problem ()->dump_to_pp (&pp);
      emission_path->add_event (new precanned_custom_event
				(dst_point.get_location (),
				 dst_point.get_fndecl (),
				 dst_stack_depth,
				 pp_formatted_text (&pp)));
    }
}
1990 
1991 /* Return true if EEDGE is a significant edge in the path to the diagnostic
1992    for PB.
1993 
1994    Consider all of the sibling out-eedges from the same source enode
1995    as EEDGE.
1996    If there's no path from the destinations of those eedges to the
1997    diagnostic enode, then we have to take this eedge and thus it's
1998    significant.
1999 
2000    Conversely if there is a path from the destination of any other sibling
2001    eedge to the diagnostic enode, then this edge is insignificant.
2002 
2003    Example 1: redundant if-else:
2004 
2005      (A) if (...)            A
2006      (B)   ...              / \
2007          else              B   C
2008      (C)   ...              \ /
2009      (D) [DIAGNOSTIC]        D
2010 
2011      D is reachable by either B or C, so neither of these edges
2012      are significant.
2013 
2014    Example 2: pertinent if-else:
2015 
2016      (A) if (...)                         A
2017      (B)   ...                           / \
2018          else                           B   C
2019      (C)   [NECESSARY CONDITION]        |   |
2020      (D) [POSSIBLE DIAGNOSTIC]          D1  D2
2021 
2022      D becomes D1 and D2 in the exploded graph, where the diagnostic occurs
2023      at D2.  D2 is only reachable via C, so the A -> C edge is significant.
2024 
2025    Example 3: redundant loop:
2026 
2027      (A) while (...)          +-->A
2028      (B)   ...                |  / \
2029      (C) ...                  +-B  C
2030      (D) [DIAGNOSTIC]              |
2031                                    D
2032 
2033      D is reachable from both B and C, so the A->C edge is not significant.  */
2034 
2035 bool
significant_edge_p(const path_builder & pb,const exploded_edge & eedge) const2036 diagnostic_manager::significant_edge_p (const path_builder &pb,
2037 					const exploded_edge &eedge) const
2038 {
2039   int i;
2040   exploded_edge *sibling;
2041   FOR_EACH_VEC_ELT (eedge.m_src->m_succs, i, sibling)
2042     {
2043       if (sibling == &eedge)
2044 	continue;
2045       if (pb.reachable_from_p (sibling->m_dest))
2046 	{
2047 	  if (get_logger ())
2048 	    get_logger ()->log ("  edge EN: %i -> EN: %i is insignificant as"
2049 				" EN: %i is also reachable via"
2050 				" EN: %i -> EN: %i",
2051 				eedge.m_src->m_index, eedge.m_dest->m_index,
2052 				pb.get_diag_node ()->m_index,
2053 				sibling->m_src->m_index,
2054 				sibling->m_dest->m_index);
2055 	  return false;
2056 	}
2057     }
2058 
2059   return true;
2060 }
2061 
2062 /* Subroutine of diagnostic_manager::add_events_for_eedge
2063    where EEDGE has an underlying superedge i.e. a CFG edge,
2064    or an interprocedural call/return.
2065    Add any events for the superedge to EMISSION_PATH.  */
2066 
void
diagnostic_manager::add_events_for_superedge (const path_builder &pb,
					      const exploded_edge &eedge,
					      checker_path *emission_path)
  const
{
  gcc_assert (eedge.m_sedge);

  /* Give diagnostics an opportunity to override this function.  */
  pending_diagnostic *pd = pb.get_pending_diagnostic ();
  if (pd->maybe_add_custom_events_for_superedge (eedge, emission_path))
    return;

  /* Don't add events for insignificant edges at verbosity levels below 3.  */
  if (m_verbosity < 3)
    if (!significant_edge_p (pb, eedge))
      return;

  const exploded_node *src_node = eedge.m_src;
  const program_point &src_point = src_node->get_point ();
  const exploded_node *dst_node = eedge.m_dest;
  const program_point &dst_point = dst_node->get_point ();
  const int src_stack_depth = src_point.get_stack_depth ();
  const int dst_stack_depth = dst_point.get_stack_depth ();
  /* May be NULL; the cases below fall back to UNKNOWN_LOCATION then.  */
  const gimple *last_stmt = src_point.get_supernode ()->get_last_stmt ();

  switch (eedge.m_sedge->m_kind)
    {
    case SUPEREDGE_CFG_EDGE:
      {
	/* Intraprocedural CFG edge: emit a paired
	   start_cfg_edge_event/end_cfg_edge_event
	   ("following 'true' branch..." / "...to here").  */
	emission_path->add_event
	  (new start_cfg_edge_event (eedge,
			       (last_stmt
				? last_stmt->location
				: UNKNOWN_LOCATION),
			       src_point.get_fndecl (),
			       src_stack_depth));
	emission_path->add_event
	  (new end_cfg_edge_event (eedge,
				   dst_point.get_supernode ()->get_start_location (),
				   dst_point.get_fndecl (),
				   dst_stack_depth));
      }
      break;

    case SUPEREDGE_CALL:
      {
	/* Interprocedural call: a single call_event at the callsite.  */
	emission_path->add_event
	  (new call_event (eedge,
			   (last_stmt
			    ? last_stmt->location
			    : UNKNOWN_LOCATION),
			   src_point.get_fndecl (),
			   src_stack_depth));
      }
      break;

    case SUPEREDGE_INTRAPROCEDURAL_CALL:
      {
	/* TODO: add a subclass for this, or generate events for the
	   summary.  */
	emission_path->add_event
	  (new debug_event ((last_stmt
			     ? last_stmt->location
			     : UNKNOWN_LOCATION),
			    src_point.get_fndecl (),
			    src_stack_depth,
			    "call summary"));
      }
      break;

    case SUPEREDGE_RETURN:
      {
	/* Interprocedural return: locate the call stmt via the return
	   superedge so the event points at the callsite.  */
	const return_superedge *return_edge
	  = as_a <const return_superedge *> (eedge.m_sedge);

	const gcall *call_stmt = return_edge->get_call_stmt ();
	emission_path->add_event
	  (new return_event (eedge,
			     (call_stmt
			      ? call_stmt->location
			      : UNKNOWN_LOCATION),
			     dst_point.get_fndecl (),
			     dst_stack_depth));
      }
      break;
    }
}
2155 
2156 /* Prune PATH, based on the verbosity level, to the most pertinent
2157    events for a diagnostic that involves VAR ending in state STATE
2158    (for state machine SM).
2159 
2160    PATH is updated in place, and the redundant checker_events are deleted.
2161 
2162    As well as deleting events, call record_critical_state on events in
2163    which state critical to the pending_diagnostic is being handled; see
2164    the comment for diagnostic_manager::prune_for_sm_diagnostic.  */
2165 
void
diagnostic_manager::prune_path (checker_path *path,
				const state_machine *sm,
				const svalue *sval,
				state_machine::state_t state) const
{
  LOG_FUNC (get_logger ());
  /* Log the path before pruning (if logging is enabled).  */
  path->maybe_log (get_logger (), "path");
  /* Pass 1: drop events unrelated to SVAL/STATE, per verbosity level.  */
  prune_for_sm_diagnostic (path, sm, sval, state);
  /* Pass 2: drop redundant call/entry/return event sequences.  */
  prune_interproc_events (path);
  /* Pass 3: merge runs of same-line CFG-edge event pairs.  */
  consolidate_conditions (path);
  finish_pruning (path);
  /* Log the result of pruning.  */
  path->maybe_log (get_logger (), "pruned");
}
2180 
2181 /* A cheap test to determine if EXPR can be the expression of interest in
2182    an sm-diagnostic, so that we can reject cases where we have a non-lvalue.
   We don't always have a model when calling this, so we can't use
2184    tentative_region_model_context, so there can be false positives.  */
2185 
2186 static bool
can_be_expr_of_interest_p(tree expr)2187 can_be_expr_of_interest_p (tree expr)
2188 {
2189   if (!expr)
2190     return false;
2191 
2192   /* Reject constants.  */
2193   if (CONSTANT_CLASS_P (expr))
2194     return false;
2195 
2196   /* Otherwise assume that it can be an lvalue.  */
2197   return true;
2198 }
2199 
2200 /* First pass of diagnostic_manager::prune_path: apply verbosity level,
2201    pruning unrelated state change events.
2202 
2203    Iterate backwards through PATH, skipping state change events that aren't
2204    VAR but update the pertinent VAR when state-copying occurs.
2205 
2206    As well as deleting events, call record_critical_state on events in
2207    which state critical to the pending_diagnostic is being handled, so
2208    that the event's get_desc vfunc can potentially supply a more precise
2209    description of the event to the user.
2210    e.g. improving
2211      "calling 'foo' from 'bar'"
2212    to
2213      "passing possibly-NULL pointer 'ptr' to 'foo' from 'bar' as param 1"
2214    when the diagnostic relates to later dereferencing 'ptr'.  */
2215 
2216 void
prune_for_sm_diagnostic(checker_path * path,const state_machine * sm,const svalue * sval,state_machine::state_t state) const2217 diagnostic_manager::prune_for_sm_diagnostic (checker_path *path,
2218 					     const state_machine *sm,
2219 					     const svalue *sval,
2220 					     state_machine::state_t state) const
2221 {
2222   int idx = path->num_events () - 1;
2223   while (idx >= 0 && idx < (signed)path->num_events ())
2224     {
2225       checker_event *base_event = path->get_checker_event (idx);
2226       if (get_logger ())
2227 	{
2228 	  if (sm)
2229 	    {
2230 	      if (sval)
2231 		{
2232 		  label_text sval_desc = sval->get_desc ();
2233 		  log ("considering event %i (%s), with sval: %qs, state: %qs",
2234 		       idx, event_kind_to_string (base_event->m_kind),
2235 		       sval_desc.m_buffer, state->get_name ());
2236 		  sval_desc.maybe_free ();
2237 		}
2238 	      else
2239 		log ("considering event %i (%s), with global state: %qs",
2240 		     idx, event_kind_to_string (base_event->m_kind),
2241 		     state->get_name ());
2242 	    }
2243 	  else
2244 	    log ("considering event %i", idx);
2245 	}
2246 
2247       switch (base_event->m_kind)
2248 	{
2249 	default:
2250 	  gcc_unreachable ();
2251 
2252 	case EK_DEBUG:
2253 	  if (m_verbosity < 4)
2254 	    {
2255 	      log ("filtering event %i: debug event", idx);
2256 	      path->delete_event (idx);
2257 	    }
2258 	  break;
2259 
2260 	case EK_CUSTOM:
2261 	  /* Don't filter custom events.  */
2262 	  break;
2263 
2264 	case EK_STMT:
2265 	  {
2266 	    if (m_verbosity < 4)
2267 	      {
2268 		log ("filtering event %i: statement event", idx);
2269 		path->delete_event (idx);
2270 	      }
2271 	  }
2272 	  break;
2273 
2274 	case EK_REGION_CREATION:
2275 	  /* Don't filter these.  */
2276 	  break;
2277 
2278 	case EK_FUNCTION_ENTRY:
2279 	  if (m_verbosity < 1)
2280 	    {
2281 	      log ("filtering event %i: function entry", idx);
2282 	      path->delete_event (idx);
2283 	    }
2284 	  break;
2285 
2286 	case EK_STATE_CHANGE:
2287 	  {
2288 	    state_change_event *state_change = (state_change_event *)base_event;
2289 	    gcc_assert (state_change->m_dst_state.m_region_model);
2290 
2291 	    if (state_change->m_sval == sval)
2292 	      {
2293 		if (state_change->m_origin)
2294 		  {
2295 		    if (get_logger ())
2296 		      {
2297 			label_text sval_desc = sval->get_desc ();
2298 			label_text origin_sval_desc
2299 			  = state_change->m_origin->get_desc ();
2300 			log ("event %i:"
2301 			     " switching var of interest from %qs to %qs",
2302 			     idx, sval_desc.m_buffer,
2303 			     origin_sval_desc.m_buffer);
2304 			sval_desc.maybe_free ();
2305 			origin_sval_desc.maybe_free ();
2306 		      }
2307 		    sval = state_change->m_origin;
2308 		  }
2309 		log ("event %i: switching state of interest from %qs to %qs",
2310 		     idx, state_change->m_to->get_name (),
2311 		     state_change->m_from->get_name ());
2312 		state = state_change->m_from;
2313 	      }
2314 	    else if (m_verbosity < 4)
2315 	      {
2316 		if (get_logger ())
2317 		  {
2318 		    if (state_change->m_sval)
2319 		      {
2320 			label_text change_sval_desc
2321 			  = state_change->m_sval->get_desc ();
2322 			if (sval)
2323 			  {
2324 			    label_text sval_desc = sval->get_desc ();
2325 			    log ("filtering event %i:"
2326 				 " state change to %qs unrelated to %qs",
2327 				 idx, change_sval_desc.m_buffer,
2328 				 sval_desc.m_buffer);
2329 			  }
2330 			else
2331 			  log ("filtering event %i: state change to %qs",
2332 			       idx, change_sval_desc.m_buffer);
2333 			change_sval_desc.maybe_free ();
2334 		      }
2335 		    else
2336 		      log ("filtering event %i: global state change", idx);
2337 		  }
2338 		path->delete_event (idx);
2339 	      }
2340 	  }
2341 	  break;
2342 
2343 	case EK_START_CFG_EDGE:
2344 	  {
2345 	    cfg_edge_event *event = (cfg_edge_event *)base_event;
2346 
2347 	    /* TODO: is this edge significant to var?
2348 	       See if var can be in other states in the dest, but not
2349 	       in other states in the src?
2350 	       Must have multiple sibling edges.  */
2351 
2352 	    if (event->should_filter_p (m_verbosity))
2353 	      {
2354 		log ("filtering events %i and %i: CFG edge", idx, idx + 1);
2355 		path->delete_event (idx);
2356 		/* Also delete the corresponding EK_END_CFG_EDGE.  */
2357 		gcc_assert (path->get_checker_event (idx)->m_kind
2358 			    == EK_END_CFG_EDGE);
2359 		path->delete_event (idx);
2360 	      }
2361 	  }
2362 	  break;
2363 
2364 	case EK_END_CFG_EDGE:
2365 	  /* These come in pairs with EK_START_CFG_EDGE events and are
2366 	     filtered when their start event is filtered.  */
2367 	  break;
2368 
2369 	case EK_CALL_EDGE:
2370 	  {
2371 	    call_event *event = (call_event *)base_event;
2372 	    const region_model *callee_model
2373 	      = event->m_eedge.m_dest->get_state ().m_region_model;
2374 	    const region_model *caller_model
2375 	      = event->m_eedge.m_src->get_state ().m_region_model;
2376 	    tree callee_var = callee_model->get_representative_tree (sval);
2377 	    callsite_expr expr;
2378 
2379 	    tree caller_var;
2380             if(event->m_sedge)
2381               {
2382                 const callgraph_superedge& cg_superedge
2383                   = event->get_callgraph_superedge ();
2384                 if (cg_superedge.m_cedge)
2385 	          caller_var
2386 	            = cg_superedge.map_expr_from_callee_to_caller (callee_var,
2387                                                                    &expr);
2388                 else
2389                   caller_var = caller_model->get_representative_tree (sval);
2390               }
2391             else
2392 	      caller_var = caller_model->get_representative_tree (sval);
2393 
2394 	    if (caller_var)
2395 	      {
2396 		if (get_logger ())
2397 		  {
2398 		    label_text sval_desc = sval->get_desc ();
2399 		    log ("event %i:"
2400 			 " recording critical state for %qs at call"
2401 			 " from %qE in callee to %qE in caller",
2402 			 idx, sval_desc.m_buffer, callee_var, caller_var);
2403 		    sval_desc.maybe_free ();
2404 		  }
2405 		if (expr.param_p ())
2406 		  event->record_critical_state (caller_var, state);
2407 	      }
2408 	  }
2409 	  break;
2410 
2411 	case EK_RETURN_EDGE:
2412 	  {
2413 	    if (sval)
2414 	      {
2415 		return_event *event = (return_event *)base_event;
2416                 const region_model *caller_model
2417                   = event->m_eedge.m_dest->get_state ().m_region_model;
2418                 tree caller_var = caller_model->get_representative_tree (sval);
2419                 const region_model *callee_model
2420                   = event->m_eedge.m_src->get_state ().m_region_model;
2421 		callsite_expr expr;
2422 
2423                 tree callee_var;
2424                 if (event->m_sedge)
2425                   {
2426                     const callgraph_superedge& cg_superedge
2427                       = event->get_callgraph_superedge ();
2428                     if (cg_superedge.m_cedge)
2429                       callee_var
2430                         = cg_superedge.map_expr_from_caller_to_callee (caller_var,
2431                                                                        &expr);
2432                     else
2433                       callee_var = callee_model->get_representative_tree (sval);
2434                   }
2435                 else
2436                   callee_var = callee_model->get_representative_tree (sval);
2437 
2438 		if (callee_var)
2439 		  {
2440 		    if (get_logger ())
2441 		      {
2442 			label_text sval_desc = sval->get_desc ();
2443 			log ("event %i:"
2444 			     " recording critical state for %qs at return"
2445 			     " from %qE in caller to %qE in callee",
2446 			     idx, sval_desc.m_buffer, callee_var, callee_var);
2447 			sval_desc.maybe_free ();
2448 		      }
2449 		    if (expr.return_value_p ())
2450 		      event->record_critical_state (callee_var, state);
2451 		  }
2452 	      }
2453 	  }
2454 	  break;
2455 
2456 	case EK_SETJMP:
2457 	  /* TODO: only show setjmp_events that matter i.e. those for which
2458 	     there is a later rewind event using them.  */
2459 	case EK_REWIND_FROM_LONGJMP:
2460 	case EK_REWIND_TO_SETJMP:
2461 	  break;
2462 
2463 	case EK_WARNING:
2464 	  /* Always show the final "warning" event in the path.  */
2465 	  break;
2466 	}
2467       idx--;
2468     }
2469 }
2470 
2471 /* Subroutine of diagnostic_manager::prune_for_sm_diagnostic.
2472    If *EXPR is not suitable to be the expression of interest in
2473    an sm-diagnostic, set *EXPR to NULL and log.  */
2474 
2475 void
update_for_unsuitable_sm_exprs(tree * expr) const2476 diagnostic_manager::update_for_unsuitable_sm_exprs (tree *expr) const
2477 {
2478   gcc_assert (expr);
2479   if (*expr && !can_be_expr_of_interest_p (*expr))
2480     {
2481       log ("new var %qE is unsuitable; setting var to NULL", *expr);
2482       *expr = NULL_TREE;
2483     }
2484 }
2485 
2486 /* Second pass of diagnostic_manager::prune_path: remove redundant
2487    interprocedural information.
2488 
2489    For example, given:
2490      (1)- calling "f2" from "f1"
2491      (2)--- entry to "f2"
2492      (3)--- calling "f3" from "f2"
2493      (4)----- entry to "f3"
     (5)--- returning to "f2" from "f3"
     (6)- returning to "f1" from "f2"
2496    with no other intervening events, then none of these events are
2497    likely to be interesting to the user.
2498 
2499    Prune [..., call, function-entry, return, ...] triples repeatedly
2500    until nothing has changed.  For the example above, this would
2501    remove events (3, 4, 5), and then remove events (1, 2, 6).  */
2502 
void
diagnostic_manager::prune_interproc_events (checker_path *path) const
{
  /* Repeat full scans until no deletion happens, since removing an
     inner call/entry/return triple can expose an outer one (see the
     example in the comment above).  */
  bool changed = false;
  do
    {
      changed = false;
      /* Scan backwards so deletions don't disturb lower indices.  */
      int idx = (signed)path->num_events () - 1;
      while (idx >= 0)
	{
	  /* Prune [..., call, function-entry, return, ...] triples.  */
	  if (idx + 2 < (signed)path->num_events ()
	      && path->get_checker_event (idx)->is_call_p ()
	      && path->get_checker_event (idx + 1)->is_function_entry_p ()
	      && path->get_checker_event (idx + 2)->is_return_p ())
	    {
	      if (get_logger ())
		{
		  label_text desc
		    (path->get_checker_event (idx)->get_desc (false));
		  log ("filtering events %i-%i:"
		       " irrelevant call/entry/return: %s",
		       idx, idx + 2, desc.m_buffer);
		  desc.maybe_free ();
		}
	      /* Delete highest index first so the remaining indices
		 stay valid.  */
	      path->delete_event (idx + 2);
	      path->delete_event (idx + 1);
	      path->delete_event (idx);
	      changed = true;
	      idx--;
	      continue;
	    }

	  /* Prune [..., call, return, ...] pairs
	     (for -fanalyzer-verbosity=0).  */
	  if (idx + 1 < (signed)path->num_events ()
	      && path->get_checker_event (idx)->is_call_p ()
	      && path->get_checker_event (idx + 1)->is_return_p ())
	    {
	      if (get_logger ())
		{
		  label_text desc
		    (path->get_checker_event (idx)->get_desc (false));
		  log ("filtering events %i-%i:"
		       " irrelevant call/return: %s",
		       idx, idx + 1, desc.m_buffer);
		  desc.maybe_free ();
		}
	      path->delete_event (idx + 1);
	      path->delete_event (idx);
	      changed = true;
	      idx--;
	      continue;
	    }

	  idx--;
	}

    }
  while (changed);
}
2564 
2565 /* Return true iff event IDX within PATH is on the same line as REF_EXP_LOC.  */
2566 
2567 static bool
same_line_as_p(const expanded_location & ref_exp_loc,checker_path * path,unsigned idx)2568 same_line_as_p (const expanded_location &ref_exp_loc,
2569 		checker_path *path, unsigned idx)
2570 {
2571   const checker_event *ev = path->get_checker_event (idx);
2572   expanded_location idx_exp_loc = expand_location (ev->get_location ());
2573   gcc_assert (ref_exp_loc.file);
2574   if (idx_exp_loc.file == NULL)
2575     return false;
2576   if (strcmp (ref_exp_loc.file, idx_exp_loc.file))
2577     return false;
2578   return ref_exp_loc.line == idx_exp_loc.line;
2579 }
2580 
2581 /* This path-readability optimization reduces the verbosity of compound
2582    conditional statements (without needing to reconstruct the AST, which
2583    has already been lost).
2584 
2585    For example, it converts:
2586 
2587     |   61 |   if (cp[0] != '\0' && cp[0] != '#')
2588     |      |      ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
2589     |      |      |              |    |
2590     |      |      |              |    (6) ...to here
2591     |      |      |              (7) following ‘true’ branch...
2592     |      |      (5) following ‘true’ branch...
2593     |   62 |     {
2594     |   63 |       alias = cp++;
2595     |      |               ~~~~
2596     |      |                 |
2597     |      |                 (8) ...to here
2598 
2599    into:
2600 
2601     |   61 |   if (cp[0] != '\0' && cp[0] != '#')
2602     |      |      ~
2603     |      |      |
2604     |      |      (5) following ‘true’ branch...
2605     |   62 |     {
2606     |   63 |       alias = cp++;
2607     |      |               ~~~~
2608     |      |                 |
2609     |      |                 (6) ...to here
2610 
2611    by combining events 5-8 into new events 5-6.
2612 
2613    Find runs of consecutive (start_cfg_edge_event, end_cfg_edge_event) pairs
2614    in which all events apart from the final end_cfg_edge_event are on the same
2615    line, and for which either all the CFG edges are TRUE edges, or all are
2616    FALSE edges.
2617 
2618    Consolidate each such run into a
2619      (start_consolidated_cfg_edges_event, end_consolidated_cfg_edges_event)
2620    pair.  */
2621 
2622 void
consolidate_conditions(checker_path * path) const2623 diagnostic_manager::consolidate_conditions (checker_path *path) const
2624 {
2625   /* Don't simplify edges if we're debugging them.  */
2626   if (flag_analyzer_verbose_edges)
2627     return;
2628 
2629   for (int start_idx = 0;
2630        start_idx < (signed)path->num_events () - 1;
2631        start_idx++)
2632     {
2633       if (path->cfg_edge_pair_at_p (start_idx))
2634 	{
2635 	  const checker_event *old_start_ev
2636 	    = path->get_checker_event (start_idx);
2637 	  expanded_location start_exp_loc
2638 	    = expand_location (old_start_ev->get_location ());
2639 	  if (start_exp_loc.file == NULL)
2640 	    continue;
2641 	  if (!same_line_as_p (start_exp_loc, path, start_idx + 1))
2642 	    continue;
2643 
2644 	  /* Are we looking for a run of all TRUE edges, or all FALSE edges?  */
2645 	  gcc_assert (old_start_ev->m_kind == EK_START_CFG_EDGE);
2646 	  const start_cfg_edge_event *old_start_cfg_ev
2647 	    = (const start_cfg_edge_event *)old_start_ev;
2648 	  const cfg_superedge& first_cfg_sedge
2649 	    = old_start_cfg_ev->get_cfg_superedge ();
2650 	  bool edge_sense;
2651 	  if (first_cfg_sedge.true_value_p ())
2652 	    edge_sense = true;
2653 	  else if (first_cfg_sedge.false_value_p ())
2654 	    edge_sense = false;
2655 	  else
2656 	    continue;
2657 
2658 	  /* Find a run of CFG start/end event pairs from
2659 	       [start_idx, next_idx)
2660 	     where all apart from the final event are on the same line,
2661 	     and all are either TRUE or FALSE edges, matching the initial.  */
2662 	  int next_idx = start_idx + 2;
2663 	  while (path->cfg_edge_pair_at_p (next_idx)
2664 		 && same_line_as_p (start_exp_loc, path, next_idx))
2665 	    {
2666 	      const checker_event *iter_ev
2667 		= path->get_checker_event (next_idx);
2668 	      gcc_assert (iter_ev->m_kind == EK_START_CFG_EDGE);
2669 	      const start_cfg_edge_event *iter_cfg_ev
2670 		= (const start_cfg_edge_event *)iter_ev;
2671 	      const cfg_superedge& iter_cfg_sedge
2672 		= iter_cfg_ev->get_cfg_superedge ();
2673 	      if (edge_sense)
2674 		{
2675 		  if (!iter_cfg_sedge.true_value_p ())
2676 		    break;
2677 		}
2678 	      else
2679 		{
2680 		  if (!iter_cfg_sedge.false_value_p ())
2681 		    break;
2682 		}
2683 	      next_idx += 2;
2684 	    }
2685 
2686 	  /* If we have more than one pair in the run, consolidate.  */
2687 	  if (next_idx > start_idx + 2)
2688 	    {
2689 	      const checker_event *old_end_ev
2690 		= path->get_checker_event (next_idx - 1);
2691 	      log ("consolidating CFG edge events %i-%i into %i-%i",
2692 		   start_idx, next_idx - 1, start_idx, start_idx +1);
2693 	      start_consolidated_cfg_edges_event *new_start_ev
2694 		= new start_consolidated_cfg_edges_event
2695 		(old_start_ev->get_location (),
2696 		 old_start_ev->get_fndecl (),
2697 		 old_start_ev->get_stack_depth (),
2698 		 edge_sense);
2699 	      checker_event *new_end_ev
2700 		= new end_consolidated_cfg_edges_event
2701 		(old_end_ev->get_location (),
2702 		 old_end_ev->get_fndecl (),
2703 		 old_end_ev->get_stack_depth ());
2704 	      path->replace_event (start_idx, new_start_ev);
2705 	      path->replace_event (start_idx + 1, new_end_ev);
2706 	      path->delete_events (start_idx + 2, next_idx - (start_idx + 2));
2707 	    }
2708 	}
2709     }
2710 }
2711 
2712 /* Final pass of diagnostic_manager::prune_path.
2713 
2714    If all we're left with is in one function, then filter function entry
2715    events.  */
2716 
2717 void
finish_pruning(checker_path * path) const2718 diagnostic_manager::finish_pruning (checker_path *path) const
2719 {
2720   if (!path->interprocedural_p ())
2721     {
2722       int idx = path->num_events () - 1;
2723       while (idx >= 0 && idx < (signed)path->num_events ())
2724 	{
2725 	  checker_event *base_event = path->get_checker_event (idx);
2726 	  if (base_event->m_kind == EK_FUNCTION_ENTRY)
2727 	    {
2728 	      log ("filtering event %i:"
2729 		   " function entry for purely intraprocedural path", idx);
2730 	      path->delete_event (idx);
2731 	    }
2732 	  idx--;
2733 	}
2734     }
2735 }
2736 
2737 } // namespace ana
2738 
2739 #endif /* #if ENABLE_ANALYZER */
2740