/* Vectorizer
   Copyright (C) 2003-2015 Free Software Foundation, Inc.
   Contributed by Dorit Naishlos <dorit@il.ibm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Loop and basic block vectorizer.

  This file contains drivers for the three vectorizers:
  (1) loop vectorizer (inter-iteration parallelism),
  (2) loop-aware SLP (intra-iteration parallelism) (invoked by the loop
      vectorizer)
  (3) BB vectorizer (out-of-loops), aka SLP

  The rest of the vectorizer's code is organized as follows:
  - tree-vect-loop.c - loop specific parts such as reductions, etc.  These are
    used by drivers (1) and (2).
  - tree-vect-loop-manip.c - vectorizer's loop control-flow utilities, used by
    drivers (1) and (2).
  - tree-vect-slp.c - BB vectorization specific analysis and transformation,
    used by drivers (2) and (3).
  - tree-vect-stmts.c - statements analysis and transformation (used by all).
  - tree-vect-data-refs.c - vectorizer specific data-refs analysis and
    manipulations (used by all).
  - tree-vect-patterns.c - vectorizable code patterns detector (used by all)

  Here's a poor attempt at illustrating that:

     tree-vectorizer.c:
     loop_vect()  loop_aware_slp()  slp_vect()
          |        /           \          /
          |       /             \        /
          tree-vect-loop.c  tree-vect-slp.c
                | \      \  /      / |
                |  \      \/      /  |
                |   \     /\     /   |
                |    \   /  \   /    |
         tree-vect-stmts.c  tree-vect-data-refs.c
                       \      /
                    tree-vect-patterns.c
*/
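
/* A rough sketch (editorial illustration, not taken from the sources;
   the arrays below are made up) of the two kinds of parallelism the
   drivers above look for, where VF is the vectorization factor:

     Loop vectorizer (inter-iteration parallelism):

       for (i = 0; i < n; i++)
         a[i] = b[i] + c[i];          one vector add covers VF iterations

     BB vectorizer, aka SLP (intra-iteration parallelism):

       a[0] = b[0] + c[0];
       a[1] = b[1] + c[1];
       a[2] = b[2] + c[2];
       a[3] = b[3] + c[3];            four isomorphic scalar adds become
                                      one vector add  */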
57 #include "config.h"
58 #include "system.h"
59 #include "coretypes.h"
60 #include "dumpfile.h"
61 #include "tm.h"
62 #include "hash-set.h"
63 #include "machmode.h"
64 #include "vec.h"
65 #include "double-int.h"
66 #include "input.h"
67 #include "alias.h"
68 #include "symtab.h"
69 #include "wide-int.h"
70 #include "inchash.h"
71 #include "tree.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "tree-pretty-print.h"
75 #include "predict.h"
76 #include "hard-reg-set.h"
77 #include "input.h"
78 #include "function.h"
79 #include "dominance.h"
80 #include "cfg.h"
81 #include "basic-block.h"
82 #include "tree-ssa-alias.h"
83 #include "internal-fn.h"
84 #include "gimple-expr.h"
85 #include "is-a.h"
86 #include "gimple.h"
87 #include "gimple-iterator.h"
88 #include "gimple-walk.h"
89 #include "gimple-ssa.h"
90 #include "hash-map.h"
91 #include "plugin-api.h"
92 #include "ipa-ref.h"
93 #include "cgraph.h"
94 #include "tree-phinodes.h"
95 #include "ssa-iterators.h"
96 #include "tree-ssa-loop-manip.h"
97 #include "tree-cfg.h"
98 #include "cfgloop.h"
99 #include "tree-vectorizer.h"
100 #include "tree-pass.h"
101 #include "tree-ssa-propagate.h"
102 #include "dbgcnt.h"
103 #include "gimple-fold.h"
104 #include "tree-scalar-evolution.h"
/* Loop or bb location.  */
source_location vect_location;

/* Vector mapping GIMPLE stmt to stmt_vec_info.  */
vec<vec_void_p> stmt_vec_info_vec;
/* For mapping simduid to vectorization factor.  */

struct simduid_to_vf : typed_free_remove<simduid_to_vf>
{
  unsigned int simduid;
  int vf;

  /* hash_table support.  */
  typedef simduid_to_vf value_type;
  typedef simduid_to_vf compare_type;
  static inline hashval_t hash (const value_type *);
  static inline int equal (const value_type *, const compare_type *);
};

inline hashval_t
simduid_to_vf::hash (const value_type *p)
{
  return p->simduid;
}

inline int
simduid_to_vf::equal (const value_type *p1, const value_type *p2)
{
  return p1->simduid == p2->simduid;
}
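
/* Usage sketch for the traits above, mirroring what adjust_simduid_builtins
   and vectorize_loops below actually do (shown only for orientation;
   uid and vf stand in for the real values):

     hash_table<simduid_to_vf> *htab = new hash_table<simduid_to_vf> (15);

     simduid_to_vf *entry = XNEW (simduid_to_vf);   // record a VF
     entry->simduid = uid;
     entry->vf = vf;
     *htab->find_slot (entry, INSERT) = entry;

     simduid_to_vf data, *p;                        // look one up
     data.simduid = uid;
     p = htab->find (&data);                        // NULL if not recorded
     if (p)
       vf = p->vf;  */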
/* This hash maps the OMP simd array to the corresponding simduid used
   to index into it.  Like thus,

        _7 = GOMP_SIMD_LANE (simduid.0)
        ...
        ...
        D.1737[_7] = stuff;


   This hash maps from the OMP simd array (D.1737[]) to DECL_UID of
   simduid.0.  */

struct simd_array_to_simduid : typed_free_remove<simd_array_to_simduid>
{
  tree decl;
  unsigned int simduid;

  /* hash_table support.  */
  typedef simd_array_to_simduid value_type;
  typedef simd_array_to_simduid compare_type;
  static inline hashval_t hash (const value_type *);
  static inline int equal (const value_type *, const compare_type *);
};

inline hashval_t
simd_array_to_simduid::hash (const value_type *p)
{
  return DECL_UID (p->decl);
}

inline int
simd_array_to_simduid::equal (const value_type *p1, const value_type *p2)
{
  return p1->decl == p2->decl;
}
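
/* This map is filled by note_simd_array_uses below and consumed near the
   end of vectorize_loops, where each recorded "omp simd array" is shrunk
   to the vectorization factor of its loop.  An array that turns out to be
   indexed by more than one simduid is marked with simduid == -1U and is
   left untouched.  */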
/* Fold IFN_GOMP_SIMD_LANE, IFN_GOMP_SIMD_VF and IFN_GOMP_SIMD_LAST_LANE
   into their corresponding constants.  */

static void
adjust_simduid_builtins (hash_table<simduid_to_vf> **htab)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          unsigned int vf = 1;
          enum internal_fn ifn;
          gimple stmt = gsi_stmt (i);
          tree t;
          if (!is_gimple_call (stmt)
              || !gimple_call_internal_p (stmt))
            continue;
          ifn = gimple_call_internal_fn (stmt);
          switch (ifn)
            {
            case IFN_GOMP_SIMD_LANE:
            case IFN_GOMP_SIMD_VF:
            case IFN_GOMP_SIMD_LAST_LANE:
              break;
            default:
              continue;
            }
          tree arg = gimple_call_arg (stmt, 0);
          gcc_assert (arg != NULL_TREE);
          gcc_assert (TREE_CODE (arg) == SSA_NAME);
          simduid_to_vf *p = NULL, data;
          data.simduid = DECL_UID (SSA_NAME_VAR (arg));
          if (*htab)
            p = (*htab)->find (&data);
          if (p)
            vf = p->vf;
          switch (ifn)
            {
            case IFN_GOMP_SIMD_VF:
              t = build_int_cst (unsigned_type_node, vf);
              break;
            case IFN_GOMP_SIMD_LANE:
              t = build_int_cst (unsigned_type_node, 0);
              break;
            case IFN_GOMP_SIMD_LAST_LANE:
              t = gimple_call_arg (stmt, 1);
              break;
            default:
              gcc_unreachable ();
            }
          update_call_from_tree (&i, t);
        }
    }
}
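
/* For example (a hedged sketch; the SSA names and the factor of 8 are
   made up), if the loop tagged with simduid.0 was vectorized with
   vectorization factor 8, the calls above fold as:

     _5 = GOMP_SIMD_VF (simduid.0);            becomes   _5 = 8;
     _7 = GOMP_SIMD_LANE (simduid.0);          becomes   _7 = 0;
     _9 = GOMP_SIMD_LAST_LANE (simduid.0, _3); becomes   _9 = _3;

   If no vectorization factor was recorded for simduid.0, VF defaults
   to 1 above.  */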
/* Helper structure for note_simd_array_uses.  */

struct note_simd_array_uses_struct
{
  hash_table<simd_array_to_simduid> **htab;
  unsigned int simduid;
};

/* Callback for note_simd_array_uses, called through walk_gimple_op.  */

static tree
note_simd_array_uses_cb (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct note_simd_array_uses_struct *ns
    = (struct note_simd_array_uses_struct *) wi->info;

  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  else if (VAR_P (*tp)
           && lookup_attribute ("omp simd array", DECL_ATTRIBUTES (*tp))
           && DECL_CONTEXT (*tp) == current_function_decl)
    {
      simd_array_to_simduid data;
      if (!*ns->htab)
        *ns->htab = new hash_table<simd_array_to_simduid> (15);
      data.decl = *tp;
      data.simduid = ns->simduid;
      simd_array_to_simduid **slot = (*ns->htab)->find_slot (&data, INSERT);
      if (*slot == NULL)
        {
          simd_array_to_simduid *p = XNEW (simd_array_to_simduid);
          *p = data;
          *slot = p;
        }
      else if ((*slot)->simduid != ns->simduid)
        (*slot)->simduid = -1U;
      *walk_subtrees = 0;
    }
  return NULL_TREE;
}
276 /* Find "omp simd array" temporaries and map them to corresponding
277 simduid. */
279 static void
280 note_simd_array_uses (hash_table<simd_array_to_simduid> **htab)
282 basic_block bb;
283 gimple_stmt_iterator gsi;
284 struct walk_stmt_info wi;
285 struct note_simd_array_uses_struct ns;
287 memset (&wi, 0, sizeof (wi));
288 wi.info = &ns;
289 ns.htab = htab;
291 FOR_EACH_BB_FN (bb, cfun)
292 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
294 gimple stmt = gsi_stmt (gsi);
295 if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
296 continue;
297 switch (gimple_call_internal_fn (stmt))
299 case IFN_GOMP_SIMD_LANE:
300 case IFN_GOMP_SIMD_VF:
301 case IFN_GOMP_SIMD_LAST_LANE:
302 break;
303 default:
304 continue;
306 tree lhs = gimple_call_lhs (stmt);
307 if (lhs == NULL_TREE)
308 continue;
309 imm_use_iterator use_iter;
310 gimple use_stmt;
311 ns.simduid = DECL_UID (SSA_NAME_VAR (gimple_call_arg (stmt, 0)));
312 FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, lhs)
313 if (!is_gimple_debug (use_stmt))
314 walk_gimple_op (use_stmt, note_simd_array_uses_cb, &wi);
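
/* For the earlier example, the non-debug use "D.1737[_7] = stuff" of the
   GOMP_SIMD_LANE result _7 records D.1737 under the DECL_UID of simduid.0.
   Debug statements are skipped so that the set of arrays recorded (and
   later shrunk) does not depend on -g.  */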
/* A helper function to free data refs.  */

void
vect_destroy_datarefs (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
{
  vec<data_reference_p> datarefs;
  struct data_reference *dr;
  unsigned int i;

  if (loop_vinfo)
    datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
  else
    datarefs = BB_VINFO_DATAREFS (bb_vinfo);

  FOR_EACH_VEC_ELT (datarefs, i, dr)
    if (dr->aux)
      {
        free (dr->aux);
        dr->aux = NULL;
      }

  free_data_refs (datarefs);
}
/* If LOOP has been versioned during ifcvt, return the internal call
   guarding it.  */

static gimple
vect_loop_vectorized_call (struct loop *loop)
{
  basic_block bb = loop_preheader_edge (loop)->src;
  gimple g;
  do
    {
      g = last_stmt (bb);
      if (g)
        break;
      if (!single_pred_p (bb))
        break;
      bb = single_pred (bb);
    }
  while (1);
  if (g && gimple_code (g) == GIMPLE_COND)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (g);
      gsi_prev (&gsi);
      if (!gsi_end_p (gsi))
        {
          g = gsi_stmt (gsi);
          if (is_gimple_call (g)
              && gimple_call_internal_p (g)
              && gimple_call_internal_fn (g) == IFN_LOOP_VECTORIZED
              && (tree_to_shwi (gimple_call_arg (g, 0)) == loop->num
                  || tree_to_shwi (gimple_call_arg (g, 1)) == loop->num))
            return g;
        }
    }
  return NULL;
}
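
/* The shape being matched is roughly (a hedged sketch of what loop
   versioning for if-conversion emits; the loop numbers are made up):

     _1 = LOOP_VECTORIZED (3, 4);    // 3 = loop to be vectorized,
     if (_1 != 0)                    // 4 = its scalar copy
       goto <vectorized loop 3>;
     else
       goto <scalar loop 4>;

   starting at LOOP's preheader and walking up through single-predecessor
   blocks that contain no statements until the guarding GIMPLE_COND is
   found.  */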
/* Fold LOOP_VECTORIZED internal call G to VALUE and
   update any immediate uses of its LHS.  */

static void
fold_loop_vectorized_call (gimple g, tree value)
{
  tree lhs = gimple_call_lhs (g);
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  gimple_stmt_iterator gsi = gsi_for_stmt (g);

  update_call_from_tree (&gsi, value);
  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
        SET_USE (use_p, value);
      update_stmt (use_stmt);
    }
}
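
/* Once the call's LHS uses are replaced by the constant, the guarding
   GIMPLE_COND becomes trivially true or false, and the TODO_cleanup_cfg
   requested by the callers below removes the now-dead loop version.  */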
/* Function vectorize_loops.

   Entry point to loop vectorization phase.  */

unsigned
vectorize_loops (void)
{
  unsigned int i;
  unsigned int num_vectorized_loops = 0;
  unsigned int vect_loops_num;
  struct loop *loop;
  hash_table<simduid_to_vf> *simduid_to_vf_htab = NULL;
  hash_table<simd_array_to_simduid> *simd_array_to_simduid_htab = NULL;
  bool any_ifcvt_loops = false;
  unsigned ret = 0;

  vect_loops_num = number_of_loops (cfun);

  /* Bail out if there are no loops.  */
  if (vect_loops_num <= 1)
    {
      if (cfun->has_simduid_loops)
        adjust_simduid_builtins (&simduid_to_vf_htab);
      return 0;
    }

  if (cfun->has_simduid_loops)
    note_simd_array_uses (&simd_array_to_simduid_htab);

  init_stmt_vec_info_vec ();

  /* ----------- Analyze loops. -----------  */

  /* If some loop was duplicated, it gets bigger number
     than all previously defined loops.  This fact allows us to run
     only over initial loops skipping newly generated ones.  */
  FOR_EACH_LOOP (loop, 0)
    if (loop->dont_vectorize)
      any_ifcvt_loops = true;
    else if ((flag_tree_loop_vectorize
              && optimize_loop_nest_for_speed_p (loop))
             || loop->force_vectorize)
      {
        loop_vec_info loop_vinfo;
        vect_location = find_loop_location (loop);
        if (LOCATION_LOCUS (vect_location) != UNKNOWN_LOCATION
            && dump_enabled_p ())
          dump_printf (MSG_NOTE, "\nAnalyzing loop at %s:%d\n",
                       LOCATION_FILE (vect_location),
                       LOCATION_LINE (vect_location));

        loop_vinfo = vect_analyze_loop (loop);
        loop->aux = loop_vinfo;

        if (!loop_vinfo || !LOOP_VINFO_VECTORIZABLE_P (loop_vinfo))
          continue;

        if (!dbg_cnt (vect_loop))
          break;

        gimple loop_vectorized_call = vect_loop_vectorized_call (loop);
        if (loop_vectorized_call)
          {
            tree arg = gimple_call_arg (loop_vectorized_call, 1);
            basic_block *bbs;
            unsigned int i;
            struct loop *scalar_loop = get_loop (cfun, tree_to_shwi (arg));

            LOOP_VINFO_SCALAR_LOOP (loop_vinfo) = scalar_loop;
            gcc_checking_assert (vect_loop_vectorized_call
                                   (LOOP_VINFO_SCALAR_LOOP (loop_vinfo))
                                 == loop_vectorized_call);
            bbs = get_loop_body (scalar_loop);
            for (i = 0; i < scalar_loop->num_nodes; i++)
              {
                basic_block bb = bbs[i];
                gimple_stmt_iterator gsi;
                for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
                     gsi_next (&gsi))
                  {
                    gimple phi = gsi_stmt (gsi);
                    gimple_set_uid (phi, 0);
                  }
                for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
                     gsi_next (&gsi))
                  {
                    gimple stmt = gsi_stmt (gsi);
                    gimple_set_uid (stmt, 0);
                  }
              }
            free (bbs);
          }

        if (LOCATION_LOCUS (vect_location) != UNKNOWN_LOCATION
            && dump_enabled_p ())
          dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, vect_location,
                           "loop vectorized\n");
        vect_transform_loop (loop_vinfo);
        num_vectorized_loops++;
        /* Now that the loop has been vectorized, allow it to be unrolled
           etc.  */
        loop->force_vectorize = false;

        if (loop->simduid)
          {
            simduid_to_vf *simduid_to_vf_data = XNEW (simduid_to_vf);
            if (!simduid_to_vf_htab)
              simduid_to_vf_htab = new hash_table<simduid_to_vf> (15);
            simduid_to_vf_data->simduid = DECL_UID (loop->simduid);
            simduid_to_vf_data->vf = loop_vinfo->vectorization_factor;
            *simduid_to_vf_htab->find_slot (simduid_to_vf_data, INSERT)
              = simduid_to_vf_data;
          }

        if (loop_vectorized_call)
          {
            fold_loop_vectorized_call (loop_vectorized_call, boolean_true_node);
            ret |= TODO_cleanup_cfg;
          }
      }

  vect_location = UNKNOWN_LOCATION;

  statistics_counter_event (cfun, "Vectorized loops", num_vectorized_loops);
  if (dump_enabled_p ()
      || (num_vectorized_loops > 0 && dump_enabled_p ()))
    dump_printf_loc (MSG_NOTE, vect_location,
                     "vectorized %u loops in function.\n",
                     num_vectorized_loops);

  /* ----------- Finalize. -----------  */

  if (any_ifcvt_loops)
    for (i = 1; i < vect_loops_num; i++)
      {
        loop = get_loop (cfun, i);
        if (loop && loop->dont_vectorize)
          {
            gimple g = vect_loop_vectorized_call (loop);
            if (g)
              {
                fold_loop_vectorized_call (g, boolean_false_node);
                ret |= TODO_cleanup_cfg;
              }
          }
      }

  for (i = 1; i < vect_loops_num; i++)
    {
      loop_vec_info loop_vinfo;

      loop = get_loop (cfun, i);
      if (!loop)
        continue;
      loop_vinfo = (loop_vec_info) loop->aux;
      destroy_loop_vec_info (loop_vinfo, true);
      loop->aux = NULL;
    }

  free_stmt_vec_info_vec ();

  /* Fold IFN_GOMP_SIMD_{VF,LANE,LAST_LANE} builtins.  */
  if (cfun->has_simduid_loops)
    adjust_simduid_builtins (&simduid_to_vf_htab);

  /* Shrink any "omp array simd" temporary arrays to the
     actual vectorization factors.  */
  if (simd_array_to_simduid_htab)
    {
      for (hash_table<simd_array_to_simduid>::iterator iter
           = simd_array_to_simduid_htab->begin ();
           iter != simd_array_to_simduid_htab->end (); ++iter)
        if ((*iter)->simduid != -1U)
          {
            tree decl = (*iter)->decl;
            int vf = 1;
            if (simduid_to_vf_htab)
              {
                simduid_to_vf *p = NULL, data;
                data.simduid = (*iter)->simduid;
                p = simduid_to_vf_htab->find (&data);
                if (p)
                  vf = p->vf;
              }
            tree atype
              = build_array_type_nelts (TREE_TYPE (TREE_TYPE (decl)), vf);
            TREE_TYPE (decl) = atype;
            relayout_decl (decl);
          }

      delete simd_array_to_simduid_htab;
    }
  delete simduid_to_vf_htab;
  simduid_to_vf_htab = NULL;

  if (num_vectorized_loops > 0)
    {
      /* If we vectorized any loop only virtual SSA form needs to be updated.
         ??? Also while we try hard to update loop-closed SSA form we fail
         to properly do this in some corner-cases (see PR56286).  */
      rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa_only_virtuals);
      return TODO_cleanup_cfg;
    }

  return ret;
}
/* Entry point to basic block SLP phase.  */

namespace {

const pass_data pass_data_slp_vectorize =
{
  GIMPLE_PASS, /* type */
  "slp", /* name */
  OPTGROUP_LOOP | OPTGROUP_VEC, /* optinfo_flags */
  TV_TREE_SLP_VECTORIZATION, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_slp_vectorize : public gimple_opt_pass
{
public:
  pass_slp_vectorize (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_slp_vectorize, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_slp_vectorize (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_slp_vectorize != 0; }
  virtual unsigned int execute (function *);

}; // class pass_slp_vectorize
unsigned int
pass_slp_vectorize::execute (function *fun)
{
  basic_block bb;

  bool in_loop_pipeline = scev_initialized_p ();
  if (!in_loop_pipeline)
    {
      loop_optimizer_init (LOOPS_NORMAL);
      scev_initialize ();
    }

  init_stmt_vec_info_vec ();

  FOR_EACH_BB_FN (bb, fun)
    {
      vect_location = find_bb_location (bb);

      if (vect_slp_analyze_bb (bb))
        {
          if (!dbg_cnt (vect_slp))
            break;

          vect_slp_transform_bb (bb);
          if (dump_enabled_p ())
            dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, vect_location,
                             "basic block vectorized\n");
        }
    }

  free_stmt_vec_info_vec ();

  if (!in_loop_pipeline)
    {
      scev_finalize ();
      loop_optimizer_finalize ();
    }

  return 0;
}
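
/* This pass is scheduled both inside the loop optimizer pipeline and as a
   standalone pass; the in_loop_pipeline check above avoids initializing
   (and tearing down) loop structures and scalar evolutions a second time
   when loops are already set up.  */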
} // anon namespace

gimple_opt_pass *
make_pass_slp_vectorize (gcc::context *ctxt)
{
  return new pass_slp_vectorize (ctxt);
}
/* Increase alignment of global arrays to improve vectorization potential.
   TODO:
   - Consider also structs that have an array field.
   - Use ipa analysis to prune arrays that can't be vectorized?
     This should involve global alignment analysis and in the future also
     array padding.  */

static unsigned int
increase_alignment (void)
{
  varpool_node *vnode;

  vect_location = UNKNOWN_LOCATION;

  /* Increase the alignment of all global arrays for vectorization.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    {
      tree vectype, decl = vnode->decl;
      tree t;
      unsigned int alignment;

      t = TREE_TYPE (decl);
      if (TREE_CODE (t) != ARRAY_TYPE)
        continue;
      vectype = get_vectype_for_scalar_type (strip_array_types (t));
      if (!vectype)
        continue;
      alignment = TYPE_ALIGN (vectype);
      if (DECL_ALIGN (decl) >= alignment)
        continue;

      if (vect_can_force_dr_alignment_p (decl, alignment))
        {
          vnode->increase_alignment (TYPE_ALIGN (vectype));
          dump_printf (MSG_NOTE, "Increasing alignment of decl: ");
          dump_generic_expr (MSG_NOTE, TDF_SLIM, decl);
          dump_printf (MSG_NOTE, "\n");
        }
    }
  return 0;
}
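
/* A hedged example of the effect: on a target with 128-bit vectors where
   section anchors are in use, a file-scope "float data[1024];" whose
   DECL_ALIGN is below TYPE_ALIGN of the corresponding vector type would
   be bumped to 128-bit alignment here, so vectorized accesses to it can
   use aligned loads and stores.  The actual numbers depend on the target
   and ABI.  */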
namespace {

const pass_data pass_data_ipa_increase_alignment =
{
  SIMPLE_IPA_PASS, /* type */
  "increase_alignment", /* name */
  OPTGROUP_LOOP | OPTGROUP_VEC, /* optinfo_flags */
  TV_IPA_OPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_increase_alignment : public simple_ipa_opt_pass
{
public:
  pass_ipa_increase_alignment (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_increase_alignment, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return flag_section_anchors && flag_tree_loop_vectorize;
    }

  virtual unsigned int execute (function *) { return increase_alignment (); }

}; // class pass_ipa_increase_alignment

} // anon namespace

simple_ipa_opt_pass *
make_pass_ipa_increase_alignment (gcc::context *ctxt)
{
  return new pass_ipa_increase_alignment (ctxt);
}