  if (busy_blocks < 1.0)
    busy_blocks= 1.0;

  cost->setIOCount(busy_blocks);

  if (! interrupted)
  {
    /* Assume reading is done in one 'sweep' */
    cost->setAvgIOCost((DISK_SEEK_BASE_COST +
                        DISK_SEEK_PROP_COST*n_blocks/busy_blocks));
  }
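  /*
    Illustrative example (assumed numbers, not from the original source):
    with n_blocks=100 blocks covered by the range and busy_blocks=10 blocks
    that actually have to be fetched, each of the 10 I/Os is charged
    DISK_SEEK_BASE_COST + DISK_SEEK_PROP_COST*10, i.e. a base seek plus a
    term that grows with the distance swept per visited block.
  */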
class SEL_IMERGE;

class SEL_TREE : public memory::SqlAlloc
{
public:
  /*
    Starting an effort to document this field:
    (for some i, keys[i]->type == optimizer::SEL_ARG::IMPOSSIBLE) =>
       (type == SEL_TREE::IMPOSSIBLE)
  */
  enum Type
  {
    IMPOSSIBLE,
    ALWAYS,
    MAYBE,
    KEY,
    KEY_SMALLER
  } type;

  SEL_TREE(enum Type type_arg) : type(type_arg) {}
  SEL_TREE() : type(KEY)
  {
    keys_map.reset();
    memset(keys, 0, sizeof(keys));
  }
  /*
    Note: there may exist SEL_TREE objects with sel_tree->type=KEY and
    keys[i]=0 for all i. (SergeyP: it is not clear whether there is any
    merit in range analyzer functions (e.g. get_mm_parts) returning a
    pointer to such SEL_TREE instead of NULL)
  */
  optimizer::SEL_ARG *keys[MAX_KEY];
  key_map keys_map; /* bitmask of non-NULL elements in keys */

  /*
    Possible ways to read rows using index_merge. The list is non-empty only
    if type==KEY. Currently it can be non-empty only if keys_map.none().
  */
  List<SEL_IMERGE> merges;

  /* The members below are filled/used only after get_mm_tree is done */
  key_map ror_scans_map;   /* bitmask of ROR scan-able elements in keys */
  uint32_t n_ror_scans;    /* number of set bits in ror_scans_map */

  optimizer::RorScanInfo **ror_scans;     /* list of ROR key scans */
  optimizer::RorScanInfo **ror_scans_end; /* last ROR scan */
  /* Note that #records for each key scan is stored in table->quick_rows */
};
static SEL_TREE * get_mm_parts(optimizer::RangeParameter *param,
                               COND *cond_func,
                               Field *field,
                               Item_func::Functype type,
                               Item *value,
                               Item_result cmp_type);

static SEL_TREE *get_mm_tree(optimizer::RangeParameter *param, COND *cond);

static bool is_key_scan_ror(optimizer::Parameter *param, uint32_t keynr, uint8_t nparts);

static ha_rows check_quick_select(Session *session,
                                  optimizer::Parameter *param,
                                  uint32_t idx,
                                  bool index_only,
                                  optimizer::SEL_ARG *tree,
                                  bool update_tbl_stats,
                                  uint32_t *mrr_flags,
                                  uint32_t *bufsize,
                                  optimizer::CostVector *cost);

static optimizer::RangeReadPlan *get_key_scans_params(Session *session,
                                                      optimizer::Parameter *param,
                                                      SEL_TREE *tree,
                                                      bool index_read_must_be_used,
                                                      bool update_tbl_stats,
                                                      double read_time);

optimizer::RorIntersectReadPlan *get_best_ror_intersect(const optimizer::Parameter *param,
                                                        SEL_TREE *tree,
                                                        double read_time,
                                                        bool *are_all_covering);

optimizer::RorIntersectReadPlan *get_best_covering_ror_intersect(optimizer::Parameter *param,
                                                                 SEL_TREE *tree,
                                                                 double read_time);

optimizer::TableReadPlan *get_best_disjunct_quick(Session *session,
                                                  optimizer::Parameter *param,
                                                  SEL_IMERGE *imerge,
                                                  double read_time);

optimizer::GroupMinMaxReadPlan *get_best_group_min_max(optimizer::Parameter *param, SEL_TREE *tree);

static void print_sel_tree(optimizer::Parameter *param,
                           SEL_TREE *tree,
                           key_map *tree_map,
                           const char *msg);

static void print_ror_scans_arr(Table *table,
                                const char *msg,
                                optimizer::RorScanInfo **start,
                                optimizer::RorScanInfo **end);

static SEL_TREE *tree_and(optimizer::RangeParameter *param, SEL_TREE *tree1, SEL_TREE *tree2);

static SEL_TREE *tree_or(optimizer::RangeParameter *param, SEL_TREE *tree1, SEL_TREE *tree2);

static optimizer::SEL_ARG *sel_add(optimizer::SEL_ARG *key1, optimizer::SEL_ARG *key2);

static optimizer::SEL_ARG *key_or(optimizer::RangeParameter *param, optimizer::SEL_ARG *key1, optimizer::SEL_ARG *key2);

static optimizer::SEL_ARG *key_and(optimizer::RangeParameter *param,
                                   optimizer::SEL_ARG *key1,
                                   optimizer::SEL_ARG *key2,
                                   uint32_t clone_flag);

static bool get_range(optimizer::SEL_ARG **e1, optimizer::SEL_ARG **e2, optimizer::SEL_ARG *root1);

static bool eq_tree(optimizer::SEL_ARG *a, optimizer::SEL_ARG *b);

optimizer::SEL_ARG optimizer::null_element(optimizer::SEL_ARG::IMPOSSIBLE);

static bool null_part_in_key(KEY_PART *key_part,
                             const unsigned char *key,
                             uint32_t length);

bool sel_trees_can_be_ored(SEL_TREE *tree1,
                           SEL_TREE *tree2,
                           optimizer::RangeParameter *param);
/*
  SEL_IMERGE is a list of possible ways to do index merge, i.e. it is
  a condition in the following form:
   (t_1||t_2||...||t_N) && (next)

  where all t_i are SEL_TREEs, next is another SEL_IMERGE and no pair
  (t_i,t_j) contains SEL_ARGs for the same index.

  SEL_TREE contained in SEL_IMERGE always has merges=NULL.

  This class relies on memory manager to do the cleanup.
*/
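/*
  Illustrative example (assumed schema): for a table t with indexes i1(key1)
  and i2(key2), the condition (key1 < 10 OR key2 = 5) is represented by one
  SEL_IMERGE holding two SEL_TREEs: one whose keys[] entry for i1 covers the
  interval (-inf, 10) and one whose keys[] entry for i2 covers [5, 5].
*/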
class SEL_IMERGE : public memory::SqlAlloc
{
  enum { PREALLOCED_TREES= 10 };
  SEL_TREE *trees_prealloced[PREALLOCED_TREES];
public:
  SEL_TREE **trees;      /* trees used to do index_merge */
  SEL_TREE **trees_next; /* last of these trees */
  SEL_TREE **trees_end;  /* end of allocated space */

  optimizer::SEL_ARG ***best_keys; /* best keys to read in SEL_TREEs */

  SEL_IMERGE() :
    trees(&trees_prealloced[0]),
    trees_next(trees),
    trees_end(trees + PREALLOCED_TREES)
  {}
  int or_sel_tree(optimizer::RangeParameter *param, SEL_TREE *tree);
  int or_sel_tree_with_checks(optimizer::RangeParameter *param, SEL_TREE *new_tree);
  int or_sel_imerge_with_checks(optimizer::RangeParameter *param, SEL_IMERGE* imerge);
};
/*
  Add SEL_TREE to this index_merge without any checks,

  NOTES
    This function implements the following:
      (x_1||...||x_N) || t = (x_1||...||x_N||t), where x_i, t are SEL_TREEs

  RETURN
     0 - OK
    -1 - Out of memory.
*/

int SEL_IMERGE::or_sel_tree(optimizer::RangeParameter *param, SEL_TREE *tree)
{
  if (trees_next == trees_end)
  {
    const int realloc_ratio= 2; /* Double size for next round */
    uint32_t old_elements= (trees_end - trees);
    uint32_t old_size= sizeof(SEL_TREE**) * old_elements;
    uint32_t new_size= old_size * realloc_ratio;
    SEL_TREE **new_trees;
    if (! (new_trees= (SEL_TREE**)alloc_root(param->mem_root, new_size)))
      return -1;
    memcpy(new_trees, trees, old_size);
    trees= new_trees;
    trees_next= trees + old_elements;
    trees_end= trees + old_elements * realloc_ratio;
  }
  *(trees_next++)= tree;
  return 0;
}
/*
  Perform OR operation on this SEL_IMERGE and supplied SEL_TREE new_tree,
  combining new_tree with one of the trees in this SEL_IMERGE if they both
  have SEL_ARGs for the same key.

  SYNOPSIS
    or_sel_tree_with_checks()
      param    Parameter from SqlSelect::test_quick_select
      new_tree SEL_TREE with type KEY or KEY_SMALLER.

  NOTES
    This does the following:
    (t_1||...||t_k)||new_tree =
     either
       = (t_1||...||t_k||new_tree)
     or
       = (t_1||....||(t_j|| new_tree)||...||t_k),

    where t_i and new_tree are SEL_TREEs.
    new_tree is combined with the first t_j it has a SEL_ARG on common
    key with. As a consequence of this, choice of keys to do index_merge
    read may depend on the order of conditions in WHERE part of the query.

  RETURN
    0  OK
    1  One of the trees was combined with new_tree to SEL_TREE::ALWAYS,
       and (*this) should be discarded.
   -1  An error occurred.
*/
int SEL_IMERGE::or_sel_tree_with_checks(optimizer::RangeParameter *param, SEL_TREE *new_tree)
{
  for (SEL_TREE** tree= trees;
       tree != trees_next;
       tree++)
  {
    if (sel_trees_can_be_ored(*tree, new_tree, param))
    {
      *tree= tree_or(param, *tree, new_tree);
      if (! *tree)
        return 1;
      if (((*tree)->type == SEL_TREE::MAYBE) ||
          ((*tree)->type == SEL_TREE::ALWAYS))
        return 1;
      /* SEL_TREE::IMPOSSIBLE is impossible here */
      return 0;
    }
  }

  /* New tree cannot be combined with any of existing trees. */
  return or_sel_tree(param, new_tree);
}
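/*
  Illustrative example (assumed conditions): if this SEL_IMERGE holds
  { SEL_TREE(key1 < 10), SEL_TREE(key2 = 5) } and new_tree is
  SEL_TREE(key1 > 20), new_tree shares key1 with the first tree, so the two
  are merged by tree_or() into SEL_TREE(key1 < 10 OR key1 > 20) instead of
  being appended as a third disjunct.
*/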
/*
  Perform OR operation on this index_merge and supplied index_merge list.

  RETURN
    0 - OK
    1 - One of conditions in result is always true and this SEL_IMERGE
        should be discarded.
   -1 - An error occurred
*/

int SEL_IMERGE::or_sel_imerge_with_checks(optimizer::RangeParameter *param, SEL_IMERGE* imerge)
{
  for (SEL_TREE** tree= imerge->trees;
       tree != imerge->trees_next;
       tree++)
  {
    if (or_sel_tree_with_checks(param, *tree))
      return 1;
  }
  return 0;
}
/*
  Perform AND operation on two index_merge lists and store result in *im1.
*/

inline void imerge_list_and_list(List<SEL_IMERGE> *im1, List<SEL_IMERGE> *im2)
{
  im1->concat(im2);
}
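/*
  Illustrative note: each SEL_IMERGE in a list is one conjunct, so ANDing
  the lists for (a_1||a_2) and (b_1||b_2) just yields the two-element list
  representing (a_1||a_2) && (b_1||b_2); no per-tree merging is needed.
*/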
/*
  Perform OR operation on 2 index_merge lists, storing result in first list.

  NOTES
    The following conversion is implemented:
     (a_1 &&...&& a_N)||(b_1 &&...&& b_K) = AND_i,j(a_i || b_j) =>
      => (a_1||b_1)

    i.e. all conjuncts except the first one are currently dropped.
    This is done to avoid producing N*K ways to do index_merge.

    If (a_1||b_1) produce a condition that is always true, NULL is returned
    and index_merge is discarded (while it is actually possible to try
    harder).

    As a consequence of this, choice of keys to do index_merge read may depend
    on the order of conditions in WHERE part of the query.

  RETURN
    0     OK, result is stored in *im1
    other Error, both passed lists are unusable
*/

static int imerge_list_or_list(optimizer::RangeParameter *param,
                               List<SEL_IMERGE> *im1,
                               List<SEL_IMERGE> *im2)
{
  SEL_IMERGE *imerge= im1->head();
  im1->empty();
  im1->push_back(imerge);

  return imerge->or_sel_imerge_with_checks(param, im2->head());
}
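/*
  Illustrative example (assumed conditions): ORing the lists for
  (key1<1 && key2<2) and (key3<3) keeps only the head conjunct of each
  side and produces the single imerge (key1<1 || key3<3); the key2<2
  conjunct is dropped rather than enumerating all N*K combinations.
*/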
/*
  Perform OR operation on index_merge list and key tree.

  RETURN
    0     OK, result is stored in *im1.
    other Error
*/

static int imerge_list_or_tree(optimizer::RangeParameter *param,
                               List<SEL_IMERGE> *im1,
                               SEL_TREE *tree)
{
  SEL_IMERGE *imerge= NULL;
  List_iterator<SEL_IMERGE> it(*im1);
  while ((imerge= it++))
  {
    if (imerge->or_sel_tree_with_checks(param, tree))
      it.remove();
  }
  return im1->is_empty();
}
/***************************************************************************
** Basic functions for SqlSelect and QuickRangeSelect
***************************************************************************/
static int fill_used_fields_bitmap(optimizer::Parameter *param)
{
  Table *table= param->table;
  uint32_t pk;

  param->tmp_covered_fields.clear();
  param->needed_fields.resize(table->getShare()->sizeFields());
  param->needed_fields.reset();

  param->needed_fields|= *table->read_set;
  param->needed_fields|= *table->write_set;

  pk= param->table->getShare()->getPrimaryKey();
  if (pk != MAX_KEY && param->table->cursor->primary_key_is_clustered())
  {
    /* The table uses clustered PK and it is not internally generated */
    KeyPartInfo *key_part= param->table->key_info[pk].key_part;
    KeyPartInfo *key_part_end= key_part +
                               param->table->key_info[pk].key_parts;
    for (; key_part != key_part_end; ++key_part)
      param->needed_fields.reset(key_part->fieldnr-1);
  }
  return 0;
}
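/*
  Illustrative example (assumed schema): for UPDATE t SET a=1 WHERE b=2 on a
  table with a clustered primary key (id), needed_fields ends up containing
  a (from the write set) and b (from the read set) but not id, since
  clustered PK columns are returned by every index read anyway.
*/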
  ror_scan->sel_arg= sel_arg;
  ror_scan->records= param->table->quick_rows[keynr];

  ror_scan->covered_fields_size= param->table->getShare()->sizeFields();
  boost::dynamic_bitset<> tmp_bitset(param->table->getShare()->sizeFields());

  KeyPartInfo *key_part= param->table->key_info[keynr].key_part;
  KeyPartInfo *key_part_end= key_part +
                             param->table->key_info[keynr].key_parts;
  for (; key_part != key_part_end; ++key_part)
  {
    if (param->needed_fields.test(key_part->fieldnr-1))
      tmp_bitset.set(key_part->fieldnr-1);
  }
  double rows= rows2double(param->table->quick_rows[ror_scan->keynr]);
  ror_scan->index_read_cost=
    param->table->cursor->index_only_read_time(ror_scan->keynr, rows);
  ror_scan->covered_fields= tmp_bitset.to_ulong();
/*
  Compare two optimizer::RorScanInfo** by E(#records_matched) * key_record_length.
  SYNOPSIS
    cmp_ror_scan_info()
      a ptr to first compared value
      b ptr to second compared value

  RETURN
   -1 a < b
    0 a == b
    1 a > b
*/
/*
  Get best covering ROR-intersection.
  SYNOPSIS
    get_best_covering_ror_intersect()
      param     Parameter from test_quick_select function.
      tree      SEL_TREE with sets of intervals for different keys.
      read_time Don't return table read plans with cost > read_time.

  RETURN
    Best covering ROR-intersection plan
    NULL if no plan found.

  NOTES
    get_best_ror_intersect must be called for a tree before calling this
    function for it.
    This function invalidates tree->ror_scans member values.

  The following approximate algorithm is used:
    I=set of all covering indexes
    F=set of all fields to cover
    do
    {
      Order I by (#covered fields in F desc,
                  #components asc,
                  number of first not covered component asc);
      F=F-covered by first(I);
      I=I-first(I);
    } while F is not empty.
*/
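/*
  Illustrative walk-through (assumed schema, not from the original source):
  needed fields F={a,b,c}; covering candidates I={i1(a,b), i2(b,c), i3(c)}.
  Round 1 picks i1 (covers two fields of F), leaving F={c}. Round 2: i2 and
  i3 both cover one remaining field, but i3 has fewer key components, so it
  is picked and F becomes empty; the covering intersection is {i1, i3}.
*/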
optimizer::RorIntersectReadPlan *get_best_covering_ror_intersect(optimizer::Parameter *param,
                                                                 SEL_TREE *tree,
                                                                 double read_time)
{
  optimizer::RorScanInfo **ror_scan_mark;
  optimizer::RorScanInfo **ror_scans_end= tree->ror_scans_end;

  for (optimizer::RorScanInfo **scan= tree->ror_scans; scan != ror_scans_end; ++scan)
    (*scan)->key_components=
      param->table->key_info[(*scan)->keynr].key_parts;

  /*
    Run covering-ROR-search algorithm.
    Assume set I is [ror_scan .. ror_scans_end)
  */

  /* I=set of all covering indexes */
  ror_scan_mark= tree->ror_scans;

  boost::dynamic_bitset<> *covered_fields= &param->tmp_covered_fields;
  if (covered_fields->empty())
  {
    covered_fields->resize(param->table->getShare()->sizeFields());
  }
  covered_fields->reset();

  double total_cost= 0.0f;
  ha_rows records= 0;
  bool all_covered;

  do
  {
    /*
      Update changed sorting info:
        #covered fields,
        number of first not covered component
      Calculate and save these values for each of remaining scans.
    */
    for (optimizer::RorScanInfo **scan= ror_scan_mark; scan != ror_scans_end; ++scan)
    {
      /* subtract one bitset from the other */
      (*scan)->subtractBitset(*covered_fields);
      (*scan)->used_fields_covered=
        (*scan)->getBitCount();
      (*scan)->first_uncovered_field= (*scan)->findFirstNotSet();
    }

    internal::my_qsort(ror_scan_mark, ror_scans_end-ror_scan_mark,
                       sizeof(optimizer::RorScanInfo*),
                       (qsort_cmp)cmp_ror_scan_info_covering);

    /* I=I-first(I) */
    total_cost += (*ror_scan_mark)->index_read_cost;
    records += (*ror_scan_mark)->records;
    if (total_cost > read_time)
      return NULL;
    /* F=F-covered by first(I) */
    boost::dynamic_bitset<> tmp_bitset= (*ror_scan_mark)->bitsToBitset();
    *covered_fields|= tmp_bitset;
    all_covered= param->needed_fields.is_subset_of(*covered_fields);
  } while ((++ror_scan_mark < ror_scans_end) && ! all_covered);

  if (! all_covered || (ror_scan_mark - tree->ror_scans) == 1)
    return NULL;

  /*
    Ok, [tree->ror_scans .. ror_scan) holds covering index_intersection with
    cost total_cost.
  */
  /* Add priority queue use cost. */
  total_cost += rows2double(records)*
                log((double)(ror_scan_mark - tree->ror_scans)) /
                (TIME_FOR_COMPARE_ROWID * M_LN2);

  if (total_cost > read_time)
    return NULL;

  optimizer::RorIntersectReadPlan *trp= NULL;
  if (! (trp= new (param->mem_root) optimizer::RorIntersectReadPlan))
    return trp;

  uint32_t best_num= (ror_scan_mark - tree->ror_scans);
  if (! (trp->first_scan= (optimizer::RorScanInfo**)param->mem_root->alloc_root(sizeof(optimizer::RorScanInfo*)* best_num)))
    return NULL;
  memcpy(trp->first_scan, tree->ror_scans, best_num*sizeof(optimizer::RorScanInfo*));
  trp->last_scan= trp->first_scan + best_num;
  trp->is_covering= true;
  trp->read_cost= total_cost;
  trp->records= records;
  trp->cpk_scan= NULL;
  set_if_smaller(param->table->quick_condition_rows, records);

  return trp;
}
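/*
  Illustrative cost note (assumed numbers): the priority queue term above
  charges log2(#scans) rowid comparisons per retrieved record, scaled by
  TIME_FOR_COMPARE_ROWID. E.g. for 4 scans and records=1000,
  log(4)/M_LN2 == 2, so 1000*2/TIME_FOR_COMPARE_ROWID is added to
  total_cost for the queue that merges the rowid streams.
*/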
/*
  Get best ROR-intersection plan using non-covering ROR-intersection search
  algorithm. The returned plan may be covering.
*/

optimizer::RorIntersectReadPlan *get_best_ror_intersect(const optimizer::Parameter *param,
                                                        SEL_TREE *tree,
                                                        double read_time,
                                                        bool *are_all_covering)
{
  uint32_t idx= 0;
  double min_cost= DBL_MAX;

  if ((tree->n_ror_scans < 2) || ! param->table->cursor->stats.records)
    return NULL;

  /*
    Step1: Collect ROR-able SEL_ARGs and create optimizer::RorScanInfo for each of
    them. Also find and save clustered PK scan if there is one.
  */
  optimizer::RorScanInfo **cur_ror_scan= NULL;
  optimizer::RorScanInfo *cpk_scan= NULL;
  uint32_t cpk_no= 0;
  bool cpk_scan_used= false;

  if (! (tree->ror_scans= (optimizer::RorScanInfo**)param->mem_root->alloc_root(sizeof(optimizer::RorScanInfo*)* param->keys)))
    return NULL;

  cpk_no= ((param->table->cursor->primary_key_is_clustered()) ?
           param->table->getShare()->getPrimaryKey() : MAX_KEY);

  for (idx= 0, cur_ror_scan= tree->ror_scans; idx < param->keys; idx++)
  {
    optimizer::RorScanInfo *scan;
    if (! tree->ror_scans_map.test(idx))
      continue;
    if (! (scan= make_ror_scan(param, idx, tree->keys[idx])))
      return NULL;
    if (param->real_keynr[idx] == cpk_no)
    {
      cpk_scan= scan;
      tree->n_ror_scans--;
    }
    else
      *(cur_ror_scan++)= scan;
  }

  tree->ror_scans_end= cur_ror_scan;

  /*
    Ok, [ror_scans, ror_scans_end) is array of ptrs to initialized
    optimizer::RorScanInfo's.
    Step 2: Get best ROR-intersection using an approximate algorithm.
  */
  internal::my_qsort(tree->ror_scans, tree->n_ror_scans, sizeof(optimizer::RorScanInfo*),
                     (qsort_cmp)cmp_ror_scan_info);

  optimizer::RorScanInfo **intersect_scans= NULL; /* ROR scans used in index intersection */
  optimizer::RorScanInfo **intersect_scans_end= NULL;
  if (! (intersect_scans= (optimizer::RorScanInfo**)param->mem_root->alloc_root(sizeof(optimizer::RorScanInfo*) * tree->n_ror_scans)))
    return NULL;
  intersect_scans_end= intersect_scans;

  /* Create and incrementally update ROR intersection. */
  ROR_INTERSECT_INFO intersect(param);
  ROR_INTERSECT_INFO intersect_best(param);

  /* [intersect_scans,intersect_scans_best) will hold the best intersection */
  optimizer::RorScanInfo **intersect_scans_best= NULL;
  cur_ror_scan= tree->ror_scans;
  intersect_scans_best= intersect_scans;
  while (cur_ror_scan != tree->ror_scans_end && ! intersect.is_covering)
  {
    /* S= S + first(R); R= R - first(R); */
    if (! ror_intersect_add(&intersect, *cur_ror_scan, false))
    {
      cur_ror_scan++;
      continue;
    }

    *(intersect_scans_end++)= *(cur_ror_scan++);

    if (intersect.total_cost < min_cost)
    {
      /* Local minimum found, save it */
      intersect_best= intersect;
      intersect_scans_best= intersect_scans_end;
      min_cost= intersect.total_cost;
    }
  }

  /* ... (the covering check and optional clustered PK scan step are not
     part of this excerpt) ... */

  uint32_t best_num= (intersect_scans_best - intersect_scans);
  optimizer::RorIntersectReadPlan *trp= NULL;
  if (min_cost < read_time && (cpk_scan_used || best_num > 1))
  {
    if (! (trp= new (param->mem_root) optimizer::RorIntersectReadPlan))
      return trp;
    if (! (trp->first_scan=
           (optimizer::RorScanInfo**)param->mem_root->alloc_root(sizeof(optimizer::RorScanInfo*)*best_num)))
      return NULL;
    memcpy(trp->first_scan, intersect_scans, best_num*sizeof(optimizer::RorScanInfo*));
    trp->last_scan= trp->first_scan + best_num;
    trp->is_covering= intersect_best.is_covering;
    trp->read_cost= intersect_best.total_cost;
    /* Prevent divisions by zero */
    ha_rows best_rows= double2rows(intersect_best.out_rows);
    if (! best_rows)
      best_rows= 1;
    set_if_smaller(param->table->quick_condition_rows, best_rows);
    trp->records= best_rows;
    trp->index_scan_costs= intersect_best.index_scan_costs;
    trp->cpk_scan= cpk_scan_used ? cpk_scan : NULL;
  }
  return trp;
}
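/*
  Background note: a ROR (Rowid-ORdered Retrieval) scan returns rows in
  rowid order, so several such scans (e.g. on key1=5 and key2=10) can be
  intersected by a linear merge of their rowid streams; ror_intersect_add()
  above costs out adding one more stream to that merge.
*/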
/*
  Get best "range" table read plan for given SEL_TREE, also update some info

  SYNOPSIS
    get_key_scans_params()
      param                    Parameters from test_quick_select
      tree                     Make range select for this SEL_TREE
      index_read_must_be_used  true <=> assume 'index only' option will be set
                               (except for clustered PK indexes)
      update_tbl_stats         true <=> update table->quick_* with information
                               about range scans we've evaluated.
*/
/*
  Check if two SEL_TREES can be combined into one (i.e. a single key range
  read can be constructed for "cond_of_tree1 OR cond_of_tree2") without
  index merge.
*/

bool sel_trees_can_be_ored(SEL_TREE *tree1, SEL_TREE *tree2,
                           optimizer::RangeParameter* param)
{
  key_map common_keys= tree1->keys_map;
  common_keys&= tree2->keys_map;

  if (common_keys.none())
    return false;

  /* trees have a common key, check if they refer to same key part */
  optimizer::SEL_ARG **key1,**key2;
  for (uint32_t key_no=0; key_no < param->keys; key_no++)
  {
    if (common_keys.test(key_no))
    {
      key1= tree1->keys + key_no;
      key2= tree2->keys + key_no;
      if ((*key1)->part == (*key2)->part)
      {
        return true;
      }
    }
  }
  return false;
}
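/*
  Illustrative example: (key1part1 < 5) OR (key1part1 > 10) can be ORed into
  a single range tree on key1, so this returns true; (key1part1 < 5) OR
  (key2part1 > 10) shares no key, so it returns false and only index_merge
  can handle the disjunction.
*/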
/*
  Remove the trees that are not suitable for record retrieval.
  SYNOPSIS
    param  Range analysis parameter
    tree   Tree to be processed, tree->type is KEY or KEY_SMALLER

  DESCRIPTION
    This function walks through tree->keys[] and removes the SEL_ARG* trees
    that are not "maybe" trees (*) and cannot be used to construct quick range
    selects.
    (*) - have type MAYBE or MAYBE_KEY. Perhaps we should remove trees of
          these types here as well.

    A SEL_ARG* tree cannot be used to construct quick select if it has
    tree->part != 0. (e.g. it could represent "keypart2 < const").

    WHY THIS FUNCTION IS NEEDED

    Normally we allow construction of SEL_TREE objects that have SEL_ARG
    trees that do not allow quick range select construction. For example for
    " keypart1=1 AND keypart2=2 " the execution will proceed as follows:
    tree1= SEL_TREE { SEL_ARG{keypart1=1} }
    tree2= SEL_TREE { SEL_ARG{keypart2=2} } -- can't make quick range select
                                               from this
    call tree_and(tree1, tree2) -- this joins SEL_ARGs into a usable SEL_ARG
                                   tree.

    There is an exception though: when we construct index_merge SEL_TREE,
    any SEL_ARG* tree that cannot be used to construct quick range select can
    be removed, because current range analysis code doesn't provide any way
    that tree could be later combined with another tree.
    Consider an example: we should not construct
    st1 = SEL_TREE {
      merges = SEL_IMERGE {
                            SEL_TREE(t.key1part1 = 1),
                            SEL_TREE(t.key2part2 = 2)   -- (*)
                          }
          };
    because
     - (*) cannot be used to construct quick range select,
     - There is no execution path that would cause (*) to be converted to
       a tree that could be used.

    The latter is easy to verify: first, notice that the only way to convert
    (*) into a usable tree is to call tree_and(something, (*)).

    Second look at what tree_and/tree_or function would do when passed a
    SEL_TREE that has the structure like st1 tree has, and conclude that
    tree_and(something, (*)) will not be called.

  RETURN
    0  Ok, some suitable trees left
    1  No tree->keys[] left.
*/
static bool remove_nonrange_trees(optimizer::RangeParameter *param, SEL_TREE *tree)
{
  bool res= false;
  for (uint32_t i=0; i < param->keys; i++)
  {
    if (tree->keys[i])
    {
      if (tree->keys[i]->part)
      {
        tree->keys[i]= NULL;
        tree->keys_map.reset(i);
      }
      else
        res= true;
    }
  }
  return ! res;
}
static SEL_TREE *
tree_or(optimizer::RangeParameter *param, SEL_TREE *tree1, SEL_TREE *tree2)
{
  if (! tree1 || ! tree2)
    return 0;
  if (tree1->type == SEL_TREE::IMPOSSIBLE || tree2->type == SEL_TREE::ALWAYS)
    return tree2;
  if (tree2->type == SEL_TREE::IMPOSSIBLE || tree1->type == SEL_TREE::ALWAYS)
    return tree1;
  if (tree1->type == SEL_TREE::MAYBE)
    return tree1; // Can't use this
  if (tree2->type == SEL_TREE::MAYBE)
    return tree2;

  SEL_TREE *result= 0;
  key_map result_keys;
  result_keys.reset();
  if (sel_trees_can_be_ored(tree1, tree2, param))
  {
    /* Join the trees key per key */
    optimizer::SEL_ARG **key1,**key2,**end;
    for (key1= tree1->keys, key2= tree2->keys, end= key1+param->keys;
         key1 != end; key1++, key2++)
    {
      *key1= key_or(param, *key1, *key2);
      if (*key1)
      {
        result= tree1; // Added to tree1
        result_keys.set(key1 - tree1->keys);
      }
    }
    if (result)
      result->keys_map= result_keys;
  }
  else
  {
    /* ok, two trees have KEY type but cannot be used without index merge */
    if (tree1->merges.is_empty() && tree2->merges.is_empty())
    {
      if (param->remove_jump_scans)
      {
        bool no_trees= remove_nonrange_trees(param, tree1);
        no_trees= no_trees || remove_nonrange_trees(param, tree2);
        if (no_trees)
          return (new SEL_TREE(SEL_TREE::ALWAYS));
      }
      SEL_IMERGE *merge= NULL;
      /* both trees are "range" trees, produce new index merge structure */
      if (! (result= new SEL_TREE()) || ! (merge= new SEL_IMERGE()) ||
          (result->merges.push_back(merge)) ||
          (merge->or_sel_tree(param, tree1)) ||
          (merge->or_sel_tree(param, tree2)))
        result= NULL;
      else
        result->type= tree1->type;
    }
    else if (! tree1->merges.is_empty() && ! tree2->merges.is_empty())
    {
      if (imerge_list_or_list(param, &tree1->merges, &tree2->merges))
        result= new SEL_TREE(SEL_TREE::ALWAYS);
      else
        result= tree1;
    }
    else
    {
      /* one tree is index merge tree and another is range tree */
      if (tree1->merges.is_empty())
        std::swap(tree1, tree2);

      if (param->remove_jump_scans && remove_nonrange_trees(param, tree2))
        return (new SEL_TREE(SEL_TREE::ALWAYS));
      /* add tree2 to tree1->merges, checking if it collapses to ALWAYS */
      if (imerge_list_or_tree(param, &tree1->merges, tree2))
        result= new SEL_TREE(SEL_TREE::ALWAYS);
      else
        result= tree1;
    }
  }
  return result;
}
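/*
  Illustrative example: ORing (key1part1=1) with (key1part1=7) lands in the
  sel_trees_can_be_ored() branch and merges intervals within one key via
  key_or(); ORing (key1part1=1) with (key2part1=7) cannot, so tree_or()
  builds a SEL_IMERGE holding both trees as an index_merge candidate.
*/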
/* And key trees where key1->part < key2->part */
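/*
  Illustrative semantics of key_or() below (not from the original source):
  given two interval lists on the same key part, it produces their union,
  e.g. (1 < k < 5) OR (3 <= k < 8) collapses into the single interval
  (1 < k < 8), while disjoint intervals remain separate SEL_ARG nodes in
  the red-black tree rooted at key1.
*/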
static optimizer::SEL_ARG *
key_or(optimizer::RangeParameter *param, optimizer::SEL_ARG *key1, optimizer::SEL_ARG *key2)
{
  if (! key1)
  {
    if (key2)
    {
      key2->use_count--;
      key2->free_tree();
    }
    return 0;
  }
  if (! key2)
  {
    key1->use_count--;
    key1->free_tree();
    return 0;
  }
  key1->use_count--;
  key2->use_count--;

  if (key1->part != key2->part)
  {
    key1->free_tree();
    key2->free_tree();
    return 0; // Can't optimize this
  }

  // If one of the key is MAYBE_KEY then the found region may be bigger
  if (key1->type == optimizer::SEL_ARG::MAYBE_KEY)
  {
    key2->free_tree();
    key1->use_count++;
    return key1;
  }
  if (key2->type == optimizer::SEL_ARG::MAYBE_KEY)
  {
    key1->free_tree();
    key2->use_count++;
    return key2;
  }

  if (key1->use_count > 0)
  {
    if (key2->use_count == 0 || key1->elements > key2->elements)
    {
      std::swap(key1,key2);
    }
    if (key1->use_count > 0 || ! (key1=key1->clone_tree(param)))
      return 0; // OOM
  }

  // Add tree at key2 to tree at key1
  bool key2_shared= key2->use_count != 0;
  key1->maybe_flag|= key2->maybe_flag;

  for (key2=key2->first(); key2; )
  {
    optimizer::SEL_ARG *tmp= key1->find_range(key2); // Find key1.min <= key2.min
    int cmp;

    if (! tmp)
    {
      tmp=key1->first(); // tmp.min > key2.min
      cmp= -1;
    }
    else if ((cmp=tmp->cmp_max_to_min(key2)) < 0)
    { // Found tmp.max < key2.min
      optimizer::SEL_ARG *next= tmp->next;
      if (cmp == -2 && eq_tree(tmp->next_key_part,key2->next_key_part))
      {
        // Join near ranges like tmp.max < 0 and key2.min >= 0
        optimizer::SEL_ARG *key2_next=key2->next;
        if (key2_shared)
        {
          if (! (key2=new optimizer::SEL_ARG(*key2)))
            return 0; // out of memory
          key2->increment_use_count(key1->use_count+1);
          key2->next= key2_next; // New copy of key2
        }
        key2->copy_min(tmp);
        if (! (key1=key1->tree_delete(tmp)))
        { // Only one key in tree
          key1= key2;
          key1->make_root();
          key2= key2_next;
          break;
        }
      }
      if (! (tmp= next)) // tmp.min > key2.min
        break; // Copy rest of key2
    }
    if (cmp < 0)
    { // tmp.min > key2.min
      int tmp_cmp;
      if ((tmp_cmp= tmp->cmp_min_to_max(key2)) > 0) // if tmp.min > key2.max
      {
        if (tmp_cmp == 2 && eq_tree(tmp->next_key_part,key2->next_key_part))
        { // ranges are connected
          tmp->copy_min_to_min(key2);
          key1->merge_flags(key2);
          if (tmp->min_flag & NO_MIN_RANGE &&
              tmp->max_flag & NO_MAX_RANGE)
          {
            if (key1->maybe_flag)
              return new optimizer::SEL_ARG(optimizer::SEL_ARG::MAYBE_KEY);
            return 0;
          }
          key2->increment_use_count(-1); // Free not used tree
          key2= key2->next;
          continue;
        }
        else
        {
          optimizer::SEL_ARG *next= key2->next; // Keys are not overlapping
          if (key2_shared)
          {
            optimizer::SEL_ARG *cpy= new optimizer::SEL_ARG(*key2); // Must make copy
            if (! cpy)
              return 0; // OOM
            key1= key1->insert(cpy);
            key2->increment_use_count(key1->use_count+1);
          }
          else
            key1= key1->insert(key2); // Will destroy key2_root
          key2= next;
          continue;
        }
      }
    }

    // tmp.max >= key2.min && tmp.min <= key2.max (overlapping ranges)
    if (eq_tree(tmp->next_key_part,key2->next_key_part))
    {
      if (tmp->is_same(key2))
      {
        tmp->merge_flags(key2); // Copy maybe flags
        key2->increment_use_count(-1); // Free not used tree
      }
      else
      {
        optimizer::SEL_ARG *last= tmp;
        while (last->next && last->next->cmp_min_to_max(key2) <= 0 &&
               eq_tree(last->next->next_key_part,key2->next_key_part))
        {
          optimizer::SEL_ARG *save= last;
          last= last->next;
          key1= key1->tree_delete(save);
        }
        last->copy_min(tmp);
        if (last->copy_min(key2) || last->copy_max(key2))
        { // Full range
          key1->free_tree();
          for (; key2; key2= key2->next)
            key2->increment_use_count(-1); // Free not used tree
          if (key1->maybe_flag)
            return new optimizer::SEL_ARG(optimizer::SEL_ARG::MAYBE_KEY);
          return 0;
        }
      }
      key2= key2->next;
      continue;
    }

    if (cmp >= 0 && tmp->cmp_min_to_min(key2) < 0)
    { // tmp.min <= x < key2.min
      optimizer::SEL_ARG *new_arg= tmp->clone_first(key2);
      if (! new_arg)
        return 0; // OOM
      if ((new_arg->next_key_part= key1->next_key_part))
        new_arg->increment_use_count(key1->use_count+1);
      tmp->copy_min_to_min(key2);
      key1= key1->insert(new_arg);
    }

    // tmp.min >= key2.min && tmp.min <= key2.max
    optimizer::SEL_ARG key(*key2); // Get copy we can modify
    for (;;)
    {
      if (tmp->cmp_min_to_min(&key) > 0)
      { // key.min <= x < tmp.min
        optimizer::SEL_ARG *new_arg= key.clone_first(tmp);
        if (! new_arg)
          return 0; // OOM
        if ((new_arg->next_key_part=key.next_key_part))
          new_arg->increment_use_count(key1->use_count+1);
        key1= key1->insert(new_arg);
      }
      if ((cmp= tmp->cmp_max_to_max(&key)) <= 0)
      { // tmp.min. <= x <= tmp.max
        tmp->maybe_flag|= key.maybe_flag;
        key.increment_use_count(key1->use_count+1);
        tmp->next_key_part= key_or(param, tmp->next_key_part, key.next_key_part);
        if (! cmp) // Key2 is ready
          break;
        key.copy_max_to_min(tmp);
        if (! (tmp= tmp->next))
        {
          optimizer::SEL_ARG *tmp2= new optimizer::SEL_ARG(key);
          if (! tmp2)
            return 0; // OOM
          key1= key1->insert(tmp2);
          key2= key2->next;
          goto end;
        }
        if (tmp->cmp_min_to_max(&key) > 0)
        {
          optimizer::SEL_ARG *tmp2= new optimizer::SEL_ARG(key); // Must make copy
          if (! tmp2)
            return 0; // OOM
          key1= key1->insert(tmp2);
          break;
        }
      }
      else
      {
        optimizer::SEL_ARG *new_arg= tmp->clone_last(&key); // tmp.min <= x <= key.max
        if (! new_arg)
          return 0; // OOM
        tmp->copy_max_to_min(&key);
        tmp->increment_use_count(key1->use_count+1);
        /* Increment key count as it may be used for next loop */
        key.increment_use_count(1);
        new_arg->next_key_part= key_or(param, tmp->next_key_part, key.next_key_part);
        key1= key1->insert(new_arg);
        break;
      }
    }
    key2= key2->next;
  }

end:
  while (key2)
  {
    optimizer::SEL_ARG *next= key2->next;
    if (key2_shared)
    {
      optimizer::SEL_ARG *tmp= new optimizer::SEL_ARG(*key2); // Must make copy
      if (! tmp)
        return 0;
      key2->increment_use_count(key1->use_count+1);
      key1= key1->insert(tmp);
    }
    else
      key1= key1->insert(key2); // Will destroy key2_root
    key2= next;
  }
  key1->use_count++;
  return key1;
}
/* Compare if two trees are equal */
static bool eq_tree(optimizer::SEL_ARG *a, optimizer::SEL_ARG *b)
{
  if (a == b)
    return true;

  if (! a || ! b || ! a->is_same(b))
    return false;

  if (a->left != &optimizer::null_element && b->left != &optimizer::null_element)
  {
    if (! eq_tree(a->left, b->left))
      return false;
  }
  else if (a->left != &optimizer::null_element || b->left != &optimizer::null_element)
    return false;

  if (a->right != &optimizer::null_element && b->right != &optimizer::null_element)
  {
    if (! eq_tree(a->right, b->right))
      return false;
  }
  else if (a->right != &optimizer::null_element || b->right != &optimizer::null_element)
    return false;

  if (a->next_key_part != b->next_key_part)
  { /* Sub range is not equal */
    if (! a->next_key_part != ! b->next_key_part ||
        ! eq_tree(a->next_key_part, b->next_key_part))
      return false;
  }
  return true;
}
/****************************************************************************
  MRR Range Sequence Interface implementation that walks a SEL_ARG* tree.
 ****************************************************************************/
static inline uint32_t get_field_keypart(KeyInfo *index, Field *field);

static inline optimizer::SEL_ARG * get_index_range_tree(uint32_t index,
                                                        SEL_TREE *range_tree,
                                                        optimizer::Parameter *param,
                                                        uint32_t *param_idx);

static bool get_constant_key_infix(KeyInfo *index_info,
                                   optimizer::SEL_ARG *index_range_tree,
                                   KeyPartInfo *first_non_group_part,
                                   KeyPartInfo *min_max_arg_part,
                                   KeyPartInfo *last_part,
                                   Session *session,
                                   unsigned char *key_infix,
                                   uint32_t *key_infix_len,
                                   KeyPartInfo **first_non_infix_part);

static bool check_group_min_max_predicates(COND *cond, Item_field *min_max_arg_item);

static void
cost_group_min_max(Table* table,
                   KeyInfo *index_info,
                   uint32_t used_key_parts,
                   uint32_t group_key_parts,
                   SEL_TREE *range_tree,
                   optimizer::SEL_ARG *index_tree,
                   ha_rows quick_prefix_records,
                   bool have_min,
                   bool have_max,
                   double *read_cost,
                   ha_rows *records);
static bool
get_constant_key_infix(KeyInfo *,
                       optimizer::SEL_ARG *index_range_tree,
                       KeyPartInfo *first_non_group_part,
                       KeyPartInfo *min_max_arg_part,
                       KeyPartInfo *last_part,
                       Session *,
                       unsigned char *key_infix,
                       uint32_t *key_infix_len,
                       KeyPartInfo **first_non_infix_part)
{
  optimizer::SEL_ARG *cur_range= NULL;
  KeyPartInfo *cur_part= NULL;
  /* End part for the first loop below. */
  KeyPartInfo *end_part= min_max_arg_part ? min_max_arg_part : last_part;

  *key_infix_len= 0;
  unsigned char *key_ptr= key_infix;
  /* ... */
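/*
  Illustrative example (assumed schema): for an index (a,b,c,d) with
  GROUP BY a and MIN(d), an equality condition such as b=1 AND c=2 supplies
  a constant "infix" for the keyparts between the group prefix and the
  MIN/MAX argument, so each group can still be read with one index probe.
*/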
uint32_t optimizer::RorScanInfo::findFirstNotSet() const
{
  boost::dynamic_bitset<> map= bitsToBitset();
  for (boost::dynamic_bitset<>::size_type i= 0; i < map.size(); i++)
  {
    if (! map.test(i))
      return i;
  }
  return map.size();
}

size_t optimizer::RorScanInfo::getBitCount() const
{
  boost::dynamic_bitset<> tmp_bitset= bitsToBitset();
  return tmp_bitset.count();
}

void optimizer::RorScanInfo::subtractBitset(const boost::dynamic_bitset<>& in_bitset)
{
  boost::dynamic_bitset<> tmp_bitset= bitsToBitset();
  tmp_bitset-= in_bitset;
  covered_fields= tmp_bitset.to_ulong();
}

boost::dynamic_bitset<> optimizer::RorScanInfo::bitsToBitset() const
{
  string res;
  uint64_t conv= covered_fields;
  while (conv)
  {
    res.push_back((conv & 1) + '0');
    conv>>= 1;
  }
  if (! res.empty())
  {
    std::reverse(res.begin(), res.end());
  }
  string final(covered_fields_size - res.length(), '0');
  final.append(res);
  return (boost::dynamic_bitset<>(final));
}

static void print_sel_tree(optimizer::Parameter *param, SEL_TREE *tree, key_map *tree_map, const char *)
{
  optimizer::SEL_ARG **key= NULL;
  optimizer::SEL_ARG **end= NULL;
  int idx= 0;
  char buff[1024];

  String tmp(buff, sizeof(buff), &my_charset_bin);
  tmp.length(0);
  for (idx= 0, key= tree->keys, end= key + param->keys;
       key != end;
       key++, idx++)
  {
    if (tree_map->test(idx))
    {
      uint32_t keynr= param->real_keynr[idx];
      if (tmp.length())
        tmp.append(',');
      tmp.append(param->table->key_info[keynr].name);
    }
  }
  if (! tmp.length())
    tmp.append(STRING_WITH_LEN("(empty)"));
}

static void print_ror_scans_arr(Table *table,
                                const char *,
                                optimizer::RorScanInfo **start,
                                optimizer::RorScanInfo **end)
{
  char buff[1024];
  String tmp(buff, sizeof(buff), &my_charset_bin);
  tmp.length(0);
  for (; start != end; start++)
  {
    if (tmp.length())
      tmp.append(',');
    tmp.append(table->key_info[(*start)->keynr].name);
  }
  if (! tmp.length())
    tmp.append(STRING_WITH_LEN("(empty)"));
}

} /* namespace drizzled */