@@ -459 +464 @@ optimizer::get_index_for_order()
     MAX_KEY if no such index was found.
-uint32_t optimizer::get_index_for_order(Table *table, order_st *order, ha_rows limit)
+uint32_t optimizer::get_index_for_order(Table *table, Order *order, ha_rows limit)
   uint32_t match_key= MAX_KEY, match_key_len= MAX_KEY_LENGTH + 1;
   for (ord= order; ord; ord= ord->next)
-  for (idx= 0; idx < table->s->keys; idx++)
+  for (idx= 0; idx < table->getShare()->sizeKeys(); idx++)
     if (!(table->keys_in_use_for_query.test(idx)))
-    KEY_PART_INFO *keyinfo= table->key_info[idx].key_part;
+    KeyPartInfo *keyinfo= table->key_info[idx].key_part;
     uint32_t n_parts= table->key_info[idx].key_parts;
     uint32_t partno= 0;
@@ -538 +543 @@ fill_used_fields_bitmap()
 static int fill_used_fields_bitmap(optimizer::Parameter *param)
   Table *table= param->table;
-  param->tmp_covered_fields.setBitmap(0);
-  param->fields_bitmap_size= table->s->column_bitmap_size;
-  if (!(tmp= (my_bitmap_map*) alloc_root(param->mem_root,
-                                         param->fields_bitmap_size)) ||
-      param->needed_fields.init(tmp, table->s->fields))
-  param->needed_fields= *table->read_set;
-  bitmap_union(&param->needed_fields, table->write_set);
-  pk= param->table->s->primary_key;
+  param->tmp_covered_fields.clear();
+  param->needed_fields.resize(table->getShare()->sizeFields());
+  param->needed_fields.reset();
+  param->needed_fields|= *table->read_set;
+  param->needed_fields|= *table->write_set;
+  pk= param->table->getShare()->getPrimaryKey();
   if (pk != MAX_KEY && param->table->cursor->primary_key_is_clustered())
     /* The table uses clustered PK and it is not internally generated */
-    KEY_PART_INFO *key_part= param->table->key_info[pk].key_part;
-    KEY_PART_INFO *key_part_end= key_part +
+    KeyPartInfo *key_part= param->table->key_info[pk].key_part;
+    KeyPartInfo *key_part_end= key_part +
       param->table->key_info[pk].key_parts;
     for (;key_part != key_part_end; ++key_part)
-      param->needed_fields.clearBit(key_part->fieldnr-1);
+      param->needed_fields.reset(key_part->fieldnr-1);
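
Reviewer note: the hunk above replaces the MyBitmap/my_bitmap_map plumbing with boost::dynamic_bitset<>, so needed_fields is sized and populated directly (read_set OR write_set, minus fields the clustered PK already covers). A minimal standalone sketch of the dynamic_bitset operations the new code relies on; the column count and field numbers here are invented for illustration:

    #include <boost/dynamic_bitset.hpp>
    #include <cassert>

    int main()
    {
      boost::dynamic_bitset<> needed_fields;
      boost::dynamic_bitset<> read_set(8), write_set(8); /* 8 columns, made up */
      read_set.set(1);
      write_set.set(3);

      /* mirror fill_used_fields_bitmap(): size, clear, then OR in both sets */
      needed_fields.resize(8);
      needed_fields.reset();
      needed_fields|= read_set;
      needed_fields|= write_set;

      /* drop a field that the clustered PK already covers (fieldnr-1 == 3) */
      needed_fields.reset(3);

      assert(needed_fields.test(1) && ! needed_fields.test(3));
      return 0;
    }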
@@ -1177 @@ struct st_ror_scan_info (removed; replaced by optimizer::RorScanInfo)
-typedef struct st_ror_scan_info
-  uint32_t idx; /* # of used key in param->keys */
-  uint32_t keynr; /* # of used key in table */
-  ha_rows records; /* estimate of # records this scan will return */
-  /* Set of intervals over key fields that will be used for row retrieval. */
-  optimizer::SEL_ARG *sel_arg;
-  /* Fields used in the query and covered by this ROR scan. */
-  MyBitmap covered_fields;
-  uint32_t used_fields_covered; /* # of set bits in covered_fields */
-  int key_rec_length; /* length of key record (including rowid) */
-    Cost of reading all index records with values in sel_arg intervals set
-    (assuming there is no need to access full table records)
-  double index_read_cost;
-  uint32_t first_uncovered_field; /* first unused bit in covered_fields */
-  uint32_t key_components; /* # of parts in the key */
@@ -1202 +1189 @@ make_ror_scan()
-  Create ROR_SCAN_INFO* structure with a single ROR scan on index idx using
+  Create optimizer::RorScanInfo* structure with a single ROR scan on index idx using
   sel_arg set of intervals.
   ror_scan->sel_arg= sel_arg;
   ror_scan->records= param->table->quick_rows[keynr];
-  if (!(bitmap_buf= (my_bitmap_map*) alloc_root(param->mem_root,
-                                                param->fields_bitmap_size)))
-  if (ror_scan->covered_fields.init(bitmap_buf,
-                                    param->table->s->fields))
-  ror_scan->covered_fields.clearAll();
-  KEY_PART_INFO *key_part= param->table->key_info[keynr].key_part;
-  KEY_PART_INFO *key_part_end= key_part +
+  ror_scan->covered_fields_size= param->table->getShare()->sizeFields();
+  boost::dynamic_bitset<> tmp_bitset(param->table->getShare()->sizeFields());
+  KeyPartInfo *key_part= param->table->key_info[keynr].key_part;
+  KeyPartInfo *key_part_end= key_part +
     param->table->key_info[keynr].key_parts;
-  for (;key_part != key_part_end; ++key_part)
+  for (; key_part != key_part_end; ++key_part)
-    if (param->needed_fields.isBitSet(key_part->fieldnr-1))
-      ror_scan->covered_fields.setBit(key_part->fieldnr-1);
+    if (param->needed_fields.test(key_part->fieldnr-1))
+      tmp_bitset.set(key_part->fieldnr-1);
-  double rows= rows2double(param->table->quick_rows[ror_scan->keynr]);
+  double rows= param->table->quick_rows[ror_scan->keynr];
   ror_scan->index_read_cost=
     param->table->cursor->index_only_read_time(ror_scan->keynr, rows);
+  ror_scan->covered_fields= tmp_bitset.to_ulong();
@@ -1260 +1241 @@ cmp_ror_scan_info()
-  Compare two ROR_SCAN_INFO** by E(#records_matched) * key_record_length.
+  Compare two optimizer::RorScanInfo** by E(#records_matched) * key_record_length.
     cmp_ror_scan_info()
       a ptr to first compared value
-static int cmp_ror_scan_info(ROR_SCAN_INFO** a, ROR_SCAN_INFO** b)
+static int cmp_ror_scan_info(optimizer::RorScanInfo** a, optimizer::RorScanInfo** b)
-  double val1= rows2double((*a)->records) * (*a)->key_rec_length;
-  double val2= rows2double((*b)->records) * (*b)->key_rec_length;
+  double val1= static_cast<double>((*a)->records) * (*a)->key_rec_length;
+  double val2= static_cast<double>((*b)->records) * (*b)->key_rec_length;
   return (val1 < val2)? -1: (val1 == val2)? 0 : 1;
-  Compare two ROR_SCAN_INFO** by
+  Compare two optimizer::RorScanInfo** by
     (#covered fields in F desc,
      #components asc,
      number of first not covered component asc)
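
Reviewer note: cmp_ror_scan_info_covering() itself is outside this diff, but the comment above fixes its ordering. A hypothetical comparator implementing exactly that documented sort key (fields covered descending, then key components ascending, then first uncovered component ascending), for reference only:

    /* Illustration only; the real cmp_ror_scan_info_covering() is not
       shown in this diff. */
    static int cmp_ror_scan_info_covering_sketch(optimizer::RorScanInfo **a,
                                                 optimizer::RorScanInfo **b)
    {
      if ((*a)->used_fields_covered != (*b)->used_fields_covered)
        return ((*a)->used_fields_covered > (*b)->used_fields_covered) ? -1 : 1;
      if ((*a)->key_components != (*b)->key_components)
        return ((*a)->key_components < (*b)->key_components) ? -1 : 1;
      if ((*a)->first_uncovered_field != (*b)->first_uncovered_field)
        return ((*a)->first_uncovered_field < (*b)->first_uncovered_field) ? -1 : 1;
      return 0;
    }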
@@ -1331 +1336 @@ ror_intersect_init() (removed; ROR_INTERSECT_INFO now has a constructor)
 } ROR_INTERSECT_INFO;
-  Allocate a ROR_INTERSECT_INFO and initialize it to contain zero scans.
-    ror_intersect_init()
-      param         Parameter from test_quick_select
-ROR_INTERSECT_INFO* ror_intersect_init(const optimizer::Parameter *param)
-  ROR_INTERSECT_INFO *info;
-  if (!(info= (ROR_INTERSECT_INFO*)alloc_root(param->mem_root,
-                                              sizeof(ROR_INTERSECT_INFO))))
-  if (!(buf= (my_bitmap_map*) alloc_root(param->mem_root,
-                                         param->fields_bitmap_size)))
-  if (info->covered_fields.init(buf, param->table->s->fields))
-  info->is_covering= false;
-  info->index_scan_costs= 0.0;
-  info->index_records= 0;
-  info->out_rows= (double) param->table->cursor->stats.records;
-  info->covered_fields.clearAll();
 static void ror_intersect_cpy(ROR_INTERSECT_INFO *dst,
                               const ROR_INTERSECT_INFO *src)
@@ -1471 +1442 @@ ror_scan_selectivity()
 static double ror_scan_selectivity(const ROR_INTERSECT_INFO *info,
-                                   const ROR_SCAN_INFO *scan)
+                                   const optimizer::RorScanInfo *scan)
   double selectivity_mult= 1.0;
-  KEY_PART_INFO *key_part= info->param->table->key_info[scan->keynr].key_part;
+  KeyPartInfo *key_part= info->param->table->key_info[scan->keynr].key_part;
   unsigned char key_val[MAX_KEY_LENGTH+MAX_FIELD_WIDTH]; /* key values tuple */
   unsigned char *key_ptr= key_val;
   optimizer::SEL_ARG *sel_arg= NULL;
   optimizer::SEL_ARG *tuple_arg= NULL;
   key_part_map keypart_map= 0;
   bool cur_covered;
-  bool prev_covered= test(info->covered_fields.isBitSet(key_part->fieldnr-1));
+  bool prev_covered= test(info->covered_fields.test(key_part->fieldnr-1));
   key_range min_range;
   key_range max_range;
   min_range.key= key_val;
@@ -1597 +1565 @@ ror_intersect_add()
     each record of every scan. Assuming 1/TIME_FOR_COMPARE_ROWID
     per check this gives us:
-    info->index_scan_costs += rows2double(info->index_records) /
+    info->index_scan_costs += static_cast<double>(info->index_records) /
                               TIME_FOR_COMPARE_ROWID;
   info->index_records += info->param->table->quick_rows[ror_scan->keynr];
   info->index_scan_costs += ror_scan->index_read_cost;
-  bitmap_union(&info->covered_fields, &ror_scan->covered_fields);
-  if (!info->is_covering && bitmap_is_subset(&info->param->needed_fields,
-                                             &info->covered_fields))
+  boost::dynamic_bitset<> tmp_bitset= ror_scan->bitsToBitset();
+  info->covered_fields|= tmp_bitset;
+  if (! info->is_covering && info->param->needed_fields.is_subset_of(info->covered_fields))
     info->is_covering= true;
   info->total_cost= info->index_scan_costs;
-  if (!info->is_covering)
+  if (! info->is_covering)
     optimizer::CostVector sweep_cost;
-    JOIN *join= info->param->session->lex->select_lex.join;
+    Join *join= info->param->session->lex->select_lex.join;
     bool is_interrupted= test(join && join->tables == 1);
     get_sweep_read_cost(info->param->table, double2rows(info->out_rows),
                         is_interrupted, &sweep_cost);
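
Reviewer note: the cost terms here are worth spelling out. Each index record fetched by the intersection is charged one rowid comparison (the 1/TIME_FOR_COMPARE_ROWID term above), and both versions of the covering search later charge records * log2(#scans) comparisons for the rowid priority queue, since log(n)/M_LN2 is log2(n). A small sketch of that arithmetic; the constant's value is assumed for illustration, not taken from this diff:

    #include <cmath>

    static double priority_queue_cost(double records, double n_scans)
    {
      const double TIME_FOR_COMPARE_ROWID= 10.0; /* assumed for illustration */
      /* log(n)/M_LN2 == log2(n): each merged record bubbles through a
         priority queue over n_scans rowid streams */
      return records * std::log(n_scans) / (TIME_FOR_COMPARE_ROWID * M_LN2);
    }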
@@ +1598 @@ get_best_covering_ror_intersect() (moved ahead of get_best_ror_intersect(); old copy removed below)
+  Get best covering ROR-intersection.
+    get_best_covering_ror_intersect()
+      param     Parameter from test_quick_select function.
+      tree      optimizer::SEL_TREE with sets of intervals for different keys.
+      read_time Don't return table read plans with cost > read_time.
+      Best covering ROR-intersection plan
+      NULL if no plan found.
+    get_best_ror_intersect must be called for a tree before calling this
+    This function invalidates tree->ror_scans member values.
+    The following approximate algorithm is used:
+      I=set of all covering indexes
+      F=set of all fields to cover
+      Order I by (#covered fields in F desc,
+                  number of first not covered component asc);
+      F=F-covered by first(I);
+    } while F is not empty.
+optimizer::RorIntersectReadPlan *get_best_covering_ror_intersect(optimizer::Parameter *param,
+                                                                 optimizer::SEL_TREE *tree,
+  optimizer::RorScanInfo **ror_scan_mark;
+  optimizer::RorScanInfo **ror_scans_end= tree->ror_scans_end;
+  for (optimizer::RorScanInfo **scan= tree->ror_scans; scan != ror_scans_end; ++scan)
+    (*scan)->key_components=
+      param->table->key_info[(*scan)->keynr].key_parts;
+    Run covering-ROR-search algorithm.
+    Assume set I is [ror_scan .. ror_scans_end)
+  /*I=set of all covering indexes */
+  ror_scan_mark= tree->ror_scans;
+  boost::dynamic_bitset<> *covered_fields= &param->tmp_covered_fields;
+  if (covered_fields->empty())
+    covered_fields->resize(param->table->getShare()->sizeFields());
+  covered_fields->reset();
+  double total_cost= 0.0f;
+    Update changed sorting info:
+      number of first not covered component
+    Calculate and save these values for each of remaining scans.
+    for (optimizer::RorScanInfo **scan= ror_scan_mark; scan != ror_scans_end; ++scan)
+      /* subtract one bitset from the other */
+      (*scan)->subtractBitset(*covered_fields);
+      (*scan)->used_fields_covered=
+        (*scan)->getBitCount();
+      (*scan)->first_uncovered_field= (*scan)->findFirstNotSet();
+    internal::my_qsort(ror_scan_mark, ror_scans_end-ror_scan_mark,
+                       sizeof(optimizer::RorScanInfo*),
+                       (qsort_cmp)cmp_ror_scan_info_covering);
+    total_cost += (*ror_scan_mark)->index_read_cost;
+    records += (*ror_scan_mark)->records;
+    if (total_cost > read_time)
+    /* F=F-covered by first(I) */
+    boost::dynamic_bitset<> tmp_bitset= (*ror_scan_mark)->bitsToBitset();
+    *covered_fields|= tmp_bitset;
+    all_covered= param->needed_fields.is_subset_of(*covered_fields);
+  } while ((++ror_scan_mark < ror_scans_end) && ! all_covered);
+  if (!all_covered || (ror_scan_mark - tree->ror_scans) == 1)
+    Ok, [tree->ror_scans .. ror_scan) holds covering index_intersection with
+  /* Add priority queue use cost. */
+  total_cost += static_cast<double>(records) *
+                log((double)(ror_scan_mark - tree->ror_scans)) /
+                (TIME_FOR_COMPARE_ROWID * M_LN2);
+  if (total_cost > read_time)
+  optimizer::RorIntersectReadPlan *trp= NULL;
+  if (! (trp= new (param->mem_root) optimizer::RorIntersectReadPlan))
+  uint32_t best_num= (ror_scan_mark - tree->ror_scans);
+  if (!(trp->first_scan= (optimizer::RorScanInfo**)param->mem_root->alloc_root(sizeof(optimizer::RorScanInfo*)* best_num)))
+  memcpy(trp->first_scan, tree->ror_scans, best_num*sizeof(optimizer::RorScanInfo*));
+  trp->last_scan= trp->first_scan + best_num;
+  trp->is_covering= true;
+  trp->read_cost= total_cost;
+  trp->records= records;
+  trp->cpk_scan= NULL;
+  set_if_smaller(param->table->quick_condition_rows, records);
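
Reviewer note: the loop above is the documented greedy cover: order the remaining scans by how much of F they still cover, take the first, subtract its fields from F, and stop when F is empty or no progress is possible. A self-contained toy version of just the covering logic, ignoring costs and the exact three-key ordering (assumption: scans are plain bitsets of equal size, not RorScanInfo objects):

    #include <boost/dynamic_bitset.hpp>
    #include <vector>

    typedef boost::dynamic_bitset<> FieldSet;

    /* Returns true if `needed` (the set F) can be emptied by greedily
       picking the scan that covers the most still-needed fields. */
    static bool greedy_cover(const std::vector<FieldSet>& scans, FieldSet needed)
    {
      while (needed.any())
      {
        size_t best= scans.size();
        size_t best_gain= 0;
        for (size_t i= 0; i < scans.size(); i++)
        {
          size_t gain= (scans[i] & needed).count();
          if (gain > best_gain)
          {
            best_gain= gain;
            best= i;
          }
        }
        if (best == scans.size())
          return false;        /* no scan makes progress: not coverable */
        needed-= scans[best];  /* F= F - covered by first(I) */
      }
      return true;
    }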
@@ -1630 +1730 @@ get_best_ror_intersect()
   Get best ROR-intersection plan using non-covering ROR-intersection search
   algorithm. The returned plan may be covering.
-    Step1: Collect ROR-able SEL_ARGs and create ROR_SCAN_INFO for each of
+    Step1: Collect ROR-able SEL_ARGs and create optimizer::RorScanInfo for each of
     them. Also find and save clustered PK scan if there is one.
-  ROR_SCAN_INFO **cur_ror_scan= NULL;
-  ROR_SCAN_INFO *cpk_scan= NULL;
+  optimizer::RorScanInfo **cur_ror_scan= NULL;
+  optimizer::RorScanInfo *cpk_scan= NULL;
   uint32_t cpk_no= 0;
   bool cpk_scan_used= false;
-  if (! (tree->ror_scans= (ROR_SCAN_INFO**)alloc_root(param->mem_root,
-                                                      sizeof(ROR_SCAN_INFO*)*
+  if (! (tree->ror_scans= (optimizer::RorScanInfo**)param->mem_root->alloc_root(sizeof(optimizer::RorScanInfo*)* param->keys)))
   cpk_no= ((param->table->cursor->primary_key_is_clustered()) ?
-           param->table->s->primary_key : MAX_KEY);
+           param->table->getShare()->getPrimaryKey() : MAX_KEY);
   for (idx= 0, cur_ror_scan= tree->ror_scans; idx < param->keys; idx++)
-    ROR_SCAN_INFO *scan;
+    optimizer::RorScanInfo *scan;
     if (! tree->ror_scans_map.test(idx))
-    if (!(scan= make_ror_scan(param, idx, tree->keys[idx])))
+    if (! (scan= make_ror_scan(param, idx, tree->keys[idx])))
     if (param->real_keynr[idx] == cpk_no)
   tree->ror_scans_end= cur_ror_scan;
   Ok, [ror_scans, ror_scans_end) is array of ptrs to initialized
+  optimizer::RorScanInfo's.
   Step 2: Get best ROR-intersection using an approximate algorithm.
-  internal::my_qsort(tree->ror_scans, tree->n_ror_scans, sizeof(ROR_SCAN_INFO*),
+  internal::my_qsort(tree->ror_scans, tree->n_ror_scans, sizeof(optimizer::RorScanInfo*),
                      (qsort_cmp)cmp_ror_scan_info);
-  ROR_SCAN_INFO **intersect_scans= NULL; /* ROR scans used in index intersection */
-  ROR_SCAN_INFO **intersect_scans_end= NULL;
-  if (! (intersect_scans= (ROR_SCAN_INFO**)alloc_root(param->mem_root,
-                                                      sizeof(ROR_SCAN_INFO*)*
-                                                      tree->n_ror_scans)))
+  optimizer::RorScanInfo **intersect_scans= NULL; /* ROR scans used in index intersection */
+  optimizer::RorScanInfo **intersect_scans_end= NULL;
+  if (! (intersect_scans= (optimizer::RorScanInfo**)param->mem_root->alloc_root(sizeof(optimizer::RorScanInfo*) * tree->n_ror_scans)))
   intersect_scans_end= intersect_scans;
   /* Create and incrementally update ROR intersection. */
-  ROR_INTERSECT_INFO *intersect= NULL;
-  ROR_INTERSECT_INFO *intersect_best= NULL;
-  if (! (intersect= ror_intersect_init(param)) ||
-      ! (intersect_best= ror_intersect_init(param)))
+  ROR_INTERSECT_INFO intersect(param);
+  ROR_INTERSECT_INFO intersect_best(param);
   /* [intersect_scans,intersect_scans_best) will hold the best intersection */
-  ROR_SCAN_INFO **intersect_scans_best= NULL;
+  optimizer::RorScanInfo **intersect_scans_best= NULL;
   cur_ror_scan= tree->ror_scans;
   intersect_scans_best= intersect_scans;
-  while (cur_ror_scan != tree->ror_scans_end && !intersect->is_covering)
+  while (cur_ror_scan != tree->ror_scans_end && ! intersect.is_covering)
     /* S= S + first(R);  R= R - first(R); */
-    if (!ror_intersect_add(intersect, *cur_ror_scan, false))
+    if (! ror_intersect_add(&intersect, *cur_ror_scan, false))
     cur_ror_scan++;
-  *are_all_covering= intersect->is_covering;
+  *are_all_covering= intersect.is_covering;
   uint32_t best_num= intersect_scans_best - intersect_scans;
-  ror_intersect_cpy(intersect, intersect_best);
+  ror_intersect_cpy(&intersect, &intersect_best);
   Ok, found the best ROR-intersection of non-CPK key scans.
   Check if we should add a CPK scan. If the obtained ROR-intersection is
   covering, it doesn't make sense to add CPK scan.
-  if (cpk_scan && !intersect->is_covering)
+  if (cpk_scan && ! intersect.is_covering)
-    if (ror_intersect_add(intersect, cpk_scan, true) &&
-        (intersect->total_cost < min_cost))
+    if (ror_intersect_add(&intersect, cpk_scan, true) &&
+        (intersect.total_cost < min_cost))
       cpk_scan_used= true;
       intersect_best= intersect; //just set pointer here
   if (! (trp= new (param->mem_root) optimizer::RorIntersectReadPlan))
-    trp->ror_range_scans.assign(intersect_scans, intersect_scans + best_num);
-    trp->setRowRetrievalNecessary(intersect_best->is_covering);
-    trp->read_cost= intersect_best->total_cost;
+    if (! (trp->first_scan=
+           (optimizer::RorScanInfo**)param->mem_root->alloc_root(sizeof(optimizer::RorScanInfo*)*best_num)))
+    memcpy(trp->first_scan, intersect_scans, best_num*sizeof(optimizer::RorScanInfo*));
+    trp->last_scan= trp->first_scan + best_num;
+    trp->is_covering= intersect_best.is_covering;
+    trp->read_cost= intersect_best.total_cost;
     /* Prevent divisions by zero */
-    ha_rows best_rows = double2rows(intersect_best->out_rows);
+    ha_rows best_rows = double2rows(intersect_best.out_rows);
     if (! best_rows)
     set_if_smaller(param->table->quick_condition_rows, best_rows);
     trp->records= best_rows;
-    trp->setCostOfIndexScans(intersect_best->index_scan_costs);
+    trp->index_scan_costs= intersect_best.index_scan_costs;
     trp->cpk_scan= cpk_scan_used? cpk_scan: NULL;
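
Reviewer note: the structural change in this function is that ror_intersect_init() (deleted earlier in this patch) is replaced by a ROR_INTERSECT_INFO constructor, so intersect and intersect_best become stack objects and ror_intersect_add()/ror_intersect_cpy() now take their addresses. A sketch of what such a constructor plausibly looks like, reconstructed from the deleted init code; the member list is abbreviated and not authoritative, and the drizzled types (ha_rows, optimizer::Parameter) come from the project's headers:

    struct ROR_INTERSECT_INFO
    {
      explicit ROR_INTERSECT_INFO(const optimizer::Parameter *in_param)
        : param(in_param),
          is_covering(false),
          index_scan_costs(0.0),
          index_records(0),
          total_cost(0.0)
      {
        /* same initial state ror_intersect_init() used to set up */
        out_rows= static_cast<double>(param->table->cursor->stats.records);
        covered_fields.resize(param->table->getShare()->sizeFields());
      }

      const optimizer::Parameter *param;
      boost::dynamic_bitset<> covered_fields;
      bool is_covering;
      double index_scan_costs;
      ha_rows index_records;
      double out_rows;
      double total_cost;
    };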
@@ -1833 @@ get_best_covering_ror_intersect() (old location, removed)
-  Get best covering ROR-intersection.
-    get_best_covering_ror_intersect()
-      param     Parameter from test_quick_select function.
-      tree      optimizer::SEL_TREE with sets of intervals for different keys.
-      read_time Don't return table read plans with cost > read_time.
-      Best covering ROR-intersection plan
-      NULL if no plan found.
-    get_best_ror_intersect must be called for a tree before calling this
-    This function invalidates tree->ror_scans member values.
-    The following approximate algorithm is used:
-      I=set of all covering indexes
-      F=set of all fields to cover
-      Order I by (#covered fields in F desc,
-                  number of first not covered component asc);
-      F=F-covered by first(I);
-    } while F is not empty.
-optimizer::RorIntersectReadPlan *get_best_covering_ror_intersect(optimizer::Parameter *param,
-                                                                 optimizer::SEL_TREE *tree,
-  ROR_SCAN_INFO **ror_scan_mark;
-  ROR_SCAN_INFO **ror_scans_end= tree->ror_scans_end;
-  for (ROR_SCAN_INFO **scan= tree->ror_scans; scan != ror_scans_end; ++scan)
-    (*scan)->key_components=
-      param->table->key_info[(*scan)->keynr].key_parts;
-    Run covering-ROR-search algorithm.
-    Assume set I is [ror_scan .. ror_scans_end)
-  /*I=set of all covering indexes */
-  ror_scan_mark= tree->ror_scans;
-  MyBitmap *covered_fields= &param->tmp_covered_fields;
-  if (! covered_fields->getBitmap())
-    my_bitmap_map *tmp_bitmap= (my_bitmap_map*)alloc_root(param->mem_root,
-                                                          param->fields_bitmap_size);
-    covered_fields->setBitmap(tmp_bitmap);
-  if (! covered_fields->getBitmap() ||
-      covered_fields->init(covered_fields->getBitmap(),
-                           param->table->s->fields))
-  covered_fields->clearAll();
-  double total_cost= 0.0f;
-    Update changed sorting info:
-      number of first not covered component
-    Calculate and save these values for each of remaining scans.
-    for (ROR_SCAN_INFO **scan= ror_scan_mark; scan != ror_scans_end; ++scan)
-      bitmap_subtract(&(*scan)->covered_fields, covered_fields);
-      (*scan)->used_fields_covered=
-        (*scan)->covered_fields.getBitsSet();
-      (*scan)->first_uncovered_field=
-        (*scan)->covered_fields.getFirst();
-    internal::my_qsort(ror_scan_mark, ror_scans_end-ror_scan_mark,
-                       sizeof(ROR_SCAN_INFO*),
-                       (qsort_cmp)cmp_ror_scan_info_covering);
-    total_cost += (*ror_scan_mark)->index_read_cost;
-    records += (*ror_scan_mark)->records;
-    if (total_cost > read_time)
-    /* F=F-covered by first(I) */
-    bitmap_union(covered_fields, &(*ror_scan_mark)->covered_fields);
-    all_covered= bitmap_is_subset(&param->needed_fields, covered_fields);
-  } while ((++ror_scan_mark < ror_scans_end) && !all_covered);
-  if (!all_covered || (ror_scan_mark - tree->ror_scans) == 1)
-    Ok, [tree->ror_scans .. ror_scan) holds covering index_intersection with
-  /* Add priority queue use cost. */
-  total_cost += rows2double(records)*
-                log((double)(ror_scan_mark - tree->ror_scans)) /
-                (TIME_FOR_COMPARE_ROWID * M_LN2);
-  if (total_cost > read_time)
-  optimizer::RorIntersectReadPlan *trp= NULL;
-  if (! (trp= new (param->mem_root) optimizer::RorIntersectReadPlan))
-  uint32_t best_num= (ror_scan_mark - tree->ror_scans);
-  trp->ror_range_scans.assign(tree->ror_scans, tree->ror_scans + best_num);
-  trp->setRowRetrievalNecessary(true);
-  trp->read_cost= total_cost;
-  trp->records= records;
-  trp->cpk_scan= NULL;
-  set_if_smaller(param->table->quick_condition_rows, records);
@@ -1967 +1932 @@ get_key_scans_params()
   Get best "range" table read plan for given optimizer::SEL_TREE, also update some info
     get_key_scans_params()
       param                   Parameters from test_quick_select
       tree                    Make range select for this optimizer::SEL_TREE
       index_read_must_be_used true <=> assume 'index only' option will be set
@@ -3949 +3934 @@ is_key_scan_ror()
 static bool is_key_scan_ror(optimizer::Parameter *param, uint32_t keynr, uint8_t nparts)
-  KEY *table_key= param->table->key_info + keynr;
-  KEY_PART_INFO *key_part= table_key->key_part + nparts;
-  KEY_PART_INFO *key_part_end= (table_key->key_part +
+  KeyInfo *table_key= param->table->key_info + keynr;
+  KeyPartInfo *key_part= table_key->key_part + nparts;
+  KeyPartInfo *key_part_end= (table_key->key_part +
                                table_key->key_parts);
   uint32_t pk_number;
-  for (KEY_PART_INFO *kp= table_key->key_part; kp < key_part; kp++)
+  for (KeyPartInfo *kp= table_key->key_part; kp < key_part; kp++)
     uint16_t fieldnr= param->table->key_info[keynr].
                         key_part[kp - table_key->key_part].fieldnr - 1;
-    if (param->table->field[fieldnr]->key_length() != kp->length)
+    if (param->table->getField(fieldnr)->key_length() != kp->length)
@@ -4251 +4236 @@ get_quick_select_for_ref()
                                                  table_reference_st *ref,
                                                  ha_rows records)
-  memory::Root *old_root, *alloc;
-  optimizer::QuickRangeSelect *quick= NULL;
-  KEY *key_info = &table->key_info[ref->key];
+  memory::Root *old_root= NULL;
+  memory::Root *alloc= NULL;
+  KeyInfo *key_info = &table->key_info[ref->key];
   KEY_PART *key_part;
   optimizer::QuickRange *range= NULL;
-  bool create_err= false;
   optimizer::CostVector cost;
   old_root= session->mem_root;
   /* The following call may change session->mem_root */
-  quick= new optimizer::QuickRangeSelect(session, table, ref->key, 0, 0, &create_err);
+  optimizer::QuickRangeSelect *quick= new optimizer::QuickRangeSelect(session, table, ref->key, 0, 0);
   /* save mem_root set by QuickRangeSelect constructor */
   alloc= session->mem_root;
@@ -4417 +4401 @@ forward declarations
-static inline uint32_t get_field_keypart(KEY *index, Field *field);
+static inline uint32_t get_field_keypart(KeyInfo *index, Field *field);
 static inline optimizer::SEL_ARG * get_index_range_tree(uint32_t index,
                                                         optimizer::SEL_TREE *range_tree,
                                                         optimizer::Parameter *param,
                                                         uint32_t *param_idx);
-static bool get_constant_key_infix(KEY *index_info,
+static bool get_constant_key_infix(KeyInfo *index_info,
                                    optimizer::SEL_ARG *index_range_tree,
-                                   KEY_PART_INFO *first_non_group_part,
-                                   KEY_PART_INFO *min_max_arg_part,
-                                   KEY_PART_INFO *last_part,
+                                   KeyPartInfo *first_non_group_part,
+                                   KeyPartInfo *min_max_arg_part,
+                                   KeyPartInfo *last_part,
                                    Session *session,
                                    unsigned char *key_infix,
                                    uint32_t *key_infix_len,
-                                   KEY_PART_INFO **first_non_infix_part);
+                                   KeyPartInfo **first_non_infix_part);
 static bool check_group_min_max_predicates(COND *cond, Item_field *min_max_arg_item);
 cost_group_min_max(Table* table,
+                   KeyInfo *index_info,
                    uint32_t used_key_parts,
                    uint32_t group_key_parts,
                    optimizer::SEL_TREE *range_tree,
@@ -4578 +4562 @@ get_best_group_min_max()
 get_best_group_min_max(optimizer::Parameter *param, optimizer::SEL_TREE *tree)
   Session *session= param->session;
-  JOIN *join= session->lex->current_select->join;
+  Join *join= session->lex->current_select->join;
   Table *table= param->table;
   bool have_min= false; /* true if there is a MIN function. */
   bool have_max= false; /* true if there is a MAX function. */
   Item_field *min_max_arg_item= NULL; // The argument of all MIN/MAX functions
-  KEY_PART_INFO *min_max_arg_part= NULL; /* The corresponding keypart. */
+  KeyPartInfo *min_max_arg_part= NULL; /* The corresponding keypart. */
   uint32_t group_prefix_len= 0; /* Length (in bytes) of the key prefix. */
-  KEY *index_info= NULL; /* The index chosen for data access. */
+  KeyInfo *index_info= NULL; /* The index chosen for data access. */
   uint32_t index= 0; /* The id of the chosen index. */
   uint32_t group_key_parts= 0; // Number of index key parts in the group prefix.
   uint32_t used_key_parts= 0; /* Number of index key parts used for access. */
   (GA1,GA2) are all true. If there is more than one such index, select the
   first one. Here we set the variables: group_prefix_len and index_info.
-  KEY *cur_index_info= table->key_info;
-  KEY *cur_index_info_end= cur_index_info + table->s->keys;
-  KEY_PART_INFO *cur_part= NULL;
-  KEY_PART_INFO *end_part= NULL; /* Last part for loops. */
+  KeyInfo *cur_index_info= table->key_info;
+  KeyInfo *cur_index_info_end= cur_index_info + table->getShare()->sizeKeys();
+  KeyPartInfo *cur_part= NULL;
+  KeyPartInfo *end_part= NULL; /* Last part for loops. */
   /* Last index part. */
-  KEY_PART_INFO *last_part= NULL;
-  KEY_PART_INFO *first_non_group_part= NULL;
-  KEY_PART_INFO *first_non_infix_part= NULL;
+  KeyPartInfo *last_part= NULL;
+  KeyPartInfo *first_non_group_part= NULL;
+  KeyPartInfo *first_non_infix_part= NULL;
   uint32_t key_infix_parts= 0;
   uint32_t cur_group_key_parts= 0;
   uint32_t cur_group_prefix_len= 0;
@@ -5179 +5164 @@ get_constant_key_infix()
-  get_constant_key_infix(KEY *,
+  get_constant_key_infix(KeyInfo *,
                          optimizer::SEL_ARG *index_range_tree,
-                         KEY_PART_INFO *first_non_group_part,
-                         KEY_PART_INFO *min_max_arg_part,
-                         KEY_PART_INFO *last_part,
+                         KeyPartInfo *first_non_group_part,
+                         KeyPartInfo *min_max_arg_part,
+                         KeyPartInfo *last_part,
                          unsigned char *key_infix,
                          uint32_t *key_infix_len,
-                         KEY_PART_INFO **first_non_infix_part)
+                         KeyPartInfo **first_non_infix_part)
   optimizer::SEL_ARG *cur_range= NULL;
-  KEY_PART_INFO *cur_part= NULL;
+  KeyPartInfo *cur_part= NULL;
   /* End part for the first loop below. */
-  KEY_PART_INFO *end_part= min_max_arg_part ? min_max_arg_part : last_part;
+  KeyPartInfo *end_part= min_max_arg_part ? min_max_arg_part : last_part;
   *key_infix_len= 0;
   unsigned char *key_ptr= key_infix;
@@ +5569 @@ optimizer::RorScanInfo bit helpers (new)
+uint32_t optimizer::RorScanInfo::findFirstNotSet() const
+  boost::dynamic_bitset<> map= bitsToBitset();
+  for (boost::dynamic_bitset<>::size_type i= 0; i < map.size(); i++)
+size_t optimizer::RorScanInfo::getBitCount() const
+  boost::dynamic_bitset<> tmp_bitset= bitsToBitset();
+  return tmp_bitset.count();
+void optimizer::RorScanInfo::subtractBitset(const boost::dynamic_bitset<>& in_bitset)
+  boost::dynamic_bitset<> tmp_bitset= bitsToBitset();
+  tmp_bitset-= in_bitset;
+  covered_fields= tmp_bitset.to_ulong();
+boost::dynamic_bitset<> optimizer::RorScanInfo::bitsToBitset() const
+  uint64_t conv= covered_fields;
+    res.push_back((conv & 1) + '0');
+  std::reverse(res.begin(), res.end());
+  string final(covered_fields_size - res.length(), '0');
+  return (boost::dynamic_bitset<>(final));
 } /* namespace drizzled */
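
Reviewer note: bitsToBitset() above round-trips the packed uint64_t covered_fields into a boost::dynamic_bitset by writing the bits out as a '0'/'1' string and left-padding it to covered_fields_size (dynamic_bitset's string constructor treats the leftmost character as the highest bit). A standalone demonstration of the same conversion, with made-up field values:

    #include <boost/dynamic_bitset.hpp>
    #include <algorithm>
    #include <cassert>
    #include <cstdint>
    #include <string>

    int main()
    {
      uint64_t covered_fields= 0x5;  /* fields 0 and 2 covered */
      size_t covered_fields_size= 8; /* pretend the table has 8 fields */

      std::string res;
      for (uint64_t conv= covered_fields; conv; conv>>= 1)
        res.push_back(static_cast<char>((conv & 1) + '0'));
      std::reverse(res.begin(), res.end());

      /* left-pad with '0' up to the field count, as bitsToBitset() does */
      std::string padded(covered_fields_size - res.length(), '0');
      padded.append(res);

      boost::dynamic_bitset<> bits(padded);
      assert(bits.size() == 8 && bits.test(0) && bits.test(2) && ! bits.test(1));
      return 0;
    }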