  ror_scan->sel_arg= sel_arg;
  ror_scan->records= param->table->quick_rows[keynr];

  if (!(bitmap_buf= (my_bitmap_map*) alloc_root(param->mem_root,
                                                param->fields_bitmap_size)))
    return NULL;
  if (ror_scan->covered_fields.init(bitmap_buf, param->table->s->fields))
    return NULL;
  ror_scan->covered_fields.clearAll();

  KEY_PART_INFO *key_part= param->table->key_info[keynr].key_part;
  return (val1 < val2)? -1: (val1 == val2)? 0 : 1;
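/*
  The lone return above is the tail of a three-way qsort comparator. For
  context only, such a comparator over ROR scans typically weighs each scan by
  its expected retrieved volume; a sketch along those lines (the val1/val2
  inputs, and the records/key_rec_length fields used to form them, are
  assumptions based on the upstream MySQL code, not taken from this excerpt):

    static int cmp_ror_scan_info(ROR_SCAN_INFO **a, ROR_SCAN_INFO **b)
    {
      double val1= rows2double((*a)->records) * (*a)->key_rec_length;
      double val2= rows2double((*b)->records) * (*b)->key_rec_length;
      return (val1 < val2) ? -1 : (val1 == val2) ? 0 : 1;
    }
*/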
/*
  Compare two ROR_SCAN_INFO** by
    (#covered fields in F desc,
     #components asc,
     number of first not covered component asc)

  SYNOPSIS
    cmp_ror_scan_info_covering()
      a ptr to first compared value
      b ptr to second compared value
*/
static int cmp_ror_scan_info_covering(ROR_SCAN_INFO** a, ROR_SCAN_INFO** b)
{
  if ((*a)->used_fields_covered > (*b)->used_fields_covered)
    return -1;
  if ((*a)->used_fields_covered < (*b)->used_fields_covered)
    return 1;
  if ((*a)->key_components < (*b)->key_components)
    return -1;
  if ((*a)->key_components > (*b)->key_components)
    return 1;
  if ((*a)->first_uncovered_field < (*b)->first_uncovered_field)
    return -1;
  if ((*a)->first_uncovered_field > (*b)->first_uncovered_field)
    return 1;
  return 0;
}
/* Auxiliary structure for incremental ROR-intersection creation */
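/*
  The structure definition itself did not survive in this excerpt. For
  orientation, the upstream MySQL optimizer keeps roughly the following state
  for an in-progress ROR-intersection (an indicative sketch, not necessarily
  the exact definition used in this tree):

    typedef struct st_ror_intersect_info
    {
      const optimizer::Parameter *param;
      MyBitmap covered_fields;   // union of fields covered by the scans added so far
      double out_rows;           // estimated rows matching all added scans
      bool is_covering;          // true once covered_fields covers needed_fields
      ha_rows index_records;     // sum of rows to read from the chosen indexes
      double index_scan_costs;   // sum of the 'index only' scan costs
      double total_cost;
    } ROR_INTERSECT_INFO;
*/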
/*
  Get best covering ROR-intersection.

  SYNOPSIS
    get_best_covering_ror_intersect()
      param     Parameter from test_quick_select function.
      tree      optimizer::SEL_TREE with sets of intervals for different keys.
      read_time Don't return table read plans with cost > read_time.

  RETURN
    Best covering ROR-intersection plan
    NULL if no plan found.

  NOTES
    get_best_ror_intersect must be called for a tree before calling this
    function for it.
    This function invalidates tree->ror_scans member values.

    The following approximate algorithm is used:
      I=set of all covering indexes
      F=set of all fields to cover
      do
      {
        Order I by (#covered fields in F desc,
                    #components asc,
                    number of first not covered component asc);
        F=F-covered by first(I);
      } while F is not empty.
*/
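/*
  For illustration only: the greedy loop described above, reduced to
  self-contained standard C++ (Scan, greedy_cover and the 64-field bitset are
  illustrative stand-ins, not the optimizer's real types; the
  first-uncovered-field tie-break is omitted for brevity):

    #include <algorithm>
    #include <bitset>
    #include <cstddef>
    #include <vector>

    struct Scan                          // stand-in for ROR_SCAN_INFO
    {
      std::bitset<64> covered;           // fields this index covers
      unsigned key_components;           // number of key parts
    };

    // Pick scans greedily until 'needed' is covered; returns how many of the
    // (reordered) scans were chosen. Caller must still check full coverage.
    static size_t greedy_cover(std::vector<Scan> scans, std::bitset<64> needed)
    {
      std::bitset<64> covered;
      size_t picked= 0;
      while (picked < scans.size() && (needed & ~covered).any())
      {
        // Order remaining scans by (#newly covered fields desc, #components asc).
        std::sort(scans.begin() + picked, scans.end(),
                  [&](const Scan &a, const Scan &b)
                  {
                    size_t na= (a.covered & ~covered).count();
                    size_t nb= (b.covered & ~covered).count();
                    if (na != nb)
                      return na > nb;
                    return a.key_components < b.key_components;
                  });
        covered|= scans[picked].covered;  // F= F - fields covered by first(I)
        ++picked;                         // I= I - first(I)
      }
      return picked;
    }
*/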
optimizer::RorIntersectReadPlan *get_best_covering_ror_intersect(optimizer::Parameter *param,
                                                                 optimizer::SEL_TREE *tree,
                                                                 double read_time)
{
  ROR_SCAN_INFO **ror_scan_mark;
  ROR_SCAN_INFO **ror_scans_end= tree->ror_scans_end;

  for (ROR_SCAN_INFO **scan= tree->ror_scans; scan != ror_scans_end; ++scan)
    (*scan)->key_components=
      param->table->key_info[(*scan)->keynr].key_parts;
  /*
    Run covering-ROR-search algorithm.
    Assume set I is [ror_scan .. ror_scans_end)
  */

  /* I=set of all covering indexes */
  ror_scan_mark= tree->ror_scans;
  MyBitmap *covered_fields= &param->tmp_covered_fields;
  if (! covered_fields->getBitmap())
  {
    my_bitmap_map *tmp_bitmap= (my_bitmap_map*)alloc_root(param->mem_root,
                                                          param->fields_bitmap_size);
    covered_fields->setBitmap(tmp_bitmap);
  }
  if (! covered_fields->getBitmap() ||
      covered_fields->init(covered_fields->getBitmap(),
                           param->table->s->fields))
    return NULL;
  covered_fields->clearAll();

  double total_cost= 0.0f;
  ha_rows records= 0;
  bool all_covered;
  do
  {
    /*
      Update changed sorting info:
        #covered fields,
        number of first not covered component
      Calculate and save these values for each of remaining scans.
    */
    for (ROR_SCAN_INFO **scan= ror_scan_mark; scan != ror_scans_end; ++scan)
    {
      bitmap_subtract(&(*scan)->covered_fields, covered_fields);
      (*scan)->used_fields_covered=
        (*scan)->covered_fields.getBitsSet();
      (*scan)->first_uncovered_field=
        (*scan)->covered_fields.getFirst();
    }
    internal::my_qsort(ror_scan_mark, ror_scans_end - ror_scan_mark,
                       sizeof(ROR_SCAN_INFO*),
                       (qsort_cmp)cmp_ror_scan_info_covering);
    total_cost += (*ror_scan_mark)->index_read_cost;
    records += (*ror_scan_mark)->records;
    if (total_cost > read_time)
      return NULL;
    /* F=F-covered by first(I) */
    bitmap_union(covered_fields, &(*ror_scan_mark)->covered_fields);
    all_covered= bitmap_is_subset(&param->needed_fields, covered_fields);
  } while ((++ror_scan_mark < ror_scans_end) && !all_covered);
  if (!all_covered || (ror_scan_mark - tree->ror_scans) == 1)
    return NULL;

  /*
    Ok, [tree->ror_scans .. ror_scan) holds covering index_intersection with
    cost total_cost.
  */
  /* Add priority queue use cost. */
  total_cost += rows2double(records) *
                log((double)(ror_scan_mark - tree->ror_scans)) /
                (TIME_FOR_COMPARE_ROWID * M_LN2);
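  /*
    log(n)/M_LN2 is log2(n), so the term above charges roughly one rowid
    comparison per retrieved row for each level of the priority queue that
    merges the chosen scans, i.e. rows2double(records) * log2(#scans),
    converted to cost units by dividing by TIME_FOR_COMPARE_ROWID.
  */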
  if (total_cost > read_time)
    return NULL;

  optimizer::RorIntersectReadPlan *trp= NULL;
  if (! (trp= new (param->mem_root) optimizer::RorIntersectReadPlan))
    return NULL;
  uint32_t best_num= (ror_scan_mark - tree->ror_scans);
  trp->ror_range_scans.assign(tree->ror_scans, tree->ror_scans + best_num);
  trp->setRowRetrievalNecessary(true);
  trp->read_cost= total_cost;
  trp->records= records;
  trp->cpk_scan= NULL;
  set_if_smaller(param->table->quick_condition_rows, records);

  return trp;
}
Get best "range" table read plan for given optimizer::SEL_TREE, also update some info