  bool quick;                            // Don't calculate possible keys
  uint32_t fields_bitmap_size;
- MY_BITMAP needed_fields;               /* bitmask of fields needed by the query */
- MY_BITMAP tmp_covered_fields;
+ bitset<MAX_FIELDS> needed_fields;      /* bitmask of fields needed by the query */
+ bitset<MAX_FIELDS> tmp_covered_fields;
  key_map *needed_reg;                   /* ptr to SQL_SELECT::needed_reg */
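
The whole change follows one pattern: MY_BITMAP calls on these field bitmaps are replaced with std::bitset member functions. Collected in one place as an illustrative sketch rather than Drizzle code (MAX_FIELDS here is a placeholder size, and needed/covered stand in for needed_fields and covered_fields):

#include <bitset>
#include <cstddef>

static const std::size_t MAX_FIELDS= 4096;     // placeholder size, not Drizzle's value

void bitmap_to_bitset_mapping()
{
  std::bitset<MAX_FIELDS> needed, covered;     // stand-ins for needed_fields / covered_fields

  covered.reset();                             // was bitmap_clear_all(&covered)
  covered.set(3);                              // was bitmap_set_bit(&covered, 3)
  bool used= needed.test(3);                   // was bitmap_is_set(&needed, 3)
  needed|= covered;                            // was bitmap_union(&needed, &covered)
  needed.reset(3);                             // was bitmap_clear_bit(&needed, 3)
  std::size_t n= covered.count();              // was bitmap_bits_set(&covered)
  (void) used; (void) n;
}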

 QUICK_RANGE_SELECT::QUICK_RANGE_SELECT(Session *session, Table *table, uint32_t key_nr,
-                                       bool no_alloc, MEM_ROOT *parent_alloc,
+                                       bool no_alloc, MEM_ROOT *parent_alloc)
   :free_file(0),cur_range(NULL),last_range(0),dont_free(0)

-  my_bitmap_map *bitmap;
   in_ror_merged_scan= 0;

   save_read_set= head->read_set;
   save_write_set= head->write_set;

-  /* Allocate a bitmap for used columns (Q: why not on MEM_ROOT?) */
-  if (!(bitmap= (my_bitmap_map*) malloc(head->s->column_bitmap_size)))
-    column_bitmap.bitmap= 0;
-    bitmap_init(&column_bitmap, bitmap, head->s->fields, false);

     delete_dynamic(&ranges);             /* ranges are allocated in alloc */
     free_root(&alloc,MYF(0));
-    free((char*) column_bitmap.bitmap);
     head->column_bitmaps_set(save_read_set, save_write_set);
   if (mrr_buf_desc)

   head->prepare_for_position();
   head->file= org_file;
-  bitmap_copy(&column_bitmap, head->read_set);
+  column_bitmap= *(head->read_set);
   head->column_bitmaps_set(&column_bitmap, &column_bitmap);

   session_param->mem_root= &alloc;

+ * Function object that is used as the comparison function
+ * for the priority queue in the QUICK_ROR_UNION_SELECT
+class compare_functor
+  QUICK_ROR_UNION_SELECT *self;
+  compare_functor(QUICK_ROR_UNION_SELECT *in_arg)
+  inline bool operator()(const QUICK_SELECT_I *i, const QUICK_SELECT_I *j) const
+    int val= self->head->file->cmp_ref(i->last_rowid,

   Do post-constructor initialization.

 int QUICK_ROR_UNION_SELECT::init()
-  if (init_queue(&queue, quick_selects.elements, 0,
-                 false, quick_ror_union_select_queue_cmp,
-    memset(&queue, 0, sizeof(QUEUE));
+  new priority_queue<QUICK_SELECT_I *, vector<QUICK_SELECT_I *>, compare_functor >(compare_functor(this));
   if (!(cur_rowid= (unsigned char*) alloc_root(&alloc, 2*head->file->ref_length)))
   prev_rowid= cur_rowid + head->file->ref_length;
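
Taken together, these two fragments swap the C-style QUEUE (init_queue() plus a comparison callback) for a std::priority_queue parameterised with the compare_functor above, which orders the scans by cmp_ref() on their last row ids. The same pattern in a self-contained form, with a plain struct and memcmp() standing in for QUICK_SELECT_I and head->file->cmp_ref():

#include <cstddef>
#include <cstring>
#include <queue>
#include <vector>

struct Scan                               // stand-in for QUICK_SELECT_I
{
  const unsigned char *last_rowid;
  std::size_t ref_length;
};

// Same shape as compare_functor: std::priority_queue is a max-heap, so the
// comparison is inverted to keep the scan with the smallest row id on top.
struct CompareByRowid
{
  bool operator()(const Scan *a, const Scan *b) const
  {
    return std::memcmp(a->last_rowid, b->last_rowid, a->ref_length) > 0;
  }
};

typedef std::priority_queue<Scan*, std::vector<Scan*>, CompareByRowid> RowidQueue;

// Usage: RowidQueue queue;  queue.push(scan);  Scan *next= queue.top();  queue.pop();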

 static int fill_used_fields_bitmap(PARAM *param)
   Table *table= param->table;
-  param->tmp_covered_fields.bitmap= 0;
   param->fields_bitmap_size= table->s->column_bitmap_size;
-  if (!(tmp= (my_bitmap_map*) alloc_root(param->mem_root,
-                                         param->fields_bitmap_size)) ||
-      bitmap_init(&param->needed_fields, tmp, table->s->fields, false))

-  bitmap_copy(&param->needed_fields, table->read_set);
-  bitmap_union(&param->needed_fields, table->write_set);
+  param->needed_fields = *(table->read_set);
+  param->needed_fields |= *(table->write_set);

   pk= param->table->s->primary_key;
   if (pk != MAX_KEY && param->table->file->primary_key_is_clustered())
     KEY_PART_INFO *key_part_end= key_part +
                                  param->table->key_info[pk].key_parts;
     for (;key_part != key_part_end; ++key_part)
-      bitmap_clear_bit(&param->needed_fields, key_part->fieldnr-1);
+      param->needed_fields.reset(key_part->fieldnr-1);
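
Either way, the net effect of fill_used_fields_bitmap() is the same: needed_fields is the union of the table's read and write sets, minus the key parts of a clustered primary key, since those columns are carried by every index and never force an extra table access. A self-contained restatement with std::bitset (the function name, MAX_FIELDS value, and the key-part list are made up for illustration):

#include <bitset>
#include <cstddef>
#include <vector>

static const std::size_t MAX_FIELDS= 4096;             // placeholder size

std::bitset<MAX_FIELDS>
needed_fields_sketch(const std::bitset<MAX_FIELDS> &read_set,
                     const std::bitset<MAX_FIELDS> &write_set,
                     const std::vector<unsigned> &clustered_pk_fieldnrs)
{
  // was: bitmap_copy() of the read set followed by bitmap_union() of the write set
  std::bitset<MAX_FIELDS> needed= read_set | write_set;

  // Key parts of a clustered primary key are available from any index,
  // so they are never a reason to visit the base table.
  for (std::vector<unsigned>::const_iterator it= clustered_pk_fieldnrs.begin();
       it != clustered_pk_fieldnrs.end(); ++it)
    needed.reset(*it - 1);                              // fieldnr is 1-based, as in the diff

  return needed;
}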

   SEL_ARG *sel_arg;
   /* Fields used in the query and covered by this ROR scan. */
-  MY_BITMAP covered_fields;
+  bitset<MAX_FIELDS> covered_fields;
   uint32_t used_fields_covered;  /* # of set bits in covered_fields */
   int key_rec_length;            /* length of key record (including rowid) */

                                      param->fields_bitmap_size)))
-  if (bitmap_init(&ror_scan->covered_fields, bitmap_buf,
-                  param->table->s->fields, false))
-  bitmap_clear_all(&ror_scan->covered_fields);
+  ror_scan->covered_fields.reset();

   KEY_PART_INFO *key_part= param->table->key_info[keynr].key_part;
   KEY_PART_INFO *key_part_end= key_part +
                                param->table->key_info[keynr].key_parts;
   for (;key_part != key_part_end; ++key_part)
-    if (bitmap_is_set(&param->needed_fields, key_part->fieldnr-1))
-      bitmap_set_bit(&ror_scan->covered_fields, key_part->fieldnr-1);
+    if (param->needed_fields.test(key_part->fieldnr-1))
+      ror_scan->covered_fields.set(key_part->fieldnr-1);

   double rows= rows2double(param->table->quick_rows[ror_scan->keynr]);
   ror_scan->index_read_cost=

   const PARAM *param;
-  MY_BITMAP covered_fields;           /* union of fields covered by all scans */
+  bitset<MAX_FIELDS> covered_fields;  /* union of fields covered by all scans */
     Fraction of table records that satisfy the conditions of all scans.
     This is the number of full records that will be retrieved if a

 ROR_INTERSECT_INFO* ror_intersect_init(const PARAM *param)
   ROR_INTERSECT_INFO *info;
   if (!(info= (ROR_INTERSECT_INFO*)alloc_root(param->mem_root,
                                               sizeof(ROR_INTERSECT_INFO))))
   info->param= param;
-  if (!(buf= (my_bitmap_map*) alloc_root(param->mem_root,
-                                         param->fields_bitmap_size)))
-  if (bitmap_init(&info->covered_fields, buf, param->table->s->fields,
   info->is_covering= false;
   info->index_scan_costs= 0.0;
   info->index_records= 0;
   info->out_rows= (double) param->table->file->stats.records;
-  bitmap_clear_all(&info->covered_fields);
+  info->covered_fields.reset();

 void ror_intersect_cpy(ROR_INTERSECT_INFO *dst, const ROR_INTERSECT_INFO *src)
   dst->param= src->param;
-  memcpy(dst->covered_fields.bitmap, src->covered_fields.bitmap,
-         no_bytes_in_map(&src->covered_fields));
+  dst->covered_fields= src->covered_fields;
   dst->out_rows= src->out_rows;
   dst->is_covering= src->is_covering;
   dst->index_records= src->index_records;

   SEL_ARG *sel_arg, *tuple_arg= NULL;
   key_part_map keypart_map= 0;
   bool cur_covered;
-  bool prev_covered= test(bitmap_is_set(&info->covered_fields,
-                                        key_part->fieldnr-1));
+  bool prev_covered= test(info->covered_fields.test(key_part->fieldnr-1));
   key_range min_range;
   key_range max_range;
   min_range.key= key_val;

   for (sel_arg= scan->sel_arg; sel_arg;
        sel_arg= sel_arg->next_key_part)
-    cur_covered= test(bitmap_is_set(&info->covered_fields,
-                                    key_part[sel_arg->part].fieldnr-1));
+    cur_covered= test(info->covered_fields.test(key_part[sel_arg->part].fieldnr-1));
     if (cur_covered != prev_covered)
       /* create (part1val, ..., part{n-1}val) tuple. */

   info->index_records += info->param->table->quick_rows[ror_scan->keynr];
   info->index_scan_costs += ror_scan->index_read_cost;
-  bitmap_union(&info->covered_fields, &ror_scan->covered_fields);
-  if (!info->is_covering && bitmap_is_subset(&info->param->needed_fields,
-                                             &info->covered_fields))
+  info->covered_fields |= ror_scan->covered_fields;
+  if (!info->is_covering && isBitmapSubset(&info->param->needed_fields,
+                                           &info->covered_fields))
     info->is_covering= true;
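
bitmap_is_subset() has no direct std::bitset counterpart, which is presumably why the bitset side routes the check through an isBitmapSubset() helper. One way to write such a helper is a single expression; this is a sketch, not the actual implementation:

#include <bitset>
#include <cstddef>

static const std::size_t MAX_FIELDS= 4096;     // placeholder size

// True when every bit set in *needed is also set in *covered, i.e. the
// chosen scans retrieve every field the query needs.
static bool isBitmapSubset(const std::bitset<MAX_FIELDS> *needed,
                           const std::bitset<MAX_FIELDS> *covered)
{
  return (*needed & ~*covered).none();
}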

   /* I=set of all covering indexes */
   ror_scan_mark= tree->ror_scans;

-  MY_BITMAP *covered_fields= &param->tmp_covered_fields;
-  if (!covered_fields->bitmap)
-    covered_fields->bitmap= (my_bitmap_map*)alloc_root(param->mem_root,
-                                                       param->fields_bitmap_size);
-  if (!covered_fields->bitmap ||
-      bitmap_init(covered_fields, covered_fields->bitmap,
-                  param->table->s->fields, false))
-  bitmap_clear_all(covered_fields);
+  bitset<MAX_FIELDS> *covered_fields= &param->tmp_covered_fields;
+  covered_fields->reset();

   double total_cost= 0.0f;
   ha_rows records=0;

     for (ROR_SCAN_INFO **scan= ror_scan_mark; scan != ror_scans_end; ++scan)
-      bitmap_subtract(&(*scan)->covered_fields, covered_fields);
-      (*scan)->used_fields_covered=
-        bitmap_bits_set(&(*scan)->covered_fields);
-      (*scan)->first_uncovered_field=
-        bitmap_get_first(&(*scan)->covered_fields);
+      /* subtract a bitset */
+      (*scan)->covered_fields &= covered_fields->flip();
+      covered_fields->flip();
+      (*scan)->used_fields_covered= (*scan)->covered_fields.count();
+      (*scan)->first_uncovered_field= getFirstBitPos((*scan)->covered_fields);

     my_qsort(ror_scan_mark, ror_scans_end-ror_scan_mark, sizeof(ROR_SCAN_INFO*),

     if (total_cost > read_time)

     /* F=F-covered by first(I) */
-    bitmap_union(covered_fields, &(*ror_scan_mark)->covered_fields);
-    all_covered= bitmap_is_subset(&param->needed_fields, covered_fields);
+    *covered_fields |= (*ror_scan_mark)->covered_fields;
+    all_covered= isBitmapSubset(&param->needed_fields, covered_fields);
   } while ((++ror_scan_mark < ror_scans_end) && !all_covered);

   if (!all_covered || (ror_scan_mark - tree->ror_scans) == 1)
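
The surrounding loop is a greedy covering search: covered_fields accumulates the fields retrieved so far, each remaining scan's covered_fields is reduced to what it still adds, the scans are re-sorted by that gain, and the best one is folded in until needed_fields is a subset of covered_fields. A self-contained sketch of the same idea, leaving out the cost bookkeeping and the in-place qsort of the real code (ScanInfo and pick_covering_scans are made-up names):

#include <bitset>
#include <cstddef>
#include <vector>

static const std::size_t MAX_FIELDS= 4096;               // placeholder size

struct ScanInfo                                           // stand-in for ROR_SCAN_INFO
{
  std::bitset<MAX_FIELDS> covered_fields;
};

// Greedy covering: repeatedly pick the scan that covers the most still-uncovered
// needed fields until every field the query needs is covered, or no scan helps.
static bool pick_covering_scans(std::vector<ScanInfo*> scans,
                                const std::bitset<MAX_FIELDS> &needed_fields,
                                std::vector<ScanInfo*> &picked)
{
  std::bitset<MAX_FIELDS> covered;                        // complement of F in the comments
  while (!(needed_fields & ~covered).none())              // i.e. while F is not empty
  {
    std::size_t best= scans.size();
    std::size_t best_gain= 0;
    for (std::size_t i= 0; i < scans.size(); ++i)
    {
      std::size_t gain= (scans[i]->covered_fields & ~covered).count();
      if (gain > best_gain) { best_gain= gain; best= i; }
    }
    if (best == scans.size())
      return false;                                       // remaining scans add nothing
    covered|= scans[best]->covered_fields;                // F= F minus fields of the chosen scan
    picked.push_back(scans[best]);
    scans.erase(scans.begin() + (std::ptrdiff_t) best);
  }
  return true;
}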

   quick=new QUICK_RANGE_SELECT(param->session, param->table,
                                param->real_keynr[idx],
-                               test(parent_alloc), NULL, &create_err);
+                               test(parent_alloc), NULL);

     return 1;                   // out of memory
-  set_if_bigger(quick->max_used_key_length, range->min_length);
-  set_if_bigger(quick->max_used_key_length, range->max_length);
+  set_if_bigger(quick->max_used_key_length, (uint32_t)range->min_length);
+  set_if_bigger(quick->max_used_key_length, (uint32_t)range->max_length);
   set_if_bigger(quick->used_key_parts, (uint32_t) key_tree->part+1);
   if (insert_dynamic(&quick->ranges, (unsigned char*) &range))

-bool QUICK_SELECT_I::is_keys_used(const MY_BITMAP *fields)
+bool QUICK_SELECT_I::is_keys_used(const bitset<MAX_FIELDS> *fields)
   return is_key_used(head, index, fields);

-bool QUICK_INDEX_MERGE_SELECT::is_keys_used(const MY_BITMAP *fields)
-  QUICK_RANGE_SELECT *quick;
-  List_iterator_fast<QUICK_RANGE_SELECT> it(quick_selects);
-  while ((quick= it++))
-    if (is_key_used(head, quick->index, fields))
-bool QUICK_ROR_INTERSECT_SELECT::is_keys_used(const MY_BITMAP *fields)
-  QUICK_RANGE_SELECT *quick;
-  List_iterator_fast<QUICK_RANGE_SELECT> it(quick_selects);
-  while ((quick= it++))
-    if (is_key_used(head, quick->index, fields))
-bool QUICK_ROR_UNION_SELECT::is_keys_used(const MY_BITMAP *fields)
+bool QUICK_INDEX_MERGE_SELECT::is_keys_used(const bitset<MAX_FIELDS> *fields)
+  QUICK_RANGE_SELECT *quick;
+  List_iterator_fast<QUICK_RANGE_SELECT> it(quick_selects);
+  while ((quick= it++))
+    if (is_key_used(head, quick->index, fields))
+bool QUICK_ROR_INTERSECT_SELECT::is_keys_used(const bitset<MAX_FIELDS> *fields)
+  QUICK_RANGE_SELECT *quick;
+  List_iterator_fast<QUICK_RANGE_SELECT> it(quick_selects);
+  while ((quick= it++))
+    if (is_key_used(head, quick->index, fields))
+bool QUICK_ROR_UNION_SELECT::is_keys_used(const bitset<MAX_FIELDS> *fields)
   QUICK_SELECT_I *quick;
   List_iterator_fast<QUICK_SELECT_I> it(quick_selects);

   old_root= session->mem_root;
   /* The following call may change session->mem_root */
-  quick= new QUICK_RANGE_SELECT(session, table, ref->key, 0, 0, &create_err);
+  quick= new QUICK_RANGE_SELECT(session, table, ref->key, 0, 0);
   /* save mem_root set by QUICK_RANGE_SELECT constructor */
   alloc= session->mem_root;

-    if (!queue.elements)
       return(HA_ERR_END_OF_FILE);
     /* Ok, we have a queue with >= 1 scans */

-    quick= (QUICK_SELECT_I*)queue_top(&queue);
+    quick= queue->top();
     memcpy(cur_rowid, quick->last_rowid, rowid_length);

     /* put into queue rowid from the same stream as top element */
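
For context, the loop this hunk belongs to performs a row-id-ordered union with duplicate elimination: pop the scan whose last row id is smallest, re-insert that scan with its next row id, and skip a row id that equals the one returned on the previous call, since several of the merged scans can deliver the same row. A self-contained sketch with integer row ids standing in for the handler's byte-array refs (Stream, TopIsSmallest, and ror_union_sketch are made-up names):

#include <cstddef>
#include <queue>
#include <vector>

// One row-id-ordered scan, reduced to a sorted stream of integer "row ids".
struct Stream
{
  std::vector<int> rowids;
  std::size_t pos;
};

// Keeps the stream whose current row id is smallest on top of the queue.
struct TopIsSmallest
{
  const std::vector<Stream> *streams;
  bool operator()(std::size_t a, std::size_t b) const
  {
    return (*streams)[a].rowids[(*streams)[a].pos] >
           (*streams)[b].rowids[(*streams)[b].pos];
  }
};

std::vector<int> ror_union_sketch(std::vector<Stream> streams)
{
  TopIsSmallest cmp;
  cmp.streams= &streams;
  std::priority_queue<std::size_t, std::vector<std::size_t>, TopIsSmallest> queue(cmp);
  for (std::size_t i= 0; i < streams.size(); ++i)
  {
    streams[i].pos= 0;
    if (!streams[i].rowids.empty())
      queue.push(i);
  }

  std::vector<int> result;
  bool have_prev= false;
  int prev= 0;
  while (!queue.empty())
  {
    std::size_t s= queue.top();
    queue.pop();
    int cur= streams[s].rowids[streams[s].pos];
    if (++streams[s].pos < streams[s].rowids.size())
      queue.push(s);                      // same stream goes back with its next row id
    if (!have_prev || cur != prev)        // equal row ids come from different scans
    {
      result.push_back(cur);
      have_prev= true;
      prev= cur;
    }
  }
  return result;
}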

     If the field is used in the current query ensure that it's
     part of 'cur_index'
-    if (bitmap_is_set(table->read_set, cur_field->field_index) &&
+    if (table->read_set->test(cur_field->field_index) &&
         !cur_field->part_of_key_not_clustered.is_set(cur_index))
       goto next_index;                   // Field was not part of key

          (min_max_arg_part && (min_max_arg_part < last_part));
     for (; cur_part != last_part; cur_part++)
-      if (bitmap_is_set(table->read_set, cur_part->field->field_index))
+      if (table->read_set->test(cur_part->field->field_index))
         goto next_index;

     quick_prefix_selectivity= (double) quick_prefix_records /
                               (double) table_records;
     num_groups= (uint32_t) rint(num_groups * quick_prefix_selectivity);
-    set_if_bigger(num_groups, 1);
+    set_if_bigger(num_groups, 1U);

   if (used_key_parts > group_key_parts)
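
The lines above scale the estimated number of groups by the selectivity of the range condition on the group prefix, quick_prefix_records / table_records, and clamp the result to at least one group. A worked example with made-up numbers:

#include <math.h>      /* rint() */
#include <stdint.h>

uint32_t adjusted_num_groups_example()
{
  double   table_records= 10000.0;        /* rows in the table                      */
  double   quick_prefix_records= 2500.0;  /* rows matching the range on the prefix  */
  uint32_t num_groups= 600;               /* groups estimated from index statistics */

  double quick_prefix_selectivity= quick_prefix_records / table_records;  /* 0.25 */
  num_groups= (uint32_t) rint(num_groups * quick_prefix_selectivity);     /* 150  */
  if (num_groups < 1)                     /* was: set_if_bigger(num_groups, 1U)     */
    num_groups= 1;
  return num_groups;
}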