58 changes: 35 additions & 23 deletions arrow-select/src/interleave.rs
@@ -18,15 +18,14 @@
//! Interleave elements from multiple arrays

use crate::dictionary::{merge_dictionary_values, should_merge_dictionary_values};
use arrow_array::builder::{BooleanBufferBuilder, BufferBuilder, PrimitiveBuilder};
use arrow_array::builder::{BooleanBufferBuilder, PrimitiveBuilder};
use arrow_array::cast::AsArray;
use arrow_array::types::*;
use arrow_array::*;
use arrow_buffer::{ArrowNativeType, BooleanBuffer, MutableBuffer, NullBuffer, OffsetBuffer};
use arrow_data::transform::MutableArrayData;
use arrow_data::ByteView;
use arrow_schema::{ArrowError, DataType};
use std::collections::HashMap;
use std::sync::Arc;

macro_rules! primitive_helper {
@@ -238,32 +237,45 @@ fn interleave_views<T: ByteViewType>(
indices: &[(usize, usize)],
) -> Result<ArrayRef, ArrowError> {
let interleaved = Interleave::<'_, GenericByteViewArray<T>>::new(values, indices);
let mut views_builder = BufferBuilder::new(indices.len());
let mut buffers = Vec::new();

// (input array_index, input buffer_index) -> output buffer_index
let mut buffer_lookup: HashMap<(usize, u32), u32> = HashMap::new();
for (array_idx, value_idx) in indices {
let array = interleaved.arrays[*array_idx];
let raw_view = array.views().get(*value_idx).unwrap();
let view_len = *raw_view as u32;
if view_len <= 12 {
views_builder.append(*raw_view);
continue;
}
// value is big enough to be in a variadic buffer
let view = ByteView::from(*raw_view);
let new_buffer_idx: &mut u32 = buffer_lookup
.entry((*array_idx, view.buffer_index))
.or_insert_with(|| {
buffers.push(array.data_buffers()[view.buffer_index as usize].clone());
(buffers.len() - 1) as u32
});
views_builder.append(view.with_buffer_index(*new_buffer_idx).into());
}
// A mapping from (input array_index, input buffer_index) -> output buffer_index
// The outer vec corresponds to the input array index.
// The inner vec corresponds to the buffer index within that input array.
// The value is the index of the buffer in the output array.
let mut buffer_remap: Vec<Vec<Option<u32>>> = interleaved
.arrays
.iter()
.map(|a| vec![None; a.data_buffers().len()])
.collect();

let views: Vec<u128> = indices
.iter()
.map(|(array_idx, value_idx)| {
let array = interleaved.arrays[*array_idx];
let raw_view = array.views().get(*value_idx).unwrap();
let view_len = *raw_view as u32;
if view_len <= 12 {
return *raw_view;
}
// value is big enough to be in a variadic buffer
let view = ByteView::from(*raw_view);
let new_buffer_idx = match &mut buffer_remap[*array_idx][view.buffer_index as usize] {
Some(idx) => *idx,
opt => {
buffers.push(array.data_buffers()[view.buffer_index as usize].clone());
let new_idx = (buffers.len() - 1) as u32;
*opt = Some(new_idx);
new_idx
}
};
view.with_buffer_index(new_buffer_idx).as_u128()
})
.collect();

let array = unsafe {
GenericByteViewArray::<T>::new_unchecked(views_builder.into(), buffers, interleaved.nulls)
GenericByteViewArray::<T>::new_unchecked(views.into(), buffers, interleaved.nulls)
};
Ok(Arc::new(array))
}
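For context, here is a minimal usage sketch (not part of the diff) of the path this change optimizes: interleaving `StringViewArray` values whose strings exceed the 12-byte inline limit, so their views must be remapped onto the output's variadic buffers. It assumes the public `interleave` kernel in `arrow_select::interleave` with a `(&[&dyn Array], &[(usize, usize)])` signature and the `StringViewArray::from_iter_values` constructor; neither is shown in the diff itself.

```rust
// Minimal sketch: interleave two StringViewArrays through the public kernel.
use arrow_array::{Array, StringViewArray};
use arrow_select::interleave::interleave;

fn main() -> Result<(), arrow_schema::ArrowError> {
    // Strings longer than 12 bytes live in variadic data buffers, so they
    // exercise the buffer-remapping path reworked in this PR.
    let a = StringViewArray::from_iter_values(["a string longer than twelve bytes", "b"]);
    let b = StringViewArray::from_iter_values(["another string longer than twelve bytes"]);

    let values: Vec<&dyn Array> = vec![&a, &b];
    // Take (array 0, row 0), then (array 1, row 0), then (array 0, row 1).
    let result = interleave(&values, &[(0, 0), (1, 0), (0, 1)])?;
    assert_eq!(result.len(), 3);
    Ok(())
}
```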
3 changes: 3 additions & 0 deletions arrow/benches/interleave_kernels.rs
@@ -77,13 +77,16 @@ fn add_benchmark(c: &mut Criterion) {
let values = create_string_array_with_len::<i32>(1024, 0.0, 20);
let sparse_dict = create_sparse_dict_from_values::<Int32Type>(1024, 0.0, &values, 10..20);

let string_view = create_string_view_array(1024, 0.0);

let cases: &[(&str, &dyn Array)] = &[
("i32(0.0)", &i32),
("i32(0.5)", &i32_opt),
("str(20, 0.0)", &string),
("str(20, 0.5)", &string_opt),
("dict(20, 0.0)", &dict),
("dict_sparse(20, 0.0)", &sparse_dict),
("str_view(0.0)", &string_view),
];

for (prefix, base) in cases {
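For reference, a hypothetical, pared-down Criterion sketch of what the new `str_view(0.0)` case exercises. It assumes the `create_string_view_array` helper referenced in the diff (taken here from `arrow::util::bench_util`) and the `interleave` kernel from `arrow_select`; the index pattern is illustrative, not the one used by the actual benchmark loop.

```rust
// Hypothetical standalone benchmark sketch for interleaving string view arrays.
use arrow::array::Array;
use arrow::util::bench_util::create_string_view_array;
use arrow_select::interleave::interleave;
use criterion::{criterion_group, criterion_main, Criterion};

fn bench_interleave_string_view(c: &mut Criterion) {
    // Two 1024-row StringViewArrays with no nulls, matching str_view(0.0).
    let a = create_string_view_array(1024, 0.0);
    let b = create_string_view_array(1024, 0.0);
    let values: Vec<&dyn Array> = vec![&a, &b];
    // Alternate rows from the two inputs as a simple index pattern.
    let indices: Vec<(usize, usize)> = (0..1024).map(|i| (i % 2, i)).collect();
    c.bench_function("interleave str_view(0.0)", |bench| {
        bench.iter(|| interleave(&values, &indices).unwrap())
    });
}

criterion_group!(benches, bench_interleave_string_view);
criterion_main!(benches);
```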