Skip to content

Commit 1ed9266

Browse files
committed
WIP: Enable needless-pass-by-value lint
1 parent 6a59b88 commit 1ed9266

58 files changed

Lines changed: 247 additions & 316 deletions

File tree

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

Cargo.toml

Lines changed: 41 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -290,56 +290,56 @@ package = "getrandom"
290290
version = "0.3.1"
291291

292292
[workspace.lints.rust]
293-
let_underscore_drop = "deny"
294-
macro_use_extern_crate = "deny"
295-
redundant_lifetimes = "deny"
296-
unsafe_op_in_unsafe_fn = "deny"
297-
unused_lifetimes = "deny"
298-
unused_qualifications = "deny"
299-
unexpected_cfgs = { level = "deny", check-cfg = [
293+
let_underscore_drop = "warn"
294+
macro_use_extern_crate = "warn"
295+
redundant_lifetimes = "warn"
296+
unsafe_op_in_unsafe_fn = "warn"
297+
unused_lifetimes = "warn"
298+
unused_qualifications = "warn"
299+
unexpected_cfgs = { level = "warn", check-cfg = [
300300
"cfg(codspeed)",
301301
"cfg(disable_loom)",
302302
"cfg(vortex_nightly)",
303303
] }
304304
warnings = "warn"
305305

306306
[workspace.lints.clippy]
307-
all = { level = "deny", priority = -1 }
308-
as_ptr_cast_mut = "deny"
309-
borrow_as_ptr = "deny"
310-
cargo = { level = "deny", priority = -1 }
311-
cast_possible_truncation = "deny"
312-
cognitive_complexity = "deny"
313-
collection_is_never_read = "deny"
314-
dbg_macro = "deny"
315-
debug_assert_with_mut_call = "deny"
316-
derive_partial_eq_without_eq = "deny"
317-
equatable_if_let = "deny"
318-
exit = "deny"
319-
expect_fun_call = "deny"
320-
expect_used = "deny"
321-
fallible_impl_from = "deny"
322-
get_unwrap = "deny"
323-
host_endian_bytes = "deny"
324-
if_then_some_else_none = "deny"
325-
inconsistent_struct_constructor = "deny"
326-
manual_assert = "deny"
327-
manual_is_variant_and = "deny"
328-
many_single_char_names = "deny"
329-
mem_forget = "deny"
307+
large_futures = "deny"
308+
all = { level = "warn", priority = -1 }
309+
as_ptr_cast_mut = "warn"
310+
borrow_as_ptr = "warn"
311+
cargo = { level = "warn", priority = -1 }
312+
cast_possible_truncation = "warn"
313+
cognitive_complexity = "warn"
314+
collection_is_never_read = "warn"
315+
dbg_macro = "warn"
316+
debug_assert_with_mut_call = "warn"
317+
derive_partial_eq_without_eq = "warn"
318+
equatable_if_let = "warn"
319+
exit = "warn"
320+
expect_fun_call = "warn"
321+
expect_used = "warn"
322+
fallible_impl_from = "warn"
323+
get_unwrap = "warn"
324+
host_endian_bytes = "warn"
325+
if_then_some_else_none = "warn"
326+
inconsistent_struct_constructor = "warn"
327+
manual_assert = "warn"
328+
manual_is_variant_and = "warn"
329+
many_single_char_names = "warn"
330+
mem_forget = "warn"
330331
multiple_crate_versions = "allow"
331332
needless_range_loop = "allow"
332-
or_fun_call = "deny"
333-
panic = "deny"
334-
# panic_in_result_fn = "deny" -- we cannot disable this for tests to use assertions
335-
redundant_clone = "deny"
336-
same_name_method = "deny"
337-
tests_outside_test_module = "deny"
338-
# todo = "deny"
339-
# unimplemented = "deny"
340-
unwrap_in_result = "deny"
341-
unwrap_used = "deny"
342-
use_debug = "deny"
333+
needless_pass_by_value = "warn"
334+
or_fun_call = "warn"
335+
panic = "warn"
336+
redundant_clone = "warn"
337+
same_name_method = "warn"
338+
tests_outside_test_module = "warn"
339+
unwrap_in_result = "warn"
340+
unwrap_used = "warn"
341+
use_debug = "warn"
342+
assigning_clones = "deny"
343343

344344
[profile.release]
345345
codegen-units = 1
Lines changed: 1 addition & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -1,33 +1,8 @@
11
// SPDX-License-Identifier: Apache-2.0
22
// SPDX-FileCopyrightText: Copyright the Vortex contributors
33

4-
use std::sync::Arc;
5-
6-
use ::vortex::array::arrays::ChunkedArray;
7-
use ::vortex::array::arrays::recursive_list_from_list_view;
8-
use arrow_array::RecordBatch;
9-
use arrow_schema::Schema;
104
#[cfg(feature = "lance")]
115
pub use lance_bench::compress::LanceCompressor;
6+
127
pub mod parquet;
138
pub mod vortex;
14-
15-
pub fn chunked_to_vec_record_batch(
16-
chunked: ChunkedArray,
17-
) -> anyhow::Result<(Vec<RecordBatch>, Arc<Schema>)> {
18-
let chunks_vec = chunked.chunks();
19-
assert!(!chunks_vec.is_empty(), "empty chunks");
20-
21-
let batches = chunks_vec
22-
.iter()
23-
.map(|array| {
24-
// TODO(connor)[ListView]: The rust Parquet implementation does not support writing
25-
// `ListView` to Parquet files yet.
26-
let converted_array = recursive_list_from_list_view(array.clone())?;
27-
Ok(RecordBatch::try_from(converted_array.as_ref())?)
28-
})
29-
.collect::<anyhow::Result<Vec<_>>>()?;
30-
31-
let schema = batches[0].schema();
32-
Ok((batches, schema))
33-
}

benchmarks/compress-bench/src/vortex.rs

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33

44
use std::io::Cursor;
55
use std::path::Path;
6-
use std::sync::Arc;
76
use std::time::Duration;
87
use std::time::Instant;
98

@@ -58,9 +57,9 @@ impl Compressor for VortexCompressor {
5857
let start = Instant::now();
5958
let data = Bytes::from(buf);
6059
let scan = SESSION.open_options().open_buffer(data)?.scan()?;
61-
let schema = Arc::new(scan.dtype()?.to_arrow_schema()?);
60+
let schema = scan.dtype()?.to_arrow_schema()?;
6261

63-
let stream = scan.into_record_batch_stream(schema)?;
62+
let stream = scan.into_record_batch_stream(&schema)?;
6463
pin_mut!(stream);
6564

6665
while let Some(batch) = stream.next().await {

benchmarks/datafusion-bench/src/main.rs

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ use datafusion_physical_plan::collect;
2626
use futures::StreamExt;
2727
use parking_lot::Mutex;
2828
use tokio::fs::File;
29+
use vortex::io::filesystem::FileSystemRef;
2930
use vortex::scan::api::DataSourceRef;
3031
use vortex_bench::Benchmark;
3132
use vortex_bench::BenchmarkArg;
@@ -304,9 +305,9 @@ async fn register_v2_tables<B: Benchmark + ?Sized>(
304305
.runtime_env()
305306
.object_store(table_url.object_store())?;
306307

307-
let fs: vortex::io::filesystem::FileSystemRef =
308-
Arc::new(ObjectStoreFileSystem::new(store.clone(), SESSION.handle()));
309-
let base_prefix = benchmark_base.path().trim_start_matches('/').to_string();
308+
let fs =
309+
Arc::new(ObjectStoreFileSystem::new(store.clone(), SESSION.handle())) as FileSystemRef;
310+
let base_prefix = benchmark_base.path().trim_start_matches('/');
310311
let fs = fs.with_prefix(base_prefix);
311312

312313
let glob_pattern = match &pattern {

benchmarks/duckdb-bench/src/lib.rs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
// SPDX-License-Identifier: Apache-2.0
22
// SPDX-FileCopyrightText: Copyright the Vortex contributors
33

4+
#![allow(clippy::needless_pass_by_value)]
5+
46
//! DuckDB context for benchmarks.
57
68
use std::ops::Deref;

benchmarks/duckdb-bench/src/main.rs

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,6 @@
11
// SPDX-License-Identifier: Apache-2.0
22
// SPDX-FileCopyrightText: Copyright the Vortex contributors
33

4-
mod validation;
5-
64
use std::path::PathBuf;
75

86
use clap::Parser;

benchmarks/duckdb-bench/src/validation.rs

Lines changed: 0 additions & 110 deletions
This file was deleted.

benchmarks/lance-bench/src/lib.rs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
// SPDX-License-Identifier: Apache-2.0
22
// SPDX-FileCopyrightText: Copyright the Vortex contributors
33

4+
#![allow(clippy::needless_pass_by_value)]
5+
46
pub mod compress;
57
pub mod convert;
68
pub mod random_access;

encodings/alp/src/alp/decompress.rs

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -48,15 +48,15 @@ pub fn decompress_into_array(
4848
&patches_values,
4949
&patches_chunk_offsets,
5050
patches,
51-
dtype,
51+
&dtype,
5252
))
5353
} else {
5454
let encoded_prim = encoded.to_primitive();
5555
// We need to drop ALPArray here in case converting encoded buffer into
5656
// primitive didn't create a copy. In that case both alp_encoded and array
5757
// will hold a reference to the buffer we want to mutate.
5858
drop(encoded);
59-
decompress_unchunked_core(encoded_prim, exponents, patches, dtype, ctx)
59+
decompress_unchunked_core(encoded_prim, exponents, patches, &dtype, ctx)
6060
}
6161
}
6262

@@ -70,6 +70,7 @@ pub fn decompress_into_array(
7070
/// A `PrimitiveArray` containing the decompressed floating-point values with all patches applied.
7171
pub fn execute_decompress(array: ALPArray, ctx: &mut ExecutionCtx) -> VortexResult<PrimitiveArray> {
7272
let (encoded, exponents, patches, dtype) = array.into_parts();
73+
7374
if let Some(ref patches) = patches
7475
&& let Some(chunk_offsets) = patches.chunk_offsets()
7576
{
@@ -85,11 +86,11 @@ pub fn execute_decompress(array: ALPArray, ctx: &mut ExecutionCtx) -> VortexResu
8586
&patches_values,
8687
&patches_chunk_offsets,
8788
patches,
88-
dtype,
89+
&dtype,
8990
))
9091
} else {
9192
let encoded = encoded.execute::<PrimitiveArray>(ctx)?;
92-
decompress_unchunked_core(encoded, exponents, patches, dtype, ctx)
93+
decompress_unchunked_core(encoded, exponents, patches, &dtype, ctx)
9394
}
9495
}
9596

@@ -108,7 +109,7 @@ fn decompress_chunked_core(
108109
patches_values: &PrimitiveArray,
109110
patches_chunk_offsets: &PrimitiveArray,
110111
patches: &Patches,
111-
dtype: DType,
112+
dtype: &DType,
112113
) -> PrimitiveArray {
113114
let validity = encoded.validity().clone();
114115
let ptype = dtype.as_ptype();
@@ -157,7 +158,7 @@ fn decompress_unchunked_core(
157158
encoded: PrimitiveArray,
158159
exponents: Exponents,
159160
patches: Option<Patches>,
160-
dtype: DType,
161+
dtype: &DType,
161162
ctx: &mut ExecutionCtx,
162163
) -> VortexResult<PrimitiveArray> {
163164
let validity = encoded.validity().clone();

encodings/alp/src/alp_rd/array.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -311,7 +311,7 @@ impl VTable for ALPRDVTable {
311311
let decoded_array = if array.is_f32() {
312312
PrimitiveArray::new(
313313
alp_rd_decode::<f32>(
314-
left_parts.into_buffer::<u16>(),
314+
left_parts.as_slice::<u16>(),
315315
left_parts_dict,
316316
array.right_bit_width,
317317
right_parts.into_buffer_mut::<u32>(),
@@ -323,7 +323,7 @@ impl VTable for ALPRDVTable {
323323
} else {
324324
PrimitiveArray::new(
325325
alp_rd_decode::<f64>(
326-
left_parts.into_buffer::<u16>(),
326+
left_parts.as_slice::<u16>(),
327327
left_parts_dict,
328328
array.right_bit_width,
329329
right_parts.into_buffer_mut::<u64>(),

0 commit comments

Comments
 (0)