Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions src/query/storages/stage/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ databend-common-storages-parquet = { workspace = true }
databend-storages-common-stage = { workspace = true }
databend-storages-common-table-meta = { workspace = true }
enum-as-inner = { workspace = true }
futures = { workspace = true }
jsonb = { workspace = true }
lexical-core = { workspace = true }
log = { workspace = true }
Expand Down
37 changes: 32 additions & 5 deletions src/query/storages/stage/src/read/row_based/processors/reader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,14 +23,16 @@ use databend_common_exception::Result;
use databend_common_expression::DataBlock;
use databend_common_pipeline_sources::PrefetchAsyncSource;
use databend_storages_common_stage::SingleFilePartition;
use futures::AsyncRead;
use futures::AsyncReadExt;
use log::debug;
use opendal::Operator;

use crate::read::row_based::batch::BytesBatch;

struct FileState {
file: SingleFilePartition,
reader: opendal::Reader,
reader: opendal::FuturesAsyncReader,
offset: usize,
}

Expand Down Expand Up @@ -60,15 +62,20 @@ impl BytesReader {

pub async fn read_batch(&mut self) -> Result<DataBlock> {
if let Some(state) = &mut self.file_state {
let end = state.file.size.min(self.read_batch_size + state.offset) as u64;
let buffer = state.reader.read(state.offset as u64..end).await?.to_vec();
let n = buffer.len();
let end = state.file.size.min(self.read_batch_size + state.offset);
let mut buffer = vec![0u8; end - state.offset];
let n = read_full(&mut state.reader, &mut buffer[..]).await?;

// let end = state.file.size.min(self.read_batch_size + state.offset) as u64;
// let buffer = state.reader.read(state.offset as u64..end).await?.to_vec();
// let n = buffer.len();
if n == 0 {
return Err(ErrorCode::BadBytes(format!(
"Unexpected EOF {} expect {} bytes, read only {} bytes.",
state.file.path, state.file.size, state.offset
)));
};
buffer.truncate(n);

Profile::record_usize_profile(ProfileStatisticsName::ScanBytes, n);
self.table_ctx
Expand Down Expand Up @@ -116,7 +123,12 @@ impl PrefetchAsyncSource for BytesReader {
};
let file = SingleFilePartition::from_part(&part)?.clone();

let reader = self.op.reader(&file.path).await?;
let reader = self
.op
.reader(&file.path)
.await?
.into_futures_async_read(0..file.size as u64)
.await?;

self.file_state = Some(FileState {
file,
Expand All @@ -130,3 +142,18 @@ impl PrefetchAsyncSource for BytesReader {
}
}
}

/// Read from `reader` until `buf` is completely filled or the stream
/// reaches EOF, whichever comes first.
///
/// Unlike a single `read` call, this loops over short reads, so the only
/// reasons it returns fewer than `buf.len()` bytes are EOF or an error.
///
/// Returns the number of bytes actually written into `buf` (0 means the
/// stream was already at EOF). Propagates any I/O error from the reader.
#[async_backtrace::framed]
pub async fn read_full<R: AsyncRead + Unpin>(reader: &mut R, buf: &mut [u8]) -> Result<usize> {
    let mut filled = 0;
    while filled < buf.len() {
        // Each iteration reads into the not-yet-filled tail of the buffer.
        let read = reader.read(&mut buf[filled..]).await?;
        if read == 0 {
            // EOF: stop here; caller decides whether a short read is an error.
            break;
        }
        filled += read;
    }
    Ok(filled)
}
Loading