Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Allow COPY and CREATE STAGE from public read buckets without credentials #6623

Merged
merged 5 commits into the base branch from the source branch
Jul 14, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/actions/check/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ runs:

- name: Clippy
shell: bash
run: cargo -Z sparse-registry clippy --workspace --all-targets -- -D warnings
run: cargo -Z sparse-registry clippy --workspace --all-targets --all-features -- -D warnings

- name: Audit dependencies
shell: bash
Expand Down
38 changes: 7 additions & 31 deletions common/storage/src/location.rs
Original file line number Diff line number Diff line change
Expand Up @@ -63,23 +63,9 @@ pub fn parse_uri_location(l: &UriLocation) -> Result<(StorageParams, String)> {
account_name: l
.connection
.get("account_name")
.ok_or_else(|| {
Error::new(
ErrorKind::InvalidInput,
anyhow!("account_name is required for storage azblob"),
)
})?
.to_string(),
account_key: l
.connection
.get("account_key")
.ok_or_else(|| {
Error::new(
ErrorKind::InvalidInput,
anyhow!("account_name is required for storage azblob"),
)
})?
.to_string(),
.cloned()
.unwrap_or_default(),
account_key: l.connection.get("account_key").cloned().unwrap_or_default(),
root: root.to_string(),
}),
#[cfg(feature = "storage-hdfs")]
Expand Down Expand Up @@ -108,24 +94,14 @@ pub fn parse_uri_location(l: &UriLocation) -> Result<(StorageParams, String)> {
.connection
.get("access_key_id")
.or_else(|| l.connection.get("aws_key_id"))
.ok_or_else(|| {
Error::new(
ErrorKind::InvalidInput,
anyhow!("access_key_id is required for storage s3"),
)
})?
.to_string(),
.cloned()
.unwrap_or_default(),
secret_access_key: l
.connection
.get("secret_access_key")
.or_else(|| l.connection.get("aws_secret_key"))
.ok_or_else(|| {
Error::new(
ErrorKind::InvalidInput,
anyhow!("secret_access_key is required for storage s3"),
)
})?
.to_string(),
.cloned()
.unwrap_or_default(),
master_key: l.connection.get("master_key").cloned().unwrap_or_default(),
root: root.to_string(),
disable_credential_loader: true,
Expand Down
4 changes: 1 addition & 3 deletions query/src/catalogs/hive/hive_table.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,6 @@ use common_planners::ReadDataSourcePlan;
use common_planners::Statistics;
use common_planners::TruncateTablePlan;
use common_streams::SendableDataBlockStream;
use common_tracing::tracing_futures::Instrument;
use futures::StreamExt;
use futures::TryStreamExt;
use opendal::ObjectMode;

Expand All @@ -38,7 +36,6 @@ use crate::pipelines::processors::port::OutputPort;
use crate::pipelines::processors::processor::ProcessorPtr;
use crate::pipelines::processors::SyncSource;
use crate::pipelines::processors::SyncSourcer;
use crate::pipelines::Pipe;
use crate::pipelines::Pipeline;
use crate::pipelines::SourcePipeBuilder;
use crate::sessions::QueryContext;
Expand Down Expand Up @@ -270,6 +267,7 @@ struct HiveSource {
}

impl HiveSource {
#[allow(dead_code)]
pub fn create(
ctx: Arc<QueryContext>,
output: Arc<OutputPort>,
Expand Down
4 changes: 0 additions & 4 deletions query/src/storages/hive/hive_parquet_block_reader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,12 +35,8 @@ use common_exception::ErrorCode;
use common_exception::Result;
use common_planners::PartInfoPtr;
use common_tracing::tracing;
use common_tracing::tracing::debug_span;
use common_tracing::tracing::warn;
use common_tracing::tracing::Instrument;
use futures::AsyncReadExt;
use futures::StreamExt;
use futures::TryStreamExt;
use opendal::Object;
use opendal::Operator;

Expand Down