Skip to content

Commit

Permalink
merge
Browse files Browse the repository at this point in the history
  • Loading branch information
b41sh committed Feb 7, 2025
2 parents 240feda + 34be4e4 commit b598b17
Show file tree
Hide file tree
Showing 57 changed files with 680 additions and 229 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/links.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ jobs:
id: lychee
uses: lycheeverse/[email protected]
with:
args: "--base . --cache --max-cache-age 1d . --exclude 'https?://twitter\\.com(?:/.*$)?$'"
args: "--base . --cache --max-cache-age 1d . --exclude 'https://github.com/databendlabs/databend/issues/' --exclude 'https?://twitter\\.com(?:/.*$)?$'"

- name: Save lychee cache
uses: actions/cache/save@v3
Expand Down
44 changes: 23 additions & 21 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -365,10 +365,10 @@ num-derive = "0.3.3"
num-traits = "0.2.19"
num_cpus = "1.13.1"
object = "0.36.5"
object_store_opendal = { version = "0.49.0", package = "object_store_opendal", git = "https://github.com/apache/opendal", rev = "b8a3b7a" }
object_store_opendal = { version = "0.49.1" }
once_cell = "1.15.0"
openai_api_rust = "0.1"
opendal = { version = "0.51.1", package = "opendal", git = "https://github.com/apache/opendal", rev = "b8a3b7a", features = [
opendal = { version = "0.51.2", features = [
"layers-fastrace",
"layers-prometheus-client",
"layers-async-backtrace",
Expand Down
22 changes: 11 additions & 11 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -67,24 +67,24 @@
## 🚀 Why Databend

- **Full Control**: Deploy on **cloud** or **on-prem** to suit your needs.

- **Blazing-Fast Performance**: Built with **Rust** for high-speed query execution. 👉 [ClickBench](https://databend.com/blog/clickbench-databend-top)

- **Cost-Effective**: Scalable architecture that boosts **performance** and reduces **costs**. 👉 [TPC-H](https://docs.databend.com/guides/benchmark/tpch)

- **AI-Enhanced Analytics**: Leverage built-in **[AI Functions](https://docs.databend.com/guides/ai-functions/)** for smarter data insights.

- **Simplified ETL**: Direct **data ingestion** without the need for external ETL tools. 👉 [Data Loading](https://docs.databend.com/guides/load-data/)

- **Real-Time Data Updates**: Keep your analytics **up-to-date** with real-time incremental data updates. 👉 [Stream](https://docs.databend.com/guides/load-data/continuous-data-pipelines/stream)

- **Advanced Indexing**: Boost query performance with **[Virtual Column](https://docs.databend.com/guides/performance/virtual-column)**, **[Aggregating Index](https://docs.databend.com/guides/performance/aggregating-index)**, and **[Full-Text Index](https://docs.databend.com/guides/performance/fulltext-index)**.

- **ACID Compliance + Version Control**: Ensure reliable **transactions** with full ACID compliance and Git-like versioning.

- **Schema Flexibility**: Effortlessly handle **semi-structured data** with the flexible **[VARIANT](https://docs.databend.com/sql/sql-reference/data-types/variant)** data type.
- **Community-Driven Growth**: **Open-source** and continuously evolving with contributions from a global community.

- **Community-Driven Growth**: **Open-source** and continuously evolving with contributions from a global community.



Expand Down Expand Up @@ -296,7 +296,7 @@ Databend is released under a combination of two licenses: the [Apache License 2.

When contributing to Databend, you can find the relevant license header in each file.

For more information, see the [LICENSE](LICENSE) file and [Licensing FAQs](https://docs.databend.com/guides/overview/editions/dee/license).
For more information, see the [LICENSE](LICENSE) file and [Licensing FAQs](https://docs.databend.com/guides/products/dee/license).

## 🙏 Acknowledgement

Expand Down
8 changes: 8 additions & 0 deletions src/common/storage/src/operator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,13 @@ use std::env;
use std::io::Error;
use std::io::ErrorKind;
use std::io::Result;
use std::sync::LazyLock;
use std::time::Duration;

use anyhow::anyhow;
use databend_common_base::base::GlobalInstance;
use databend_common_base::runtime::metrics::register_counter_family;
use databend_common_base::runtime::metrics::FamilyCounter;
use databend_common_base::runtime::GlobalIORuntime;
use databend_common_base::runtime::TrySpawn;
use databend_common_exception::ErrorCode;
Expand Down Expand Up @@ -58,6 +61,9 @@ use crate::runtime_layer::RuntimeLayer;
use crate::StorageConfig;
use crate::StorageHttpClient;

/// Lazily-registered Prometheus counter family tracking how many times OpenDAL
/// operations were retried. Each series is labeled with `("err", <error kind>)`;
/// the counter is incremented from `DatabendRetryInterceptor::intercept` each
/// time a retry is scheduled.
static METRIC_OPENDAL_RETRIES_COUNT: LazyLock<FamilyCounter<Vec<(&'static str, String)>>> =
LazyLock::new(|| register_counter_family("opendal_retries_count"));

/// init_operator will init an opendal operator based on storage config.
pub fn init_operator(cfg: &StorageParams) -> Result<Operator> {
let op = match &cfg {
Expand Down Expand Up @@ -397,6 +403,8 @@ pub struct DatabendRetryInterceptor;

impl RetryInterceptor for DatabendRetryInterceptor {
fn intercept(&self, err: &opendal::Error, dur: Duration) {
let labels = vec![("err", err.kind().to_string())];
METRIC_OPENDAL_RETRIES_COUNT.get_or_create(&labels).inc();
warn!(
target: "opendal::layers::retry",
"will retry after {:.2}s because: {:?}",
Expand Down
2 changes: 2 additions & 0 deletions src/query/ast/src/ast/expr.rs
Original file line number Diff line number Diff line change
Expand Up @@ -829,6 +829,7 @@ pub enum IntervalKind {
Week,
Dow,
Epoch,
MicroSecond,
}

impl Display for IntervalKind {
Expand All @@ -845,6 +846,7 @@ impl Display for IntervalKind {
IntervalKind::Dow => "DOW",
IntervalKind::Week => "WEEK",
IntervalKind::Epoch => "EPOCH",
IntervalKind::MicroSecond => "MICROSECOND",
})
}
}
Expand Down
11 changes: 10 additions & 1 deletion src/query/ast/src/parser/expr.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1843,6 +1843,7 @@ pub fn interval_kind(i: Input) -> IResult<IntervalKind> {
let dow = value(IntervalKind::Dow, rule! { DOW });
let week = value(IntervalKind::Week, rule! { WEEK });
let epoch = value(IntervalKind::Epoch, rule! { EPOCH });
let microsecond = value(IntervalKind::MicroSecond, rule! { MICROSECOND });
let year_str = value(
IntervalKind::Year,
rule! { #literal_string_eq_ignore_case("YEAR") },
Expand Down Expand Up @@ -1887,6 +1888,10 @@ pub fn interval_kind(i: Input) -> IResult<IntervalKind> {
IntervalKind::Epoch,
rule! { #literal_string_eq_ignore_case("EPOCH") },
);
let microsecond_str = value(
IntervalKind::MicroSecond,
rule! { #literal_string_eq_ignore_case("MICROSECOND") },
);
alt((
rule!(
#year
Expand All @@ -1900,6 +1905,7 @@ pub fn interval_kind(i: Input) -> IResult<IntervalKind> {
| #dow
| #week
| #epoch
| #microsecond
),
rule!(
#year_str
Expand All @@ -1913,6 +1919,7 @@ pub fn interval_kind(i: Input) -> IResult<IntervalKind> {
| #dow_str
| #week_str
| #epoch_str
| #microsecond_str
),
))(i)
}
Expand Down Expand Up @@ -1977,7 +1984,9 @@ pub fn parse_float(text: &str) -> Result<Literal, ErrorKind> {
},
None => 0,
};
if i_part.len() as i32 + exp > 76 {

let p = i_part.len() as i32 + exp - f_part.len() as i32;
if !(-76..=76).contains(&p) {
Ok(Literal::Float64(fast_float2::parse(text)?))
} else {
let mut digits = String::with_capacity(76);
Expand Down
2 changes: 2 additions & 0 deletions src/query/ast/src/parser/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -583,6 +583,8 @@ pub enum TokenKind {
ENGINES,
#[token("EPOCH", ignore(ascii_case))]
EPOCH,
#[token("MICROSECOND", ignore(ascii_case))]
MICROSECOND,
#[token("ERROR_ON_COLUMN_COUNT_MISMATCH", ignore(ascii_case))]
ERROR_ON_COLUMN_COUNT_MISMATCH,
#[token("ESCAPE", ignore(ascii_case))]
Expand Down
Loading

0 comments on commit b598b17

Please sign in to comment.