ci: upgrade spell checker to 1.13.10 (#1045)
* ci: upgrade spell checker to 1.13.10

Signed-off-by: Zhizhen He <[email protected]>

* fix: fix existing typos

Signed-off-by: Zhizhen He <[email protected]>

* chore: use taplo to format typos.toml

Signed-off-by: Zhizhen He <[email protected]>

* chore: add fmt-toml rule to format TOML files

Signed-off-by: Zhizhen He <[email protected]>

---------

Signed-off-by: Zhizhen He <[email protected]>
hezhizhen authored Feb 21, 2023
1 parent aaaf241 commit 6833b40
Showing 15 changed files with 33 additions and 26 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/develop.yml
@@ -31,8 +31,8 @@ jobs:
name: Spell Check with Typos
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: crate-ci/typos@v1.0.4
- uses: actions/checkout@v3
- uses: crate-ci/typos@v1.13.10

check:
name: Check
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -54,7 +54,7 @@ arrow-schema = { version = "29.0", features = ["serde"] }
async-stream = "0.3"
async-trait = "0.1"
chrono = { version = "0.4", features = ["serde"] }
# TODO(LFC): Use released Datafusion when it officially dpendent on Arrow 29.0
# TODO(LFC): Use released Datafusion when it officially dependent on Arrow 29.0
datafusion = { git = "https://github.com/apache/arrow-datafusion.git", rev = "4917235a398ae20145c87d20984e6367dc1a0c1e" }
datafusion-common = { git = "https://github.com/apache/arrow-datafusion.git", rev = "4917235a398ae20145c87d20984e6367dc1a0c1e" }
datafusion-expr = { git = "https://github.com/apache/arrow-datafusion.git", rev = "4917235a398ae20145c87d20984e6367dc1a0c1e" }
4 changes: 4 additions & 0 deletions Makefile
@@ -19,6 +19,10 @@ clean: ## Clean the project.
fmt: ## Format all the Rust code.
cargo fmt --all

.PHONY: fmt-toml
fmt-toml: ## Format all TOML files.
taplo format --check --option "indent_string= "

.PHONY: docker-image
docker-image: ## Build docker image.
docker build --network host -f docker/Dockerfile -t ${IMAGE_REGISTRY}:${IMAGE_TAG} .
4 changes: 2 additions & 2 deletions docs/rfcs/2022-12-20-promql-in-rust/rfc.md
@@ -149,10 +149,10 @@ inputs:
- title: 'Series Normalize: \noffset = 0'
operator: prom
inputs:
- title: 'Filter: \ntimetamp > 2022-12-20T10:00:00 && timestamp < 2022-12-21T10:00:00'
- title: 'Filter: \ntimestamp > 2022-12-20T10:00:00 && timestamp < 2022-12-21T10:00:00'
operator: filter
inputs:
- title: 'Table Scan: \ntable = request_duration, timetamp > 2022-12-20T10:00:00 && timestamp < 2022-12-21T10:00:00'
- title: 'Table Scan: \ntable = request_duration, timestamp > 2022-12-20T10:00:00 && timestamp < 2022-12-21T10:00:00'
operator: scan -->

![example](example.png)
2 changes: 1 addition & 1 deletion src/datanode/src/sql/copy_table.rs
@@ -107,7 +107,7 @@ impl ParquetWriter {
let mut end_loop = true;
// TODO(hl & jiachun): Since OpenDAL's writer is async and ArrowWriter requires a `std::io::Write`,
// here we use a Vec<u8> to buffer all parquet bytes in memory and write to object store
// at a time. Maybe we should find a better way to brige ArrowWriter and OpenDAL's object.
// at a time. Maybe we should find a better way to bridge ArrowWriter and OpenDAL's object.
while let Some(batch) = self
.stream
.try_next()
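The TODO above (the same note appears in `src/storage/src/sst/parquet.rs` further down) describes a buffer-then-upload pattern: `ArrowWriter` needs a synchronous `std::io::Write`, while OpenDAL's writer is async, so the parquet bytes are staged in a `Vec<u8>` and handed to the object store in one call. Below is a minimal sketch of the staging step only, written against the `arrow` and `parquet` crates directly; the function names are illustrative, and the object-store upload is deliberately left out since it depends on the project's own wrapper.

```rust
// Sketch (not the project's code): stage parquet bytes in a Vec<u8>, which
// implements `std::io::Write`, so the finished file can later be pushed to an
// async object store in a single call.
use std::sync::Arc;

use arrow::array::Int64Array;
use arrow::datatypes::{DataType, Field, Schema};
use arrow::record_batch::RecordBatch;
use parquet::arrow::ArrowWriter;

fn encode_batches(batches: &[RecordBatch], schema: Arc<Schema>) -> parquet::errors::Result<Vec<u8>> {
    let mut buf = Vec::new();
    let mut writer = ArrowWriter::try_new(&mut buf, schema, None)?;
    for batch in batches {
        writer.write(batch)?;
    }
    // `close` finalizes the parquet footer; the complete file now lives in `buf`.
    writer.close()?;
    Ok(buf)
}

fn main() -> parquet::errors::Result<()> {
    let schema = Arc::new(Schema::new(vec![Field::new("x", DataType::Int64, false)]));
    let batch = RecordBatch::try_new(
        schema.clone(),
        vec![Arc::new(Int64Array::from(vec![1, 2, 3]))],
    )
    .expect("valid batch");
    let bytes = encode_batches(&[batch], schema)?;
    // `bytes` would then be written to the object store (e.g. via OpenDAL) at once.
    println!("encoded {} parquet bytes", bytes.len());
    Ok(())
}
```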
4 changes: 2 additions & 2 deletions src/meta-srv/src/bootstrap.rs
@@ -35,7 +35,7 @@ use crate::selector::load_based::LoadBasedSelector;
use crate::selector::SelectorType;
use crate::service::admin;
use crate::service::store::etcd::EtcdStore;
use crate::service::store::kv::ResetableKvStoreRef;
use crate::service::store::kv::ResettableKvStoreRef;
use crate::service::store::memory::MemStore;
use crate::{error, Result};

@@ -90,7 +90,7 @@ pub async fn make_meta_srv(opts: MetaSrvOptions) -> Result<MetaSrv> {
)
};

let in_memory = Arc::new(MemStore::default()) as ResetableKvStoreRef;
let in_memory = Arc::new(MemStore::default()) as ResettableKvStoreRef;

let meta_peer_client = MetaPeerClientBuilder::default()
.election(election.clone())
4 changes: 2 additions & 2 deletions src/meta-srv/src/cluster.rs
@@ -28,13 +28,13 @@ use crate::error::{match_for_io_error, Result};
use crate::keys::{StatKey, StatValue, DN_STAT_PREFIX};
use crate::metasrv::ElectionRef;
use crate::service::store::ext::KvStoreExt;
use crate::service::store::kv::ResetableKvStoreRef;
use crate::service::store::kv::ResettableKvStoreRef;
use crate::{error, util};

#[derive(Builder, Clone)]
pub struct MetaPeerClient {
election: Option<ElectionRef>,
in_memory: ResetableKvStoreRef,
in_memory: ResettableKvStoreRef,
#[builder(default = "ChannelManager::default()")]
channel_manager: ChannelManager,
#[builder(default = "3")]
8 changes: 4 additions & 4 deletions src/meta-srv/src/metasrv.rs
@@ -27,7 +27,7 @@ use crate::handler::HeartbeatHandlerGroup;
use crate::lock::DistLockRef;
use crate::selector::{Selector, SelectorType};
use crate::sequence::SequenceRef;
use crate::service::store::kv::{KvStoreRef, ResetableKvStoreRef};
use crate::service::store::kv::{KvStoreRef, ResettableKvStoreRef};

pub const TABLE_ID_SEQ: &str = "table_id";

@@ -59,7 +59,7 @@ impl Default for MetaSrvOptions {
pub struct Context {
pub datanode_lease_secs: i64,
pub server_addr: String,
pub in_memory: ResetableKvStoreRef,
pub in_memory: ResettableKvStoreRef,
pub kv_store: KvStoreRef,
pub election: Option<ElectionRef>,
pub skip_all: Arc<AtomicBool>,
@@ -93,7 +93,7 @@ pub struct MetaSrv {
options: MetaSrvOptions,
// It is only valid at the leader node and is used to temporarily
// store some data that will not be persisted.
in_memory: ResetableKvStoreRef,
in_memory: ResettableKvStoreRef,
kv_store: KvStoreRef,
table_id_sequence: SequenceRef,
selector: SelectorRef,
@@ -142,7 +142,7 @@ impl MetaSrv {
}

#[inline]
pub fn in_memory(&self) -> ResetableKvStoreRef {
pub fn in_memory(&self) -> ResettableKvStoreRef {
self.in_memory.clone()
}

6 changes: 3 additions & 3 deletions src/meta-srv/src/metasrv/builder.rs
@@ -24,14 +24,14 @@ use crate::lock::DistLockRef;
use crate::metasrv::{ElectionRef, MetaSrv, MetaSrvOptions, SelectorRef, TABLE_ID_SEQ};
use crate::selector::lease_based::LeaseBasedSelector;
use crate::sequence::Sequence;
use crate::service::store::kv::{KvStoreRef, ResetableKvStoreRef};
use crate::service::store::kv::{KvStoreRef, ResettableKvStoreRef};
use crate::service::store::memory::MemStore;

// TODO(fys): try use derive_builder macro
pub struct MetaSrvBuilder {
options: Option<MetaSrvOptions>,
kv_store: Option<KvStoreRef>,
in_memory: Option<ResetableKvStoreRef>,
in_memory: Option<ResettableKvStoreRef>,
selector: Option<SelectorRef>,
handler_group: Option<HeartbeatHandlerGroup>,
election: Option<ElectionRef>,
@@ -63,7 +63,7 @@ impl MetaSrvBuilder {
self
}

pub fn in_memory(mut self, in_memory: ResetableKvStoreRef) -> Self {
pub fn in_memory(mut self, in_memory: ResettableKvStoreRef) -> Self {
self.in_memory = Some(in_memory);
self
}
4 changes: 2 additions & 2 deletions src/meta-srv/src/service/store/kv.rs
@@ -23,7 +23,7 @@ use api::v1::meta::{
use crate::error::Result;

pub type KvStoreRef = Arc<dyn KvStore>;
pub type ResetableKvStoreRef = Arc<dyn ResetableKvStore>;
pub type ResettableKvStoreRef = Arc<dyn ResettableKvStore>;

#[async_trait::async_trait]
pub trait KvStore: Send + Sync {
@@ -40,6 +40,6 @@ pub trait KvStore: Send + Sync {
async fn move_value(&self, req: MoveValueRequest) -> Result<MoveValueResponse>;
}

pub trait ResetableKvStore: KvStore {
pub trait ResettableKvStore: KvStore {
fn reset(&self);
}
4 changes: 2 additions & 2 deletions src/meta-srv/src/service/store/memory.rs
@@ -24,7 +24,7 @@ use api::v1::meta::{
use parking_lot::RwLock;

use crate::error::Result;
use crate::service::store::kv::{KvStore, ResetableKvStore};
use crate::service::store::kv::{KvStore, ResettableKvStore};

pub struct MemStore {
inner: RwLock<BTreeMap<Vec<u8>, Vec<u8>>>,
@@ -44,7 +44,7 @@ impl MemStore {
}
}

impl ResetableKvStore for MemStore {
impl ResettableKvStore for MemStore {
fn reset(&self) {
self.inner.write().clear();
}
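For context on the `ResetableKvStore` → `ResettableKvStore` rename running through the meta-srv files: the trait in `kv.rs` plus the `MemStore` impl above describe an in-memory KV store behind a trait object that the leader can wipe with `reset()` (the `MetaSrv` field comment notes it holds temporary, non-persisted data). The sketch below is a simplified stand-in, using synchronous methods and `std::sync::RwLock` rather than the real async `KvStore` supertrait and `parking_lot`; `put`/`get` are assumed method names, not the project's API.

```rust
// Self-contained sketch of a resettable in-memory KV store behind a trait
// object. `put`/`get` are illustrative stand-ins for the real async KvStore
// methods; only the `reset` part mirrors what MemStore does above.
use std::collections::BTreeMap;
use std::sync::{Arc, RwLock};

trait ResettableKvStore: Send + Sync {
    fn put(&self, key: Vec<u8>, value: Vec<u8>);
    fn get(&self, key: &[u8]) -> Option<Vec<u8>>;
    /// Drop all state, e.g. when this node is no longer the leader.
    fn reset(&self);
}

#[derive(Default)]
struct MemStore {
    inner: RwLock<BTreeMap<Vec<u8>, Vec<u8>>>,
}

impl ResettableKvStore for MemStore {
    fn put(&self, key: Vec<u8>, value: Vec<u8>) {
        self.inner.write().unwrap().insert(key, value);
    }
    fn get(&self, key: &[u8]) -> Option<Vec<u8>> {
        self.inner.read().unwrap().get(key).cloned()
    }
    fn reset(&self) {
        self.inner.write().unwrap().clear();
    }
}

fn main() {
    let in_memory: Arc<dyn ResettableKvStore> = Arc::new(MemStore::default());
    in_memory.put(b"dn_stat".to_vec(), b"42".to_vec());
    assert!(in_memory.get(b"dn_stat").is_some());
    in_memory.reset();
    assert!(in_memory.get(b"dn_stat").is_none());
}
```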
6 changes: 3 additions & 3 deletions src/promql/src/range_array.rs
@@ -50,7 +50,7 @@ pub type RangeTuple = (u32, u32);
/// └───────────────┼───────────────┘
/// ```
///
/// Then the [DictionaryArray] can be expanded to serveral ranges like this:
/// Then the [DictionaryArray] can be expanded to several ranges like this:
///
/// ```text
/// Keys
@@ -96,7 +96,7 @@ impl RangeArray {
unsafe { Ok(Self::from_ranges_unchecked(values, ranges)) }
}

/// Construct [RangeArray] from given range without checking its validaty.
/// Construct [RangeArray] from given range without checking its validity.
///
/// # Safety
///
@@ -194,7 +194,7 @@ impl RangeArray {
)
}

/// Build datatype of wrappered [RangeArray] on given value type.
/// Build datatype of wrapped [RangeArray] on given value type.
pub fn convert_data_type(value_type: DataType) -> DataType {
DataType::Dictionary(Box::new(Self::key_type()), Box::new(value_type))
}
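The doc-comment fix above touches the explanation of how a `RangeArray` is "expanded to several ranges": conceptually, each `RangeTuple` of `(offset, length)` picks a window out of the shared values array. The sketch below shows that idea with plain `arrow` slicing; it makes no use of the project's actual `RangeArray` constructors, and the helper name is made up for illustration.

```rust
// Conceptual sketch only: each (offset, length) tuple selects a window of the
// shared values array, which is what "expanding" the RangeArray refers to.
// Plain arrow slicing is used here instead of the project's RangeArray API.
use std::sync::Arc;

use arrow::array::{Array, ArrayRef, Int64Array};

fn expand_ranges(values: &ArrayRef, ranges: &[(u32, u32)]) -> Vec<ArrayRef> {
    ranges
        .iter()
        .map(|&(offset, length)| values.slice(offset as usize, length as usize))
        .collect()
}

fn main() {
    let values: ArrayRef = Arc::new(Int64Array::from(vec![1, 2, 3, 4, 5, 6]));
    // Overlapping windows, as a PromQL range selector would produce.
    let ranges = [(0u32, 3u32), (2, 3), (4, 2)];
    for window in expand_ranges(&values, &ranges) {
        println!("{:?}", window);
    }
}
```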
2 changes: 1 addition & 1 deletion src/script/src/python/coprocessor.rs
@@ -364,7 +364,7 @@ pub struct PyQueryEngine {

#[pyclass]
impl PyQueryEngine {
// TODO(discord9): find a better way to call sql query api, now we don't if we are in async contex or not
// TODO(discord9): find a better way to call sql query api, now we don't if we are in async context or not
/// return sql query results in List[List[PyVector]], or List[usize] for AffectedRows number if no recordbatches is returned
#[pymethod]
fn sql(&self, s: String, vm: &VirtualMachine) -> PyResult<PyListRef> {
2 changes: 1 addition & 1 deletion src/storage/src/sst/parquet.rs
@@ -101,7 +101,7 @@ impl<'a> ParquetWriter<'a> {

// TODO(hl): Since OpenDAL's writer is async and ArrowWriter requires a `std::io::Write`,
// here we use a Vec<u8> to buffer all parquet bytes in memory and write to object store
// at a time. Maybe we should find a better way to brige ArrowWriter and OpenDAL's object.
// at a time. Maybe we should find a better way to bridge ArrowWriter and OpenDAL's object.
let mut buf = vec![];
let mut arrow_writer = ArrowWriter::try_new(&mut buf, schema.clone(), Some(writer_props))
.context(WriteParquetSnafu)?;
3 changes: 3 additions & 0 deletions typos.toml
@@ -1,2 +1,5 @@
[default.extend-words]
ue = "ue"
datas = "datas"
[files]
extend-exclude = ["corrupted"]
