
Commit

Merge branch 'main' into disable-compression
clux authored Nov 19, 2024
2 parents 22dfd0a + 8b5230f commit 63fde5a
Showing 10 changed files with 168 additions and 39 deletions.
8 changes: 4 additions & 4 deletions Cargo.toml
@@ -38,7 +38,7 @@ async-broadcast = "0.7.0"
async-stream = "0.3.5"
async-trait = "0.1.64"
backoff = "0.4.0"
base64 = "0.22.0"
base64 = "0.22.1"
bytes = "1.1.0"
chrono = { version = "0.4.34", default-features = false }
darling = "0.20.3"
@@ -49,7 +49,7 @@ futures = { version = "0.3.17", default-features = false }
hashbrown = "0.15.0"
home = "0.5.4"
http = "1.1.0"
http-body = "1.0.0"
http-body = "1.0.1"
http-body-util = "0.1.2"
hyper = "1.2.0"
hyper-util = "0.1.9"
@@ -59,7 +59,7 @@ hyper-socks2 = { version = "0.9.0", default-features = false }
hyper-timeout = "0.5.1"
json-patch = "3"
jsonptr = "0.6"
jsonpath-rust = "0.5.0"
jsonpath-rust = "0.7.3"
k8s-openapi = { version = "0.23.0", default-features = false }
openssl = "0.10.36"
parking_lot = "0.12.0"
@@ -68,7 +68,7 @@ pin-project = "1.0.4"
proc-macro2 = "1.0.29"
quote = "1.0.10"
rand = "0.8.3"
rustls = { version = "0.23.0", default-features = false }
rustls = { version = "0.23.16", default-features = false }
rustls-pemfile = "2.0.0"
schemars = "0.8.6"
secrecy = "0.10.2"
4 changes: 4 additions & 0 deletions deny.toml
@@ -12,6 +12,9 @@ db-urls = ["https://github.com/rustsec/advisory-db"]
# remove them when we have to
yanked = "warn"

ignore = [
"RUSTSEC-2024-0384", # instant dep via unmaintained backoff dep
]

[licenses]
# See https://spdx.org/licenses/ for list of possible licenses
@@ -60,6 +63,7 @@ unknown-git = "deny"
allow-registry = ["https://github.com/rust-lang/crates.io-index"]
allow-git = ["https://github.com/tyrone-wu/runtime-macros.git"]


[bans]
multiple-versions = "deny"

8 changes: 4 additions & 4 deletions examples/dynamic_jsonpath.rs
@@ -1,5 +1,5 @@
use anyhow::{Context, Error};
use jsonpath_rust::JsonPathInst;
use jsonpath_rust::JsonPath;
use k8s_openapi::api::core::v1::Pod;
use kube::{
api::{Api, ListParams},
@@ -18,7 +18,7 @@ async fn main() -> anyhow::Result<()> {
let jsonpath = {
let path = std::env::var("JSONPATH").unwrap_or_else(|_| ".items[*].spec.containers[*].image".into());
format!("${path}")
.parse::<JsonPathInst>()
.parse::<JsonPath>()
.map_err(Error::msg)
.with_context(|| {
format!(
@@ -34,8 +34,8 @@ async fn main() -> anyhow::Result<()> {

// Use the given JSONPATH to filter the ObjectList
let list_json = serde_json::to_value(&list)?;
for res in jsonpath.find_slice(&list_json, Default::default()) {
info!("\t\t {}", *res);
for res in jsonpath.find_slice(&list_json) {
info!("\t\t {}", res.to_data());
}
Ok(())
}
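For reference, a minimal standalone sketch of the jsonpath-rust 0.7 calls used in this example — `parse::<JsonPath>()`, the single-argument `find_slice`, and `to_data()` — run against a made-up document; the path string and sample JSON are illustrative only, not part of this commit.

```rust
use jsonpath_rust::JsonPath;
use serde_json::json;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // JsonPath replaces the old JsonPathInst entry point.
    let path = "$.items[*].spec.containers[*].image"
        .parse::<JsonPath>()
        .map_err(|e| e.to_string())?;

    // Stand-in for the serialized Pod list queried in the example above.
    let doc = json!({
        "items": [
            { "spec": { "containers": [{ "image": "nginx:1.25" }] } }
        ]
    });

    // find_slice no longer takes a config argument; each match is turned
    // into an owned serde_json::Value with to_data().
    for hit in path.find_slice(&doc) {
        println!("{}", hit.to_data());
    }
    Ok(())
}
```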
2 changes: 1 addition & 1 deletion kube-client/src/api/entry.rs
@@ -134,7 +134,7 @@ enum Dirtiness {
New,
}

impl<'a, K> OccupiedEntry<'a, K> {
impl<K> OccupiedEntry<'_, K> {
/// Borrow the object
pub fn get(&self) -> &K {
&self.object
21 changes: 9 additions & 12 deletions kube-client/src/client/auth/mod.rs
@@ -10,7 +10,7 @@ use http::{
header::{InvalidHeaderValue, AUTHORIZATION},
HeaderValue, Request,
};
use jsonpath_rust::{path::config::JsonPathConfig, JsonPathInst};
use jsonpath_rust::JsonPath;
use secrecy::{ExposeSecret, SecretString};
use serde::{Deserialize, Serialize};
use thiserror::Error;
@@ -498,33 +498,30 @@ fn token_from_gcp_provider(provider: &AuthProviderConfig) -> Result<ProviderToke
}

fn extract_value(json: &serde_json::Value, context: &str, path: &str) -> Result<String, Error> {
let cfg = JsonPathConfig::default(); // no need for regex caching here
let parsed_path = path
.trim_matches(|c| c == '"' || c == '{' || c == '}')
.parse::<JsonPathInst>()
.parse::<JsonPath>()
.map_err(|err| {
Error::AuthExec(format!(
"Failed to parse {context:?} as a JsonPath: {path}\n
Error: {err}"
))
})?;

let res = parsed_path.find_slice(json, cfg);
let res = parsed_path.find_slice(json);

let Some(res) = res.into_iter().next() else {
return Err(Error::AuthExec(format!(
"Target {context:?} value {path:?} not found"
)));
};

if let Some(val) = res.as_str() {
Ok(val.to_owned())
} else {
Err(Error::AuthExec(format!(
"Target {:?} value {:?} is not a string: {:?}",
context, path, *res
)))
}
let jval = res.to_data();
let val = jval.as_str().ok_or(Error::AuthExec(format!(
"Target {context:?} value {path:?} is not a string"
)))?;

Ok(val.to_string())
}

/// ExecCredentials is used by exec-based plugins to communicate credentials to
48 changes: 34 additions & 14 deletions kube-client/src/client/builder.rs
@@ -92,26 +92,46 @@ impl TryFrom<Config> for ClientBuilder<GenericService> {
}

match config.proxy_url.as_ref() {
#[cfg(feature = "socks5")]
Some(proxy_url) if proxy_url.scheme_str() == Some("socks5") => {
let connector = hyper_socks2::SocksConnector {
proxy_addr: proxy_url.clone(),
auth: None,
connector,
};

make_generic_builder(connector, config)
#[cfg(feature = "socks5")]
{
let connector = hyper_socks2::SocksConnector {
proxy_addr: proxy_url.clone(),
auth: None,
connector,
};
make_generic_builder(connector, config)
}

#[cfg(not(feature = "socks5"))]
Err(Error::ProxyProtocolDisabled {
proxy_url: proxy_url.clone(),
protocol_feature: "kube/socks5",
})
}

#[cfg(feature = "http-proxy")]
Some(proxy_url) if proxy_url.scheme_str() == Some("http") => {
let proxy = hyper_http_proxy::Proxy::new(hyper_http_proxy::Intercept::All, proxy_url.clone());
let connector = hyper_http_proxy::ProxyConnector::from_proxy_unsecured(connector, proxy);

make_generic_builder(connector, config)
#[cfg(feature = "http-proxy")]
{
let proxy =
hyper_http_proxy::Proxy::new(hyper_http_proxy::Intercept::All, proxy_url.clone());
let connector = hyper_http_proxy::ProxyConnector::from_proxy_unsecured(connector, proxy);

make_generic_builder(connector, config)
}

#[cfg(not(feature = "http-proxy"))]
Err(Error::ProxyProtocolDisabled {
proxy_url: proxy_url.clone(),
protocol_feature: "kube/http-proxy",
})
}

_ => make_generic_builder(connector, config),
Some(proxy_url) => Err(Error::ProxyProtocolUnsupported {
proxy_url: proxy_url.clone(),
}),

None => make_generic_builder(connector, config),
}
}
}
16 changes: 16 additions & 0 deletions kube-client/src/error.rs
@@ -1,4 +1,5 @@
//! Error handling and error types
use http::Uri;
use thiserror::Error;

pub use kube_core::ErrorResponse;
@@ -25,6 +26,21 @@ pub enum Error {
#[error("ServiceError: {0}")]
Service(#[source] tower::BoxError),

/// Returned when the configured proxy uses an unsupported protocol.
#[error("configured proxy {proxy_url:?} uses an unsupported protocol")]
ProxyProtocolUnsupported {
/// The URL of the proxy.
proxy_url: Uri,
},
/// Returned when the configured proxy uses a protocol that requires a Cargo feature that is currently disabled
#[error("configured proxy {proxy_url:?} requires the disabled feature {protocol_feature:?}")]
ProxyProtocolDisabled {
/// The URL of the proxy.
proxy_url: Uri,
/// The Cargo feature that the proxy protocol requires.
protocol_feature: &'static str,
},

/// UTF-8 Error
#[error("UTF-8 Error: {0}")]
FromUtf8(#[source] std::string::FromUtf8Error),
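A hedged sketch of how calling code might surface the two new error variants when client construction fails; the variant names and fields come from this diff, while the helper function and the `Client::try_from` wiring are illustrative only.

```rust
use kube::{Client, Config, Error};

// Illustrative helper: build a client and log a friendlier message for the
// new proxy-related errors before passing them on unchanged.
fn connect(config: Config) -> Result<Client, Error> {
    Client::try_from(config).map_err(|err| {
        match &err {
            Error::ProxyProtocolUnsupported { proxy_url } => {
                eprintln!("proxy {proxy_url:?} uses a protocol kube does not support");
            }
            Error::ProxyProtocolDisabled { proxy_url, protocol_feature } => {
                eprintln!("proxy {proxy_url:?} needs the disabled cargo feature {protocol_feature:?}");
            }
            _ => {}
        }
        err
    })
}
```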
72 changes: 69 additions & 3 deletions kube-derive/src/custom_resource.rs
@@ -3,7 +3,7 @@

use darling::{FromDeriveInput, FromMeta};
use proc_macro2::{Ident, Literal, Span, TokenStream};
use quote::ToTokens;
use quote::{ToTokens, TokenStreamExt};
use syn::{parse_quote, Data, DeriveInput, Path, Visibility};

/// Values we can parse from #[kube(attrs)]
@@ -37,6 +37,44 @@ struct KubeAttrs {
scale: Option<String>,
#[darling(default)]
crates: Crates,
#[darling(multiple, rename = "annotation")]
annotations: Vec<KVTuple>,
#[darling(multiple, rename = "label")]
labels: Vec<KVTuple>,
}

#[derive(Debug)]
struct KVTuple(String, String);

impl FromMeta for KVTuple {
fn from_list(items: &[darling::ast::NestedMeta]) -> darling::Result<Self> {
if items.len() == 2 {
if let (
darling::ast::NestedMeta::Lit(syn::Lit::Str(key)),
darling::ast::NestedMeta::Lit(syn::Lit::Str(value)),
) = (&items[0], &items[1])
{
return Ok(KVTuple(key.value(), value.value()));
}
}

Err(darling::Error::unsupported_format(
"expected `\"key\", \"value\"` format",
))
}
}

impl From<(&'static str, &'static str)> for KVTuple {
fn from((key, value): (&'static str, &'static str)) -> Self {
Self(key.to_string(), value.to_string())
}
}

impl ToTokens for KVTuple {
fn to_tokens(&self, tokens: &mut TokenStream) {
let (k, v) = (&self.0, &self.1);
tokens.append_all(quote! { (#k, #v) });
}
}

#[derive(Debug, FromMeta)]
@@ -172,6 +210,8 @@ pub(crate) fn derive(input: proc_macro2::TokenStream) -> proc_macro2::TokenStrea
serde_json,
std,
},
annotations,
labels,
} = kube_attrs;

let struct_name = kind_struct.unwrap_or_else(|| kind.clone());
@@ -247,6 +287,18 @@ pub(crate) fn derive(input: proc_macro2::TokenStream) -> proc_macro2::TokenStrea
derive_paths.push(syn::parse_quote! { #schemars::JsonSchema });
}

let meta_annotations = if !annotations.is_empty() {
quote! { Some(std::collections::BTreeMap::from([#((#annotations.0.to_string(), #annotations.1.to_string()),)*])) }
} else {
quote! { None }
};

let meta_labels = if !labels.is_empty() {
quote! { Some(std::collections::BTreeMap::from([#((#labels.0.to_string(), #labels.1.to_string()),)*])) }
} else {
quote! { None }
};

let docstr =
doc.unwrap_or_else(|| format!(" Auto-generated derived type for {ident} via `CustomResource`"));
let quoted_serde = Literal::string(&serde.to_token_stream().to_string());
@@ -268,6 +320,8 @@ pub(crate) fn derive(input: proc_macro2::TokenStream) -> proc_macro2::TokenStrea
pub fn new(name: &str, spec: #ident) -> Self {
Self {
metadata: #k8s_openapi::apimachinery::pkg::apis::meta::v1::ObjectMeta {
annotations: #meta_annotations,
labels: #meta_labels,
name: Some(name.to_string()),
..Default::default()
},
@@ -382,7 +436,17 @@ pub(crate) fn derive(input: proc_macro2::TokenStream) -> proc_macro2::TokenStrea
let categories_json = serde_json::to_string(&categories).unwrap();
let short_json = serde_json::to_string(&shortnames).unwrap();
let crd_meta_name = format!("{plural}.{group}");
let crd_meta = quote! { { "name": #crd_meta_name } };

let mut crd_meta = TokenStream::new();
crd_meta.extend(quote! { "name": #crd_meta_name });

if !annotations.is_empty() {
crd_meta.extend(quote! { , "annotations": #meta_annotations });
}

if !labels.is_empty() {
crd_meta.extend(quote! { , "labels": #meta_labels });
}

let schemagen = if schema_mode.use_in_crd() {
quote! {
@@ -426,7 +490,9 @@ pub(crate) fn derive(input: proc_macro2::TokenStream) -> proc_macro2::TokenStrea
#schemagen

let jsondata = #serde_json::json!({
"metadata": #crd_meta,
"metadata": {
#crd_meta
},
"spec": {
"group": #group,
"scope": #scope,
6 changes: 6 additions & 0 deletions kube-derive/src/lib.rs
@@ -148,6 +148,12 @@ mod resource;
/// Sets the description of the schema in the generated CRD. If not specified
/// `Auto-generated derived type for {customResourceName} via CustomResource` will be used instead.
///
/// ## `#[kube(annotation("ANNOTATION_KEY", "ANNOTATION_VALUE"))]`
/// Add a single annotation to the generated CRD.
///
/// ## `#[kube(label("LABEL_KEY", "LABEL_VALUE"))]`
/// Add a single label to the generated CRD.
///
/// ## Example with all properties
///
/// ```rust
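A hedged sketch of the two new attributes documented above on a `CustomResource` derive; the spec type, group/version, and key/value pairs are invented for illustration.

```rust
use kube::CustomResource;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};

#[derive(CustomResource, Clone, Debug, Deserialize, Serialize, JsonSchema)]
#[kube(
    group = "example.dev",
    version = "v1",
    kind = "Document",
    namespaced,
    // Repeat annotation(...)/label(...) to add more than one pair.
    annotation("example.dev/owner", "platform-team"),
    label("app.kubernetes.io/part-of", "example")
)]
pub struct DocumentSpec {
    pub title: String,
}
```

Per the derive changes in this commit, the pairs should end up both in the metadata of objects built with `Document::new(...)` and in the metadata of the CRD returned by `Document::crd()`.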