Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix(telemetry): remove redundant report_event during recovery #20032

Draft
wants to merge 2 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions src/common/src/telemetry/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@ use std::env;

use risingwave_pb::telemetry::PbTelemetryClusterType;
pub use risingwave_telemetry_event::{
current_timestamp, post_telemetry_report_pb, report_event_common, request_to_telemetry_event,
TelemetryError, TelemetryResult,
current_timestamp, post_telemetry_report_pb, report_event_common, TelemetryError,
TelemetryResult,
};
use serde::{Deserialize, Serialize};
use sysinfo::System;
Expand Down
8 changes: 7 additions & 1 deletion src/common/telemetry_event/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,12 @@ pub fn report_event_common(
attributes: Option<jsonbb::Value>, // any json string
node: String,
) {
// REMOVE ME: To find out all report_event calls
println!(
"report_event_common backtrace:\n{}",
std::backtrace::Backtrace::force_capture()
);
Comment on lines +54 to +58
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This debug code with println! and backtrace capture appears to be temporary development scaffolding. While it's helpful for debugging telemetry calls during development, it should be removed before merging to prevent performance overhead and log spam in production. Consider using tracing or debug logging if this visibility needs to be preserved in a more controlled way.

Spotted by Graphite Reviewer

Is this helpful? React 👍 or 👎 to let us know.


let event_tracking_id: String;
if let Some(tracking_id) = TELEMETRY_TRACKING_ID.get() {
event_tracking_id = tracking_id.to_string();
Expand All @@ -72,7 +78,7 @@ pub fn report_event_common(
);
}

pub fn request_to_telemetry_event(
fn request_to_telemetry_event(
tracking_id: String,
event_stage: PbTelemetryEventStage,
event_name: &str,
Expand Down
28 changes: 0 additions & 28 deletions src/stream/src/from_proto/sink.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,32 +41,6 @@ use crate::telemetry::report_event;

pub struct SinkExecutorBuilder;

/// Reports a `CreateStreamJob` telemetry event for a sink being built.
///
/// When a format descriptor is present, its `format` and `encode` fields are
/// serialized into a jsonbb object and attached as the event's attributes.
fn telemetry_sink_build(
    sink_id: &SinkId,
    connector_name: &str,
    sink_format_desc: &Option<SinkFormatDesc>,
) {
    // Build `{"format": ..., "encode": ...}` only when a descriptor exists;
    // otherwise the event carries no attributes.
    let attributes = match sink_format_desc {
        Some(desc) => {
            let mut obj = jsonbb::Builder::<Vec<u8>>::new();
            obj.begin_object();
            obj.add_string("format");
            obj.add_value(jsonbb::ValueRef::String(desc.format.to_string().as_str()));
            obj.add_string("encode");
            obj.add_value(jsonbb::ValueRef::String(desc.encode.to_string().as_str()));
            obj.end_object();
            Some(obj.finish())
        }
        None => None,
    };

    report_event(
        PbTelemetryEventStage::CreateStreamJob,
        "sink",
        sink_id.sink_id() as i64,
        Some(connector_name.to_owned()),
        Some(PbTelemetryDatabaseObject::Sink),
        attributes,
    )
}

fn resolve_pk_info(
input_schema: &Schema,
log_store_table: &Table,
Expand Down Expand Up @@ -259,8 +233,6 @@ impl ExecutorBuilder for SinkExecutorBuilder {
connector, sink_id.sink_id, params.executor_id
);

telemetry_sink_build(&sink_id, connector, &sink_param.format_desc);

let exec = match node.log_store_type() {
// Default value is the normal in memory log store to be backward compatible with the
// previously unset value
Expand Down
29 changes: 0 additions & 29 deletions src/stream/src/from_proto/source/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,41 +22,12 @@ pub use fs_fetch::FsFetchExecutorBuilder;
use risingwave_common::catalog::TableId;
use risingwave_connector::source::UPSTREAM_SOURCE_KEY;
use risingwave_pb::catalog::PbStreamSourceInfo;
use risingwave_pb::telemetry::{PbTelemetryDatabaseObject, PbTelemetryEventStage};

use super::*;
use crate::telemetry::report_event;

/// Returns the lower-cased connector name from the source's WITH properties,
/// or an empty string when the `connector` key is absent.
fn get_connector_name(with_props: &BTreeMap<String, String>) -> String {
    match with_props.get(UPSTREAM_SOURCE_KEY) {
        Some(connector) => connector.to_lowercase(),
        None => String::new(),
    }
}

/// Reports a `CreateStreamJob` telemetry event for a source being built.
///
/// The source's row format and encoding are serialized into a jsonbb object
/// and attached as the event's attributes.
fn telemetry_source_build(
    source_type: &str, // "source" or "source backfill"
    source_id: &TableId,
    source_info: &PbStreamSourceInfo,
    with_props: &BTreeMap<String, String>,
) {
    // Assemble the `{"format": ..., "encode": ...}` attribute object.
    let mut attr_builder = jsonbb::Builder::<Vec<u8>>::new();
    attr_builder.begin_object();
    attr_builder.add_string("format");
    attr_builder.add_value(jsonbb::ValueRef::String(source_info.format().as_str_name()));
    attr_builder.add_string("encode");
    attr_builder.add_value(jsonbb::ValueRef::String(
        source_info.row_encode().as_str_name(),
    ));
    attr_builder.end_object();
    let attributes = attr_builder.finish();

    report_event(
        PbTelemetryEventStage::CreateStreamJob,
        source_type,
        source_id.table_id as i64,
        Some(get_connector_name(with_props)),
        Some(PbTelemetryDatabaseObject::Source),
        Some(attributes),
    )
}
2 changes: 0 additions & 2 deletions src/stream/src/from_proto/source/trad_source.rs
Original file line number Diff line number Diff line change
Expand Up @@ -110,8 +110,6 @@ pub fn create_source_desc_builder(
});
}

telemetry_source_build(source_type, source_id, &source_info, &with_properties);

SourceDescBuilder::new(
source_columns.clone(),
params.env.source_metrics(),
Expand Down
Loading