
Commit fc1d9db

deepsource analysis fix
1 parent 30353ab commit fc1d9db

File tree: 3 files changed, +69 -47 lines


server/src/handlers/http/cluster/mod.rs

Lines changed: 9 additions & 6 deletions
@@ -575,12 +575,15 @@ pub fn init_cluster_metrics_schedular() -> Result<(), PostError> {
                         log::info!("Cluster metrics fetched successfully from all ingestors");
                         if let Ok(metrics_bytes) = serde_json::to_vec(&metrics) {
                             let stream_name = INTERNAL_STREAM_NAME;
-                            if let Ok(()) = ingest_internal_stream(
-                                stream_name.to_string(),
-                                bytes::Bytes::from(metrics_bytes),
-                            )
-                            .await
-                            {
+
+                            if matches!(
+                                ingest_internal_stream(
+                                    stream_name.to_string(),
+                                    bytes::Bytes::from(metrics_bytes),
+                                )
+                                .await,
+                                Ok(())
+                            ) {
                                 log::info!(
                                     "Cluster metrics successfully ingested into internal stream"
                                 );
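
This is the pattern DeepSource flags: an `if let Ok(())` whose only purpose is to gate a success log. Below is a minimal, self-contained sketch of the before/after forms; `ingest` is a hypothetical stand-in for `ingest_internal_stream`, not the actual Parseable function.

```rust
// Stand-in for ingest_internal_stream: succeeds unless the inputs are empty.
fn ingest(stream: String, payload: &[u8]) -> Result<(), String> {
    if stream.is_empty() || payload.is_empty() {
        return Err("empty stream name or payload".to_string());
    }
    Ok(())
}

fn main() {
    let payload = b"{\"metric\": 1}";

    // Before: `if let` used purely as a success test.
    if let Ok(()) = ingest("internal".to_string(), payload) {
        println!("ingested (if let form)");
    }

    // After: the same check written as a boolean expression with `matches!`,
    // mirroring the change in cluster/mod.rs above.
    if matches!(ingest("internal".to_string(), payload), Ok(())) {
        println!("ingested (matches! form)");
    }
}
```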

server/src/handlers/http/modal/query_server.rs

Lines changed: 1 addition & 1 deletion
@@ -186,7 +186,7 @@ impl QueryServer {
             analytics::init_analytics_scheduler()?;
         }
 
-        if let Ok(()) = init_cluster_metrics_schedular() {
+        if matches!(init_cluster_metrics_schedular(), Ok(())) {
             log::info!("Cluster metrics scheduler started successfully");
         }
         let (localsync_handler, mut localsync_outbox, localsync_inbox) = sync::run_local_sync();
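
Same refactor as in cluster/mod.rs. Since `init_cluster_metrics_schedular()` returns `Result<(), PostError>` and the error value is ignored here, `Result::is_ok()` would express the identical check; a small comparison sketch (the commit itself keeps `matches!`, and `init_scheduler` below is a hypothetical stand-in):

```rust
// Hypothetical stand-in for init_cluster_metrics_schedular().
fn init_scheduler() -> Result<(), String> {
    Ok(())
}

fn main() {
    // Equivalent ways to test a Result<(), E> for success when the error is ignored.
    if matches!(init_scheduler(), Ok(())) {
        println!("Cluster metrics scheduler started successfully");
    }
    if init_scheduler().is_ok() {
        println!("Cluster metrics scheduler started successfully");
    }
}
```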

server/src/metrics/prom_utils.rs

Lines changed: 59 additions & 40 deletions
@@ -105,86 +105,89 @@ impl Metrics {
                 if let PromValue::Gauge(val) = sample.value {
                     prom_dress.parseable_events_ingested += val;
                 }
-            } else if &sample.metric == "parseable_events_ingested_size" {
+            }
+            if &sample.metric == "parseable_events_ingested_size" {
                 if let PromValue::Gauge(val) = sample.value {
                     prom_dress.parseable_events_ingested_size += val;
                 }
-            } else if &sample.metric == "parseable_lifetime_events_ingested" {
+            }
+            if &sample.metric == "parseable_lifetime_events_ingested" {
                 if let PromValue::Gauge(val) = sample.value {
                     prom_dress.parseable_lifetime_events_ingested += val;
                 }
-            } else if &sample.metric == "parseable_lifetime_events_ingested_size" {
+            }
+            if &sample.metric == "parseable_lifetime_events_ingested_size" {
                 if let PromValue::Gauge(val) = sample.value {
                     prom_dress.parseable_lifetime_events_ingested_size += val;
                 }
-            } else if &sample.metric == "parseable_deleted_events_ingested" {
+            }
+            if &sample.metric == "parseable_deleted_events_ingested" {
                 if let PromValue::Gauge(val) = sample.value {
                     prom_dress.parseable_deleted_events_ingested += val;
                 }
-            } else if &sample.metric == "parseable_deleted_events_ingested_size" {
+            }
+            if &sample.metric == "parseable_deleted_events_ingested_size" {
                 if let PromValue::Gauge(val) = sample.value {
                     prom_dress.parseable_deleted_events_ingested_size += val;
                 }
-            } else if sample.metric == "parseable_staging_files" {
+            }
+            if sample.metric == "parseable_staging_files" {
                 if let PromValue::Gauge(val) = sample.value {
                     prom_dress.parseable_staging_files += val;
                 }
-            } else if sample.metric == "process_resident_memory_bytes" {
+            }
+            if sample.metric == "process_resident_memory_bytes" {
                 if let PromValue::Gauge(val) = sample.value {
                     prom_dress.process_resident_memory_bytes += val;
                 }
-            } else if sample.metric == "parseable_storage_size" {
-                if sample.labels.get("type").expect("type is present") == "data" {
-                    if let PromValue::Gauge(val) = sample.value {
-                        prom_dress.parseable_storage_size.data += val;
-                    }
-                } else if sample.labels.get("type").expect("type is present") == "staging" {
-                    if let PromValue::Gauge(val) = sample.value {
-                        prom_dress.parseable_storage_size.staging += val;
-                    }
+            }
+            if sample.metric == "parseable_storage_size"
+                && sample.labels.get("type").expect("type is present") == "data"
+            {
+                if let PromValue::Gauge(val) = sample.value {
+                    prom_dress.parseable_storage_size.data += val;
+                }
+            }
+            if sample.metric == "parseable_storage_size"
+                && sample.labels.get("type").expect("type is present") == "staging"
+            {
+                if let PromValue::Gauge(val) = sample.value {
+                    prom_dress.parseable_storage_size.staging += val;
                 }
-            } else if sample.metric == "parseable_lifetime_events_storage_size" {
-                if sample.labels.get("type").expect("type is present") == "data" {
-                    if let PromValue::Gauge(val) = sample.value {
-                        prom_dress.parseable_lifetime_storage_size.data += val;
-                    }
+            }
+
+            if sample.metric == "parseable_lifetime_events_storage_size"
+                && sample.labels.get("type").expect("type is present") == "data"
+            {
+                if let PromValue::Gauge(val) = sample.value {
+                    prom_dress.parseable_lifetime_storage_size.data += val;
                 }
-            } else if sample.metric == "parseable_deleted_events_storage_size"
+            }
+            if sample.metric == "parseable_deleted_events_storage_size"
                 && sample.labels.get("type").expect("type is present") == "data"
             {
                 if let PromValue::Gauge(val) = sample.value {
                     prom_dress.parseable_deleted_storage_size.data += val;
                 }
             }
         }
-        let about_api_json = Self::from_about_api_response(ingestor_metadata.clone())
+        let (commit_id, staging, cache) = Self::from_about_api_response(ingestor_metadata.clone())
             .await
             .map_err(|err| {
                 log::error!("Fatal: failed to get ingestor info: {:?}", err);
                 PostError::Invalid(err.into())
             })?;
-        let commit_id = about_api_json
-            .get("commit")
-            .and_then(|x| x.as_str())
-            .unwrap_or_default();
-        let staging = about_api_json
-            .get("staging")
-            .and_then(|x| x.as_str())
-            .unwrap_or_default();
-        let cache = about_api_json
-            .get("cache")
-            .and_then(|x| x.as_str())
-            .unwrap_or_default();
-        prom_dress.commit = commit_id.to_string();
-        prom_dress.staging = staging.to_string();
-        prom_dress.cache = cache.to_string();
+
+        prom_dress.commit = commit_id;
+        prom_dress.staging = staging;
+        prom_dress.cache = cache;
 
         Ok(prom_dress)
     }
 
     pub async fn from_about_api_response(
         ingestor_metadata: IngestorMetadata,
-    ) -> Result<serde_json::Value, PostError> {
+    ) -> Result<(String, String, String), PostError> {
         let uri = Url::parse(&format!(
             "{}{}/about",
             &ingestor_metadata.domain_name,
@@ -203,7 +206,23 @@ impl Metrics {
             let about_api_json = res.text().await.map_err(PostError::NetworkError)?;
             let about_api_json: serde_json::Value =
                 serde_json::from_str(&about_api_json).map_err(PostError::SerdeError)?;
-            Ok(about_api_json)
+            let commit_id = about_api_json
+                .get("commit")
+                .and_then(|x| x.as_str())
+                .unwrap_or_default();
+            let staging = about_api_json
+                .get("staging")
+                .and_then(|x| x.as_str())
+                .unwrap_or_default();
+            let cache = about_api_json
+                .get("cache")
+                .and_then(|x| x.as_str())
+                .unwrap_or_default();
+            Ok((
+                commit_id.to_string(),
+                staging.to_string(),
+                cache.to_string(),
+            ))
         } else {
             log::warn!(
                 "Failed to fetch about API response from ingestor: {}\n",
