use crate::util::{AlertState, RuleHealth, TargetHealth};
use serde::Deserialize;
use std::collections::HashMap;
use std::fmt;
use time::{Duration, OffsetDateTime, PrimitiveDateTime};
use url::Url;
mod de {
use serde::{Deserialize, Deserializer};
use serde_json::Value;
use std::str::FromStr;
use time::format_description::well_known::Rfc3339;
use time::format_description::FormatItem;
use time::macros::format_description;
use time::{Duration, OffsetDateTime, PrimitiveDateTime};
const BUILD_INFO_DATE_FORMAT: &[FormatItem] = format_description!(
"[year repr:full][month repr:numerical][day]-[hour repr:24]:[minute]:[second]"
);
pub(super) fn deserialize_f64<'de, D>(deserializer: D) -> Result<f64, D::Error>
where
D: Deserializer<'de>,
{
match Value::deserialize(deserializer)? {
Value::String(s) => f64::from_str(&s).map_err(serde::de::Error::custom),
Value::Number(s) => s.as_f64().ok_or(serde::de::Error::custom(
"failed to convert sample value to float",
)),
_ => {
return Err(serde::de::Error::custom(
"unexpected type for sample value, expected string or integer",
))
}
}
}
pub(super) fn deserialize_rfc3339<'de, D>(deserializer: D) -> Result<OffsetDateTime, D::Error>
where
D: Deserializer<'de>,
{
let raw = String::deserialize(deserializer)?;
OffsetDateTime::parse(&raw, &Rfc3339)
.map_err(|e| serde::de::Error::custom(format!("error parsing '{}': {}", raw, e)))
}
pub(super) fn deserialize_build_info_date<'de, D>(
deserializer: D,
) -> Result<PrimitiveDateTime, D::Error>
where
D: Deserializer<'de>,
{
let raw = String::deserialize(deserializer)?;
PrimitiveDateTime::parse(&raw, &BUILD_INFO_DATE_FORMAT)
.map_err(|e| serde::de::Error::custom(format!("error parsing '{}': {}", raw, e)))
}
pub(super) fn deserialize_prometheus_duration<'de, D>(
deserializer: D,
) -> Result<Duration, D::Error>
where
D: Deserializer<'de>,
{
use serde::de::Error as SerdeError;
let raw_str = String::deserialize(deserializer)?;
let mut total_milliseconds: i64 = 0;
let mut raw_num = String::new();
let mut duration_iter = raw_str.chars().peekable();
while let Some(item) = duration_iter.next() {
if ('0'..='9').contains(&item) {
raw_num.push(item);
continue;
}
let num = raw_num.parse::<i64>().map_err(SerdeError::custom)?;
match item {
'y' => {
total_milliseconds += num * 1000 * 60 * 60 * 24 * 365;
}
'w' => {
total_milliseconds += num * 1000 * 60 * 60 * 24 * 7;
}
'd' => {
total_milliseconds += num * 1000 * 60 * 60 * 24;
}
'h' => {
total_milliseconds += num * 1000 * 60 * 60;
}
'm' => {
if duration_iter.next_if_eq(&'s').is_some() {
total_milliseconds += num * 1000 * 60 * 60;
} else {
total_milliseconds += num * 1000 * 60;
}
}
's' => {
total_milliseconds += num * 1000;
}
_ => return Err(SerdeError::custom("invalid time duration")),
};
raw_num.clear();
}
Ok(Duration::milliseconds(total_milliseconds))
}
}
/// Top-level envelope returned by every Prometheus HTTP API endpoint.
/// The JSON `status` field selects the variant.
#[derive(Debug, Deserialize)]
#[serde(tag = "status")]
pub(crate) enum ApiResponse {
    /// `"status": "success"` — `data` holds the endpoint-specific payload,
    /// kept as raw JSON and decoded further by the caller.
    #[serde(alias = "success")]
    Success { data: serde_json::Value },
    /// `"status": "error"` — the body carries the API error details.
    #[serde(alias = "error")]
    Error(crate::error::ApiError),
}
/// Query execution statistics, present when stats were requested for a
/// query (see [`PromqlResult::stats`]).
#[derive(Debug, Clone, Deserialize)]
pub struct Stats {
    // Per-phase execution timings.
    timings: Timings,
    // Sample counts observed during evaluation.
    samples: Samples,
}

impl Stats {
    /// Per-phase execution timings of the query.
    pub fn timings(&self) -> &Timings {
        &self.timings
    }

    /// Sample counts observed while evaluating the query.
    pub fn samples(&self) -> &Samples {
        &self.samples
    }
}
/// Per-phase timing breakdown of a query evaluation.
// NOTE(review): values appear to be fractional seconds (e.g. 0.000102139
// in the API fixtures) — confirm against the Prometheus stats docs.
#[derive(Debug, Copy, Clone, Deserialize)]
pub struct Timings {
    #[serde(alias = "evalTotalTime")]
    eval_total_time: f64,
    #[serde(alias = "resultSortTime")]
    result_sort_time: f64,
    #[serde(alias = "queryPreparationTime")]
    query_preparation_time: f64,
    #[serde(alias = "innerEvalTime")]
    inner_eval_time: f64,
    #[serde(alias = "execQueueTime")]
    exec_queue_time: f64,
    #[serde(alias = "execTotalTime")]
    exec_total_time: f64,
}

// Plain accessors mirroring the deserialized fields.
impl Timings {
    pub fn eval_total_time(&self) -> f64 {
        self.eval_total_time
    }

    pub fn result_sort_time(&self) -> f64 {
        self.result_sort_time
    }

    pub fn query_preparation_time(&self) -> f64 {
        self.query_preparation_time
    }

    pub fn inner_eval_time(&self) -> f64 {
        self.inner_eval_time
    }

    pub fn exec_queue_time(&self) -> f64 {
        self.exec_queue_time
    }

    pub fn exec_total_time(&self) -> f64 {
        self.exec_total_time
    }
}
/// Sample counts collected while evaluating a query.
#[derive(Debug, Clone, Deserialize)]
pub struct Samples {
    // Only present when per-step stats were requested; absent otherwise.
    #[serde(alias = "totalQueryableSamplesPerStep")]
    total_queryable_samples_per_step: Option<Vec<Sample>>,
    #[serde(alias = "totalQueryableSamples")]
    total_queryable_samples: i64,
    #[serde(alias = "peakSamples")]
    peak_samples: i64,
}

impl Samples {
    /// Per-step queryable sample counts, if the server reported them.
    pub fn total_queryable_samples_per_step(&self) -> Option<&Vec<Sample>> {
        self.total_queryable_samples_per_step.as_ref()
    }

    /// Total number of queryable samples.
    pub fn total_queryable_samples(&self) -> i64 {
        self.total_queryable_samples
    }

    /// Peak number of samples held at once during evaluation.
    pub fn peak_samples(&self) -> i64 {
        self.peak_samples
    }
}
/// Result of a PromQL query: the typed result data plus optional
/// execution statistics.
#[derive(Debug, Clone, Deserialize)]
pub struct PromqlResult {
    // `resultType`/`result` live at the same JSON level as `stats`,
    // hence the flatten.
    #[serde(flatten)]
    pub(crate) data: Data,
    // Only present when stats were requested.
    pub(crate) stats: Option<Stats>,
}

impl PromqlResult {
    /// The typed query result (vector, matrix or scalar).
    pub fn data(&self) -> &Data {
        &self.data
    }

    /// Execution statistics, if the server returned them.
    pub fn stats(&self) -> Option<&Stats> {
        self.stats.as_ref()
    }
}
/// Typed PromQL result payload, discriminated by the JSON `resultType`
/// field with the actual values under `result`.
#[derive(Clone, Debug, Deserialize)]
#[serde(tag = "resultType", content = "result")]
pub enum Data {
    /// Instant-query result: one sample per series.
    #[serde(alias = "vector")]
    Vector(Vec<InstantVector>),
    /// Range-query result: a series of samples per metric.
    #[serde(alias = "matrix")]
    Matrix(Vec<RangeVector>),
    /// Single scalar sample.
    #[serde(alias = "scalar")]
    Scalar(Sample),
}
impl Data {
    /// Returns the instant-vector results, or `None` for other result
    /// types.
    pub fn as_vector(&self) -> Option<&[InstantVector]> {
        match self {
            Data::Vector(v) => Some(v.as_ref()),
            _ => None,
        }
    }

    /// Returns the range-vector (matrix) results, or `None` for other
    /// result types.
    pub fn as_matrix(&self) -> Option<&[RangeVector]> {
        match self {
            Data::Matrix(v) => Some(v.as_ref()),
            _ => None,
        }
    }

    /// Returns the scalar sample, or `None` for other result types.
    pub fn as_scalar(&self) -> Option<&Sample> {
        match self {
            Data::Scalar(v) => Some(v),
            _ => None,
        }
    }

    /// True if this is a vector result.
    pub fn is_vector(&self) -> bool {
        // `matches!` replaces the manual `match … { _ => false }` form
        // (clippy::match_like_matches_macro).
        matches!(self, Data::Vector(_))
    }

    /// True if this is a matrix result.
    pub fn is_matrix(&self) -> bool {
        matches!(self, Data::Matrix(_))
    }

    /// True if this is a scalar result.
    pub fn is_scalar(&self) -> bool {
        matches!(self, Data::Scalar(_))
    }

    /// True if the result holds no series; a scalar is never empty.
    pub fn is_empty(&self) -> bool {
        match self {
            Data::Vector(v) => v.is_empty(),
            Data::Matrix(v) => v.is_empty(),
            Data::Scalar(_) => false,
        }
    }
}
/// One series of an instant-query result: a label set and a single sample.
#[derive(Clone, Debug, PartialEq, Deserialize)]
pub struct InstantVector {
    pub(crate) metric: HashMap<String, String>,
    // The API calls this field "value".
    #[serde(alias = "value")]
    pub(crate) sample: Sample,
}

impl InstantVector {
    /// Label name/value pairs identifying the series.
    pub fn metric(&self) -> &HashMap<String, String> {
        &self.metric
    }

    /// The single sample of this series.
    pub fn sample(&self) -> &Sample {
        &self.sample
    }
}
/// One series of a range-query result: a label set and its samples over
/// the queried range.
#[derive(Clone, Debug, PartialEq, Deserialize)]
pub struct RangeVector {
    pub(crate) metric: HashMap<String, String>,
    // The API calls this field "values".
    #[serde(alias = "values")]
    pub(crate) samples: Vec<Sample>,
}

impl RangeVector {
    /// Label name/value pairs identifying the series.
    pub fn metric(&self) -> &HashMap<String, String> {
        &self.metric
    }

    /// The samples of this series, in range order.
    pub fn samples(&self) -> &[Sample] {
        &self.samples
    }
}
/// A single sample: a timestamp and a value.
// Deserialized from the API's two-element array form, e.g.
// [1435781451.781, "1"].
#[derive(Clone, Debug, PartialEq, Deserialize)]
pub struct Sample {
    // NOTE(review): fixtures suggest this is a Unix timestamp in seconds
    // with fractional milliseconds — confirm against the API docs.
    pub(crate) timestamp: f64,
    // The API encodes the value as a string; a custom deserializer also
    // accepts bare numbers.
    #[serde(deserialize_with = "de::deserialize_f64")]
    pub(crate) value: f64,
}

impl Sample {
    /// The sample's timestamp.
    pub fn timestamp(&self) -> f64 {
        self.timestamp
    }

    /// The sample's value.
    pub fn value(&self) -> f64 {
        self.value
    }
}
/// Scrape targets as reported by the targets endpoint, split into
/// currently active and dropped targets.
#[derive(Clone, Debug, Deserialize)]
pub struct Targets {
    #[serde(alias = "activeTargets")]
    pub(crate) active: Vec<ActiveTarget>,
    #[serde(alias = "droppedTargets")]
    pub(crate) dropped: Vec<DroppedTarget>,
}

impl Targets {
    /// Targets currently being scraped.
    pub fn active(&self) -> &[ActiveTarget] {
        &self.active
    }

    /// Targets dropped during relabelling.
    pub fn dropped(&self) -> &[DroppedTarget] {
        &self.dropped
    }
}
/// A target that Prometheus is actively scraping.
#[derive(Clone, Debug, Deserialize)]
pub struct ActiveTarget {
    /// Labels of the target as discovered, before relabelling.
    #[serde(alias = "discoveredLabels")]
    pub(crate) discovered_labels: HashMap<String, String>,
    /// Final label set after relabelling.
    pub(crate) labels: HashMap<String, String>,
    #[serde(alias = "scrapePool")]
    pub(crate) scrape_pool: String,
    #[serde(alias = "scrapeUrl")]
    pub(crate) scrape_url: Url,
    #[serde(alias = "globalUrl")]
    pub(crate) global_url: Url,
    /// Empty string when the last scrape succeeded.
    #[serde(alias = "lastError")]
    pub(crate) last_error: String,
    #[serde(alias = "lastScrape")]
    #[serde(deserialize_with = "de::deserialize_rfc3339")]
    pub(crate) last_scrape: OffsetDateTime,
    #[serde(alias = "lastScrapeDuration")]
    pub(crate) last_scrape_duration: f64,
    pub(crate) health: TargetHealth,
    // Parsed from Prometheus duration strings like "1m" / "10s".
    #[serde(alias = "scrapeInterval")]
    #[serde(deserialize_with = "de::deserialize_prometheus_duration")]
    pub(crate) scrape_interval: Duration,
    #[serde(alias = "scrapeTimeout")]
    #[serde(deserialize_with = "de::deserialize_prometheus_duration")]
    pub(crate) scrape_timeout: Duration,
}

impl ActiveTarget {
    /// Labels of the target before relabelling.
    pub fn discovered_labels(&self) -> &HashMap<String, String> {
        &self.discovered_labels
    }

    /// Final label set after relabelling.
    pub fn labels(&self) -> &HashMap<String, String> {
        &self.labels
    }

    /// Name of the scrape pool this target belongs to.
    pub fn scrape_pool(&self) -> &str {
        &self.scrape_pool
    }

    /// URL Prometheus scrapes for this target.
    pub fn scrape_url(&self) -> &Url {
        &self.scrape_url
    }

    /// Externally reachable URL of the target.
    pub fn global_url(&self) -> &Url {
        &self.global_url
    }

    /// Error from the last scrape; empty on success.
    pub fn last_error(&self) -> &str {
        &self.last_error
    }

    /// Time of the last scrape.
    pub fn last_scrape(&self) -> &OffsetDateTime {
        &self.last_scrape
    }

    /// Duration of the last scrape.
    pub fn last_scrape_duration(&self) -> f64 {
        self.last_scrape_duration
    }

    /// Current health of the target.
    pub fn health(&self) -> TargetHealth {
        self.health
    }

    /// Configured scrape interval.
    pub fn scrape_interval(&self) -> &Duration {
        &self.scrape_interval
    }

    /// Configured scrape timeout.
    pub fn scrape_timeout(&self) -> &Duration {
        &self.scrape_timeout
    }
}
/// A target dropped during relabelling; only the discovered labels remain.
#[derive(Clone, Debug, Deserialize)]
pub struct DroppedTarget {
    #[serde(alias = "discoveredLabels")]
    pub(crate) discovered_labels: HashMap<String, String>,
}

impl DroppedTarget {
    /// Labels of the target as discovered, before it was dropped.
    pub fn discovered_labels(&self) -> &HashMap<String, String> {
        &self.discovered_labels
    }
}
/// A group of recording/alerting rules loaded from one rule file.
#[derive(Clone, Debug, Deserialize)]
pub struct RuleGroup {
    pub(crate) rules: Vec<Rule>,
    pub(crate) file: String,
    pub(crate) interval: f64,
    pub(crate) name: String,
}

impl RuleGroup {
    /// The rules in this group, in file order.
    pub fn rules(&self) -> &[Rule] {
        &self.rules
    }

    /// Path of the rule file this group was loaded from.
    pub fn file(&self) -> &str {
        &self.file
    }

    /// Evaluation interval of the group.
    pub fn interval(&self) -> f64 {
        self.interval
    }

    /// Name of the rule group.
    pub fn name(&self) -> &str {
        &self.name
    }
}
/// A single rule, discriminated by the JSON `type` field.
#[derive(Clone, Debug, Deserialize)]
#[serde(tag = "type")]
pub enum Rule {
    /// `"type": "recording"` — precomputes a new series from a query.
    #[serde(alias = "recording")]
    Recording(RecordingRule),
    /// `"type": "alerting"` — fires alerts based on a query.
    #[serde(alias = "alerting")]
    Alerting(AlertingRule),
}
/// An alerting rule together with its currently active alerts.
#[derive(Clone, Debug, Deserialize)]
pub struct AlertingRule {
    pub(crate) alerts: Vec<Alert>,
    pub(crate) annotations: HashMap<String, String>,
    // The rule's "for" clause as reported by the API.
    pub(crate) duration: f64,
    pub(crate) health: RuleHealth,
    pub(crate) labels: HashMap<String, String>,
    pub(crate) name: String,
    pub(crate) query: String,
}

impl AlertingRule {
    /// Alerts currently active for this rule.
    pub fn alerts(&self) -> &[Alert] {
        &self.alerts
    }

    /// Annotations attached to the rule.
    pub fn annotations(&self) -> &HashMap<String, String> {
        &self.annotations
    }

    /// The rule's "for" duration.
    pub fn duration(&self) -> f64 {
        self.duration
    }

    /// Evaluation health of the rule.
    pub fn health(&self) -> RuleHealth {
        self.health
    }

    /// Labels attached to the rule.
    pub fn labels(&self) -> &HashMap<String, String> {
        &self.labels
    }

    /// Name of the alerting rule.
    pub fn name(&self) -> &str {
        &self.name
    }

    /// The PromQL expression the rule evaluates.
    pub fn query(&self) -> &str {
        &self.query
    }
}
/// A recording rule: precomputes `query` into a series named `name`.
#[derive(Clone, Debug, Deserialize)]
pub struct RecordingRule {
    pub(crate) health: RuleHealth,
    pub(crate) name: String,
    pub(crate) query: String,
    // Optional: absent when the rule defines no extra labels.
    pub(crate) labels: Option<HashMap<String, String>>,
}

impl RecordingRule {
    /// Evaluation health of the rule.
    pub fn health(&self) -> RuleHealth {
        self.health
    }

    /// Name of the series the rule records.
    pub fn name(&self) -> &str {
        &self.name
    }

    /// The PromQL expression the rule evaluates.
    pub fn query(&self) -> &str {
        &self.query
    }

    /// Extra labels attached by the rule, if any.
    // NOTE(review): `Option<&HashMap<…>>` (via `as_ref()`) would be the
    // more idiomatic return type, but changing it would break callers.
    pub fn labels(&self) -> &Option<HashMap<String, String>> {
        &self.labels
    }
}
/// A single alert instance of an alerting rule.
#[derive(Clone, Debug, Deserialize)]
pub struct Alert {
    #[serde(alias = "activeAt")]
    #[serde(deserialize_with = "de::deserialize_rfc3339")]
    pub(crate) active_at: OffsetDateTime,
    pub(crate) annotations: HashMap<String, String>,
    pub(crate) labels: HashMap<String, String>,
    pub(crate) state: AlertState,
    // The API reports the value as a string (e.g. "1e+00").
    pub(crate) value: String,
}

impl Alert {
    /// Time at which the alert became active.
    pub fn active_at(&self) -> &OffsetDateTime {
        &self.active_at
    }

    /// Annotations attached to the alert.
    pub fn annotations(&self) -> &HashMap<String, String> {
        &self.annotations
    }

    /// Labels identifying the alert.
    pub fn labels(&self) -> &HashMap<String, String> {
        &self.labels
    }

    /// Current state of the alert.
    pub fn state(&self) -> AlertState {
        self.state
    }

    /// The alert's value, as the string reported by the API.
    pub fn value(&self) -> &str {
        &self.value
    }
}
/// Alertmanager discovery state: active and dropped Alertmanager
/// endpoints.
#[derive(Clone, Debug, Deserialize)]
pub struct Alertmanagers {
    #[serde(alias = "activeAlertmanagers")]
    pub(crate) active: Vec<Alertmanager>,
    #[serde(alias = "droppedAlertmanagers")]
    pub(crate) dropped: Vec<Alertmanager>,
}

impl Alertmanagers {
    /// Alertmanagers Prometheus currently sends alerts to.
    pub fn active(&self) -> &[Alertmanager] {
        &self.active
    }

    /// Alertmanagers dropped during relabelling.
    pub fn dropped(&self) -> &[Alertmanager] {
        &self.dropped
    }
}
/// A single discovered Alertmanager endpoint.
#[derive(Clone, Debug, Deserialize)]
pub struct Alertmanager {
    url: Url,
}

impl Alertmanager {
    /// URL of the Alertmanager's alerts API.
    pub fn url(&self) -> &Url {
        &self.url
    }
}
/// Metric type as reported by the metadata endpoints; the aliases match
/// the lowercase strings used on the wire.
#[derive(Debug, Copy, Clone, Deserialize)]
pub enum MetricType {
    #[serde(alias = "counter")]
    Counter,
    #[serde(alias = "gauge")]
    Gauge,
    #[serde(alias = "histogram")]
    Histogram,
    #[serde(alias = "gaugehistogram")]
    GaugeHistogram,
    #[serde(alias = "summary")]
    Summary,
    #[serde(alias = "info")]
    Info,
    #[serde(alias = "stateset")]
    Stateset,
    /// Fallback for metrics without a known type.
    #[serde(alias = "unknown")]
    Unknown,
}
impl fmt::Display for MetricType {
    /// Formats the metric type as the lowercase string used by the API
    /// (the inverse of the serde aliases on [`MetricType`]).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match self {
            MetricType::Counter => "counter",
            MetricType::Gauge => "gauge",
            MetricType::Histogram => "histogram",
            MetricType::GaugeHistogram => "gaugehistogram",
            MetricType::Summary => "summary",
            MetricType::Info => "info",
            MetricType::Stateset => "stateset",
            MetricType::Unknown => "unknown",
        };
        f.write_str(name)
    }
}
/// Metric metadata as reported per scrape target.
#[derive(Clone, Debug, Deserialize)]
pub struct TargetMetadata {
    /// Labels identifying the target the metadata came from.
    pub(crate) target: HashMap<String, String>,
    // "type" is a Rust keyword, hence the alias + renamed field.
    #[serde(alias = "type")]
    pub(crate) metric_type: MetricType,
    // Absent when the request already filtered on a metric name.
    pub(crate) metric: Option<String>,
    pub(crate) help: String,
    pub(crate) unit: String,
}

impl TargetMetadata {
    /// Labels identifying the originating target.
    pub fn target(&self) -> &HashMap<String, String> {
        &self.target
    }

    /// Type of the metric.
    pub fn metric_type(&self) -> MetricType {
        self.metric_type
    }

    /// The metric name, if present in the response.
    pub fn metric(&self) -> Option<&str> {
        self.metric.as_deref()
    }

    /// Help text of the metric.
    pub fn help(&self) -> &str {
        &self.help
    }

    /// Unit of the metric; may be empty.
    pub fn unit(&self) -> &str {
        &self.unit
    }
}
/// Metric metadata as reported by the metadata endpoint, keyed by metric
/// name at the call site.
#[derive(Clone, Debug, Deserialize)]
pub struct MetricMetadata {
    // "type" is a Rust keyword, hence the alias + renamed field.
    #[serde(alias = "type")]
    pub(crate) metric_type: MetricType,
    pub(crate) help: String,
    pub(crate) unit: String,
}

impl MetricMetadata {
    /// Type of the metric.
    pub fn metric_type(&self) -> MetricType {
        self.metric_type
    }

    /// Help text of the metric.
    pub fn help(&self) -> &str {
        &self.help
    }

    /// Unit of the metric; may be empty.
    pub fn unit(&self) -> &str {
        &self.unit
    }
}
/// Prometheus build information from the build-info status endpoint.
#[derive(Clone, Debug, Deserialize)]
pub struct BuildInformation {
    pub(crate) version: String,
    pub(crate) revision: String,
    pub(crate) branch: String,
    #[serde(alias = "buildUser")]
    pub(crate) build_user: String,
    // Encoded without a timezone, e.g. "20191102-16:19:59".
    #[serde(alias = "buildDate")]
    #[serde(deserialize_with = "de::deserialize_build_info_date")]
    pub(crate) build_date: PrimitiveDateTime,
    #[serde(alias = "goVersion")]
    pub(crate) go_version: String,
}

impl BuildInformation {
    /// Prometheus version string.
    pub fn version(&self) -> &str {
        &self.version
    }

    /// VCS revision the server was built from.
    pub fn revision(&self) -> &str {
        &self.revision
    }

    /// VCS branch the server was built from.
    pub fn branch(&self) -> &str {
        &self.branch
    }

    /// User that produced the build.
    pub fn build_user(&self) -> &str {
        &self.build_user
    }

    /// Date of the build (no timezone information).
    pub fn build_date(&self) -> &PrimitiveDateTime {
        &self.build_date
    }

    /// Go toolchain version used for the build.
    pub fn go_version(&self) -> &str {
        &self.go_version
    }
}
/// Prometheus runtime information from the runtime-info status endpoint.
// Unknown fields in the response (e.g. "timeSeriesCount") are ignored by
// serde's default behavior.
#[derive(Clone, Debug, Deserialize)]
pub struct RuntimeInformation {
    #[serde(alias = "startTime")]
    #[serde(deserialize_with = "de::deserialize_rfc3339")]
    pub(crate) start_time: OffsetDateTime,
    #[serde(alias = "CWD")]
    pub(crate) cwd: String,
    #[serde(alias = "reloadConfigSuccess")]
    pub(crate) reload_config_success: bool,
    #[serde(alias = "lastConfigTime")]
    #[serde(deserialize_with = "de::deserialize_rfc3339")]
    pub(crate) last_config_time: OffsetDateTime,
    #[serde(alias = "corruptionCount")]
    pub(crate) corruption_count: i64,
    #[serde(alias = "goroutineCount")]
    pub(crate) goroutine_count: usize,
    #[serde(alias = "GOMAXPROCS")]
    pub(crate) go_max_procs: usize,
    #[serde(alias = "GOGC")]
    pub(crate) go_gc: String,
    #[serde(alias = "GODEBUG")]
    pub(crate) go_debug: String,
    // Parsed from a Prometheus duration string like "15d".
    #[serde(alias = "storageRetention")]
    #[serde(deserialize_with = "de::deserialize_prometheus_duration")]
    pub(crate) storage_retention: Duration,
}

impl RuntimeInformation {
    /// Time the server started.
    pub fn start_time(&self) -> &OffsetDateTime {
        &self.start_time
    }

    /// Working directory of the server process.
    pub fn cwd(&self) -> &str {
        &self.cwd
    }

    /// Whether the last configuration reload succeeded.
    pub fn reload_config_success(&self) -> bool {
        self.reload_config_success
    }

    /// Time of the last (attempted) configuration load.
    pub fn last_config_time(&self) -> &OffsetDateTime {
        &self.last_config_time
    }

    /// Reported corruption count.
    pub fn corruption_count(&self) -> i64 {
        self.corruption_count
    }

    /// Current number of goroutines.
    pub fn goroutine_count(&self) -> usize {
        self.goroutine_count
    }

    /// The server's GOMAXPROCS setting.
    pub fn go_max_procs(&self) -> usize {
        self.go_max_procs
    }

    /// The server's GOGC setting; may be empty.
    pub fn go_gc(&self) -> &str {
        &self.go_gc
    }

    /// The server's GODEBUG setting; may be empty.
    pub fn go_debug(&self) -> &str {
        &self.go_debug
    }

    /// Configured storage retention.
    pub fn storage_retention(&self) -> &Duration {
        &self.storage_retention
    }
}
/// TSDB usage statistics from the TSDB status endpoint: head-block stats
/// plus several cardinality top-lists.
#[derive(Clone, Debug, Deserialize)]
pub struct TsdbStatistics {
    #[serde(alias = "headStats")]
    pub(crate) head_stats: HeadStatistics,
    #[serde(alias = "seriesCountByMetricName")]
    pub(crate) series_count_by_metric_name: Vec<TsdbItemCount>,
    #[serde(alias = "labelValueCountByLabelName")]
    pub(crate) label_value_count_by_label_name: Vec<TsdbItemCount>,
    #[serde(alias = "memoryInBytesByLabelName")]
    pub(crate) memory_in_bytes_by_label_name: Vec<TsdbItemCount>,
    #[serde(alias = "seriesCountByLabelValuePair")]
    pub(crate) series_count_by_label_value_pair: Vec<TsdbItemCount>,
}

impl TsdbStatistics {
    /// Statistics of the TSDB head block.
    pub fn head_stats(&self) -> HeadStatistics {
        self.head_stats
    }

    /// Series counts keyed by metric name.
    pub fn series_count_by_metric_name(&self) -> &[TsdbItemCount] {
        &self.series_count_by_metric_name
    }

    /// Label value counts keyed by label name.
    pub fn label_value_count_by_label_name(&self) -> &[TsdbItemCount] {
        &self.label_value_count_by_label_name
    }

    /// Memory usage in bytes keyed by label name.
    pub fn memory_in_bytes_by_label_name(&self) -> &[TsdbItemCount] {
        &self.memory_in_bytes_by_label_name
    }

    /// Series counts keyed by label name=value pair.
    pub fn series_count_by_label_value_pair(&self) -> &[TsdbItemCount] {
        &self.series_count_by_label_value_pair
    }
}
/// Statistics of the TSDB head block.
#[derive(Clone, Copy, Debug, Deserialize)]
pub struct HeadStatistics {
    #[serde(alias = "numSeries")]
    pub(crate) num_series: usize,
    #[serde(alias = "chunkCount")]
    pub(crate) chunk_count: usize,
    // NOTE(review): fixtures (e.g. 1591516800000) suggest Unix
    // milliseconds — confirm against the TSDB status docs.
    #[serde(alias = "minTime")]
    pub(crate) min_time: i64,
    #[serde(alias = "maxTime")]
    pub(crate) max_time: i64,
}

impl HeadStatistics {
    /// Number of series in the head block.
    pub fn num_series(&self) -> usize {
        self.num_series
    }

    /// Number of chunks in the head block.
    pub fn chunk_count(&self) -> usize {
        self.chunk_count
    }

    /// Minimum timestamp held in the head block.
    pub fn min_time(&self) -> i64 {
        self.min_time
    }

    /// Maximum timestamp held in the head block.
    pub fn max_time(&self) -> i64 {
        self.max_time
    }
}
/// One entry of a TSDB cardinality top-list: an item name and its count
/// (or byte size, depending on the list).
#[derive(Clone, Debug, Deserialize)]
pub struct TsdbItemCount {
    pub(crate) name: String,
    pub(crate) value: usize,
}

impl TsdbItemCount {
    /// The item (metric name, label name, or label pair).
    pub fn name(&self) -> &str {
        &self.name
    }

    /// The associated count or size.
    pub fn value(&self) -> usize {
        self.value
    }
}
/// Progress of a write-ahead-log replay, from the WAL replay status
/// endpoint.
#[derive(Clone, Copy, Debug, Deserialize)]
pub struct WalReplayStatistics {
    pub(crate) min: usize,
    pub(crate) max: usize,
    pub(crate) current: usize,
    // Absent in some server responses.
    pub(crate) state: Option<WalReplayState>,
}

impl WalReplayStatistics {
    /// First segment of the replay.
    pub fn min(&self) -> usize {
        self.min
    }

    /// Last segment of the replay.
    pub fn max(&self) -> usize {
        self.max
    }

    /// Segment currently being replayed.
    pub fn current(&self) -> usize {
        self.current
    }

    /// Replay state, if reported.
    pub fn state(&self) -> Option<WalReplayState> {
        self.state
    }
}
/// State of a WAL replay; aliases match the wire strings, including the
/// space in "in progress".
#[derive(Clone, Copy, Debug, Deserialize)]
pub enum WalReplayState {
    #[serde(alias = "waiting")]
    Waiting,
    #[serde(alias = "in progress")]
    InProgress,
    #[serde(alias = "done")]
    Done,
}
#[cfg(test)]
mod tests {
    //! Deserialization tests against captured Prometheus API responses.
    //! Fixtures are copied verbatim; most tests only assert that
    //! deserialization succeeds.
    use super::*;
    use std::collections::HashMap;

    // An error envelope must decode into ApiResponse::Error with the
    // matching error type.
    #[test]
    fn test_api_error_deserialization() {
        let data = r#"
{
"status": "error",
"data": null,
"errorType": "bad_data",
"error": "1:14: parse error: unexpected end of input in aggregation",
"warnings": []
}
"#;
        let result: Result<ApiResponse, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
        assert!(
            matches!(result.unwrap(), ApiResponse::Error(err) if err.error_type == crate::error::ApiErrorType::BadData)
        );
    }

    // A success envelope must decode into ApiResponse::Success.
    #[test]
    fn test_api_success_deserialization() {
        let data = r#"
{
"status": "success",
"data": {
"resultType": "scalar",
"result": [ 0, "0.0" ]
},
"warnings": []
}
"#;
        let result: Result<ApiResponse, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
        assert!(matches!(result.unwrap(), ApiResponse::Success { data: _ }));
    }

    // "status": "error" with a success-shaped body must fail to decode.
    #[test]
    fn test_bad_combination_in_deserialization() {
        let data = r#"
{
"status": "error",
"data": {
"resultType": "scalar",
"result": [ 0, "0.0" ]
},
"warnings": []
}
"#;
        let result: Result<ApiResponse, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_err());
    }

    // "status": "success" carrying error fields must fail to decode.
    // (The fixture is also intentionally malformed JSON — missing comma.)
    #[test]
    fn test_another_bad_combination_in_deserialization() {
        let data = r#"
{
"status": "success",
"warnings": []
"errorType": "bad_data",
"error": "1:14: parse error: unexpected end of input in aggregation",
}
"#;
        let result: Result<ApiResponse, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_err());
    }

    // Matrix result with full stats, including per-step sample counts.
    #[test]
    fn test_query_result_deserialization() {
        let data = r#"
{
"resultType": "matrix",
"result": [
{
"metric": {
"__name__": "up",
"instance": "localhost:9090",
"job": "prometheus"
},
"values": [
[
1659268100,
"1"
],
[
1659268160,
"1"
],
[
1659268220,
"1"
],
[
1659268280,
"1"
]
]
}
],
"stats": {
"timings": {
"evalTotalTime": 0.000102139,
"resultSortTime": 8.7e-07,
"queryPreparationTime": 5.4169e-05,
"innerEvalTime": 3.787e-05,
"execQueueTime": 4.07e-05,
"execTotalTime": 0.000151989
},
"samples": {
"totalQueryableSamplesPerStep": [
[
1659268100,
1
],
[
1659268160,
1
],
[
1659268220,
1
],
[
1659268280,
1
]
],
"totalQueryableSamples": 4,
"peakSamples": 4
}
}
}
"#;
        let result: Result<PromqlResult, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
    }

    // Stats without "totalQueryableSamplesPerStep" (optional field).
    #[test]
    fn test_query_result_no_per_step_stats_deserialization() {
        let data = r#"
{
"resultType": "matrix",
"result": [
{
"metric": {
"__name__": "up",
"instance": "localhost:9090",
"job": "prometheus"
},
"values": [
[
1659268100,
"1"
],
[
1659268160,
"1"
],
[
1659268220,
"1"
],
[
1659268280,
"1"
]
]
}
],
"stats": {
"timings": {
"evalTotalTime": 0.000102139,
"resultSortTime": 8.7e-07,
"queryPreparationTime": 5.4169e-05,
"innerEvalTime": 3.787e-05,
"execQueueTime": 4.07e-05,
"execTotalTime": 0.000151989
},
"samples": {
"totalQueryableSamples": 4,
"peakSamples": 4
}
}
}
"#;
        let result: Result<PromqlResult, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
    }

    // Result with no "stats" object at all (optional field).
    #[test]
    fn test_query_result_no_stats_deserialization() {
        let data = r#"
{
"resultType": "matrix",
"result": [
{
"metric": {
"__name__": "up",
"instance": "localhost:9090",
"job": "prometheus"
},
"values": [
[
1659268100,
"1"
],
[
1659268160,
"1"
],
[
1659268220,
"1"
],
[
1659268280,
"1"
]
]
}
]
}
"#;
        let result: Result<PromqlResult, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
    }

    // Instant vector: one "value" pair per series.
    #[test]
    fn test_instant_vector_deserialization() {
        let data = r#"
[
{
"metric": {
"__name__": "up",
"job": "prometheus",
"instance": "localhost:9090"
},
"value": [
1435781451.781,
"1"
]
},
{
"metric": {
"__name__": "up",
"job": "node",
"instance": "localhost:9100"
},
"value": [
1435781451.781,
"0"
]
}
]
"#;
        let result: Result<Vec<InstantVector>, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
    }

    // Range vector: multiple "values" pairs per series.
    #[test]
    fn test_range_vector_deserialization() {
        let data = r#"
[
{
"metric": {
"__name__": "up",
"job": "prometheus",
"instance": "localhost:9090"
},
"values": [
[
1435781430.781,
"1"
],
[
1435781445.781,
"1"
],
[
1435781460.781,
"1"
]
]
},
{
"metric": {
"__name__": "up",
"job": "node",
"instance": "localhost:9091"
},
"values": [
[
1435781430.781,
"0"
],
[
1435781445.781,
"0"
],
[
1435781460.781,
"1"
]
]
}
]
"#;
        let result: Result<Vec<RangeVector>, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
    }

    // Targets endpoint: active + dropped targets, including RFC 3339
    // timestamps and Prometheus duration strings ("1m", "10s").
    #[test]
    fn test_target_deserialization() {
        let data = r#"
{
"activeTargets": [
{
"discoveredLabels": {
"__address__": "127.0.0.1:9090",
"__metrics_path__": "/metrics",
"__scheme__": "http",
"job": "prometheus"
},
"labels": {
"instance": "127.0.0.1:9090",
"job": "prometheus"
},
"scrapePool": "prometheus",
"scrapeUrl": "http://127.0.0.1:9090/metrics",
"globalUrl": "http://example-prometheus:9090/metrics",
"lastError": "",
"lastScrape": "2017-01-17T15:07:44.723715405+01:00",
"lastScrapeDuration": 0.050688943,
"health": "up",
"scrapeInterval": "1m",
"scrapeTimeout": "10s"
}
],
"droppedTargets": [
{
"discoveredLabels": {
"__address__": "127.0.0.1:9100",
"__metrics_path__": "/metrics",
"__scheme__": "http",
"__scrape_interval__": "1m",
"__scrape_timeout__": "10s",
"job": "node"
}
}
]
}
"#;
        let result: Result<Targets, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
    }

    // Rule groups with both alerting and recording rules; unknown fields
    // like "limit" are ignored.
    #[test]
    fn test_rule_group_deserialization() {
        let data = r#"
[
{
"rules": [
{
"alerts": [
{
"activeAt": "2018-07-04T20:27:12.60602144+02:00",
"annotations": {
"summary": "High request latency"
},
"labels": {
"alertname": "HighRequestLatency",
"severity": "page"
},
"state": "firing",
"value": "1e+00"
}
],
"annotations": {
"summary": "High request latency"
},
"duration": 600,
"health": "ok",
"labels": {
"severity": "page"
},
"name": "HighRequestLatency",
"query": "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5",
"type": "alerting"
},
{
"health": "ok",
"name": "job:http_inprogress_requests:sum",
"query": "sum by (job) (http_inprogress_requests)",
"type": "recording"
}
],
"file": "/rules.yaml",
"interval": 60,
"limit": 0,
"name": "example"
}
]
"#;
        let result: Result<Vec<RuleGroup>, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
    }

    // Standalone alert list.
    #[test]
    fn test_alert_deserialization() {
        let data = r#"
[
{
"activeAt":"2018-07-04T20:27:12.60602144+02:00",
"annotations":{
},
"labels":{
"alertname":"my-alert"
},
"state":"firing",
"value":"1e+00"
}
]
"#;
        let result: Result<Vec<Alert>, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
    }

    // Target metadata without the optional "metric" field.
    #[test]
    fn test_target_metadata_deserialization_1() {
        let data = r#"
[
{
"target": {
"instance": "127.0.0.1:9090",
"job": "prometheus"
},
"type": "gauge",
"help": "Number of goroutines that currently exist.",
"unit": ""
},
{
"target": {
"instance": "127.0.0.1:9091",
"job": "prometheus"
},
"type": "gauge",
"help": "Number of goroutines that currently exist.",
"unit": ""
}
]
"#;
        let result: Result<Vec<TargetMetadata>, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
    }

    // Target metadata including the optional "metric" field.
    #[test]
    fn test_target_metadata_deserialization_2() {
        let data = r#"
[
{
"target": {
"instance": "127.0.0.1:9090",
"job": "prometheus"
},
"metric": "prometheus_treecache_zookeeper_failures_total",
"type": "counter",
"help": "The total number of ZooKeeper failures.",
"unit": ""
},
{
"target": {
"instance": "127.0.0.1:9090",
"job": "prometheus"
},
"metric": "prometheus_tsdb_reloads_total",
"type": "counter",
"help": "Number of times the database reloaded block data from disk.",
"unit": ""
}
]
"#;
        let result: Result<Vec<TargetMetadata>, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
    }

    // Metric metadata: a map from metric name to one or more entries.
    #[test]
    fn test_metric_metadata_deserialization() {
        let data = r#"
{
"cortex_ring_tokens": [
{
"type": "gauge",
"help": "Number of tokens in the ring",
"unit": ""
}
],
"http_requests_total": [
{
"type": "counter",
"help": "Number of HTTP requests",
"unit": ""
},
{
"type": "counter",
"help": "Amount of HTTP requests",
"unit": ""
}
]
}
"#;
        let result: Result<HashMap<String, Vec<MetricMetadata>>, serde_json::Error> =
            serde_json::from_str(data);
        assert!(result.is_ok());
    }

    // Alertmanager discovery: active + dropped endpoints.
    #[test]
    fn test_alertmanagers_deserialization() {
        let data = r#"
{
"activeAlertmanagers": [
{
"url": "http://127.0.0.1:9090/api/v1/alerts"
}
],
"droppedAlertmanagers": [
{
"url": "http://127.0.0.1:9093/api/v1/alerts"
}
]
}
"#;
        let result: Result<Alertmanagers, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
    }

    // Build info, including the custom "20191102-16:19:59" date format.
    #[test]
    fn test_buildinformation_deserialization() {
        let data = r#"
{
"version": "2.13.1",
"revision": "cb7cbad5f9a2823a622aaa668833ca04f50a0ea7",
"branch": "master",
"buildUser": "julius@desktop",
"buildDate": "20191102-16:19:59",
"goVersion": "go1.13.1"
}
"#;
        let result: Result<BuildInformation, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
    }

    // Runtime info; "timeSeriesCount" is not modeled and must be ignored.
    #[test]
    fn test_runtimeinformation_deserialization() {
        let data = r#"
{
"startTime": "2019-11-02T17:23:59.301361365+01:00",
"CWD": "/",
"reloadConfigSuccess": true,
"lastConfigTime": "2019-11-02T17:23:59+01:00",
"timeSeriesCount": 873,
"corruptionCount": 0,
"goroutineCount": 48,
"GOMAXPROCS": 4,
"GOGC": "",
"GODEBUG": "",
"storageRetention": "15d"
}
"#;
        let result: Result<RuntimeInformation, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
    }

    // TSDB status: head stats plus the four cardinality top-lists.
    #[test]
    fn test_tsdb_stats_deserialization() {
        let data = r#"
{
"headStats": {
"numSeries": 508,
"chunkCount": 937,
"minTime": 1591516800000,
"maxTime": 1598896800143
},
"seriesCountByMetricName": [
{
"name": "net_conntrack_dialer_conn_failed_total",
"value": 20
},
{
"name": "prometheus_http_request_duration_seconds_bucket",
"value": 20
}
],
"labelValueCountByLabelName": [
{
"name": "__name__",
"value": 211
},
{
"name": "event",
"value": 3
}
],
"memoryInBytesByLabelName": [
{
"name": "__name__",
"value": 8266
},
{
"name": "instance",
"value": 28
}
],
"seriesCountByLabelValuePair": [
{
"name": "job=prometheus",
"value": 425
},
{
"name": "instance=localhost:9090",
"value": 425
}
]
}
"#;
        let result: Result<TsdbStatistics, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
    }

    // WAL replay status: all three states plus the state-less variant.
    #[test]
    fn test_wal_replay_deserialization() {
        let data = r#"
{
"min": 2,
"max": 5,
"current": 40,
"state": "waiting"
}
"#;
        let result: Result<WalReplayStatistics, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
        let data = r#"
{
"min": 2,
"max": 5,
"current": 40,
"state": "in progress"
}
"#;
        let result: Result<WalReplayStatistics, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
        let data = r#"
{
"min": 2,
"max": 5,
"current": 40,
"state": "done"
}
"#;
        let result: Result<WalReplayStatistics, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
        let data = r#"
{
"min": 2,
"max": 5,
"current": 40
}
"#;
        let result: Result<WalReplayStatistics, serde_json::Error> = serde_json::from_str(data);
        assert!(result.is_ok());
    }
}