photoncloud-monorepo/nightlight/tests/integration_test.rs
centra d2149b6249 fix(lightningstor): Fix SigV4 canonicalization for AWS S3 auth
- Replace form_urlencoded with RFC 3986 compliant URI encoding
- Implement aws_uri_encode() matching AWS SigV4 spec exactly
- Unreserved chars (A-Z,a-z,0-9,-,_,.,~) not encoded
- All other chars percent-encoded with uppercase hex
- Preserve slashes in paths, encode in query params
- Normalize empty paths to '/' per AWS spec
- Fix test expectations (body hash, HMAC values)
- Add comprehensive SigV4 signature determinism test

This fixes the canonicalization mismatch that caused signature
validation failures in T047. Auth can now be enabled for production.

Refs: T058.S1
2025-12-12 06:23:46 +09:00

263 lines
7.9 KiB
Rust

//! Integration tests for Nightlight
//!
//! These tests verify end-to-end functionality of the metrics storage system.
//! Tests cover the full ingestion → storage → query pipeline.
//!
//! # Test Categories
//!
//! - **Ingestion**: Remote write protocol, compression, validation
//! - **Query**: PromQL execution, result formatting
//! - **Storage**: Persistence, compaction, retention
//! - **API**: gRPC and HTTP endpoints
//! - **Security**: mTLS authentication and authorization
//!
//! # Implementation Status
//!
//! Basic unit tests and the S4 query-path tests are implemented below; the
//! full end-to-end suite (HTTP endpoints, mTLS, persistence, compaction,
//! retention) lands in S6 — see the TODO list at the bottom of this file.
// NOTE: `#[cfg(test)]` is redundant for files under `tests/` (they are only
// compiled by `cargo test` anyway), but it is kept for compatibility with the
// existing module layout.
#[cfg(test)]
mod tests {
    use nightlight_api::nightlight::{InstantQueryRequest, RangeQueryRequest};
    use nightlight_api::prometheus::{Label, Sample, TimeSeries, WriteRequest};

    /// Build a minimal `WriteRequest`: a single series labelled
    /// `{__name__="test_metric", job="test"}` carrying one sample (42.0 at a
    /// fixed millisecond timestamp).
    fn create_test_write_request() -> WriteRequest {
        WriteRequest {
            timeseries: vec![TimeSeries {
                labels: vec![
                    Label {
                        name: "__name__".to_string(),
                        value: "test_metric".to_string(),
                    },
                    Label {
                        name: "job".to_string(),
                        value: "test".to_string(),
                    },
                ],
                samples: vec![Sample {
                    value: 42.0,
                    timestamp: 1234567890000,
                }],
            }],
        }
    }

    /// Assert that every query in `queries` is accepted by the PromQL parser.
    ///
    /// Shared by the parsing tests below so the parse-and-check loop is
    /// written once instead of duplicated per test.
    fn assert_all_parse(queries: &[&str]) {
        use promql_parser::parser::Parser;
        for &query in queries {
            let result = Parser::new(query).parse();
            assert!(result.is_ok(), "Failed to parse: {}", query);
        }
    }

    /// The helper produces exactly one series with two labels and one sample.
    #[test]
    fn test_create_write_request() {
        let request = create_test_write_request();
        assert_eq!(request.timeseries.len(), 1);
        assert_eq!(request.timeseries[0].labels.len(), 2);
        assert_eq!(request.timeseries[0].samples.len(), 1);
    }

    /// An `InstantQueryRequest` round-trips the query string it was built with.
    #[test]
    fn test_create_instant_query() {
        let query = InstantQueryRequest {
            query: "test_metric{job='test'}".to_string(),
            time: 1234567890000,
            timeout: 5000,
        };
        assert_eq!(query.query, "test_metric{job='test'}");
    }

    /// A `RangeQueryRequest` preserves its start/end window.
    #[test]
    fn test_create_range_query() {
        let query = RangeQueryRequest {
            query: "rate(test_metric[5m])".to_string(),
            start: 1234567890000,
            end: 1234571490000,
            step: 60000,
            timeout: 10000,
        };
        assert_eq!(query.start, 1234567890000);
        assert_eq!(query.end, 1234571490000);
    }

    // Query API Integration Tests (S4)

    /// A freshly created `QueryService` starts with empty storage.
    #[tokio::test]
    async fn test_query_service_creation() {
        use nightlight_server::query::QueryService;
        let service = QueryService::new();
        assert!(service.storage().read().await.series.is_empty());
    }

    /// An instant query against empty storage succeeds and yields an empty
    /// vector-typed result rather than an error.
    #[tokio::test]
    async fn test_instant_query_empty_storage() {
        use nightlight_server::query::QueryService;
        let service = QueryService::new();
        let result = service.execute_instant_query("up", 1000).await;
        assert!(result.is_ok());
        let query_result = result.unwrap();
        assert_eq!(query_result.result_type, "vector");
        assert!(query_result.result.is_empty());
    }

    /// A range query against empty storage succeeds (empty result, no error).
    #[tokio::test]
    async fn test_range_query_empty_storage() {
        use nightlight_server::query::QueryService;
        let service = QueryService::new();
        let result = service
            .execute_range_query("up", 1000, 2000, 100)
            .await;
        assert!(result.is_ok());
    }

    /// Inserting one series and querying it at its sample timestamp returns
    /// exactly that sample.
    #[tokio::test]
    async fn test_instant_query_with_data() {
        use nightlight_server::query::QueryService;
        use nightlight_types::{Label, Sample, SeriesId, TimeSeries};
        let service = QueryService::new();
        // Seed storage inside a scope so the write guard is dropped before
        // the query takes a read lock.
        {
            let mut storage = service.storage().write().await;
            let series = TimeSeries {
                id: SeriesId(1),
                labels: vec![
                    Label::new("__name__", "test_metric"),
                    Label::new("job", "test_job"),
                ],
                samples: vec![Sample::new(1000, 42.0)],
            };
            storage.upsert_series(series);
        }
        // Query the data back.
        let result = service.execute_instant_query("test_metric", 1000).await;
        assert!(result.is_ok());
        let query_result = result.unwrap();
        assert_eq!(query_result.result.len(), 1);
        assert_eq!(query_result.result[0].value, Some((1000, 42.0)));
    }

    /// `label_values` returns the distinct values stored for a label name.
    #[tokio::test]
    async fn test_label_values_query() {
        use nightlight_server::query::QueryService;
        use nightlight_types::{Label, SeriesId, TimeSeries};
        let service = QueryService::new();
        // Seed one series carrying the label under test; scope drops the
        // write guard before the read below.
        {
            let mut storage = service.storage().write().await;
            let series = TimeSeries {
                id: SeriesId(1),
                labels: vec![
                    Label::new("__name__", "test_metric"),
                    Label::new("environment", "production"),
                    Label::new("job", "api"),
                ],
                samples: vec![],
            };
            storage.upsert_series(series);
        }
        // Query label values.
        {
            let storage = service.storage().read().await;
            let values = storage.label_values("environment");
            assert_eq!(values.len(), 1);
            assert!(values.contains(&"production".to_string()));
        }
    }

    // PromQL Parsing Tests

    /// Bare metric-name selectors parse.
    #[test]
    fn test_promql_simple_selector() {
        assert_all_parse(&["up", "http_requests_total", "node_cpu_seconds_total"]);
    }

    /// Label matchers (exact and regex) parse.
    #[test]
    fn test_promql_label_selector() {
        assert_all_parse(&[
            "http_requests_total{method=\"GET\"}",
            "http_requests_total{method=\"GET\",status=\"200\"}",
            "http_requests_total{job=~\"api.*\"}",
        ]);
    }

    /// The standard aggregation operators parse.
    #[test]
    fn test_promql_aggregation() {
        assert_all_parse(&[
            "sum(http_requests_total)",
            "avg(http_requests_total)",
            "min(http_requests_total)",
            "max(http_requests_total)",
            "count(http_requests_total)",
        ]);
    }

    /// Counter-rate functions over range vectors parse.
    #[test]
    fn test_promql_rate_function() {
        assert_all_parse(&[
            "rate(http_requests_total[5m])",
            "irate(http_requests_total[5m])",
            "increase(http_requests_total[1h])",
        ]);
    }

    /// Bare range selectors with various durations parse.
    #[test]
    fn test_promql_range_selector() {
        assert_all_parse(&[
            "http_requests_total[5m]",
            "http_requests_total[1h]",
            "http_requests_total[24h]",
        ]);
    }

    // TODO (S6): Add more integration tests
    // - [ ] Test HTTP endpoints with Axum test client
    // - [ ] Test mTLS authentication
    // - [ ] Test storage persistence
    // - [ ] Test compaction
    // - [ ] Test retention enforcement
    // - [ ] Test error handling (invalid queries, timeouts)
    // - [ ] Test concurrent writes and queries
    // - [ ] Test backpressure handling
    // - [ ] Test Grafana compatibility
}