Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

suggestions #9

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 5 additions & 15 deletions src/event/format/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ use arrow_array::RecordBatch;
use arrow_schema::{DataType, Field, Schema, TimeUnit};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use serde_json::Value;

use crate::{
metadata::SchemaVersion,
Expand Down Expand Up @@ -92,31 +92,21 @@ impl Display for LogSource {
}
}

/// Contains the format name and a list of known field names that are associated with the said format.
/// Stored on disk as part of `ObjectStoreFormat` in stream.json
#[derive(Default, Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct LogSourceEntry {
    // Which log format produced the events (e.g. OTEL logs/metrics/traces, pmeta) —
    // see the `LogSource` enum declared earlier in this module.
    pub log_source_format: LogSource,
    // Known field names associated with this format; empty when none are predeclared
    // (callers in ingest.rs pass `HashSet::new()` for generic ingestion).
    pub fields: HashSet<String>,
}

impl LogSourceEntry {
pub fn new(log_source_format: &LogSource, fields: HashSet<String>) -> Self {
pub fn new(log_source_format: LogSource, fields: HashSet<String>) -> Self {
LogSourceEntry {
log_source_format: log_source_format.clone(),
log_source_format,
fields,
}
}

pub fn add_log_source(&mut self, log_source_format: LogSource, fields: HashSet<String>) {
self.log_source_format = log_source_format;
self.fields = fields;
}

pub fn to_value(&self) -> Value {
json!([{
"log_source_format": self.log_source_format,
"fields": self.fields,
}])
}
}

// Global Trait for event format
Expand Down
8 changes: 4 additions & 4 deletions src/handlers/http/ingest.rs
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ pub async fn ingest(req: HttpRequest, Json(json): Json<Value>) -> Result<HttpRes
return Err(PostError::OtelNotSupported);
}

let log_source_entry = LogSourceEntry::new(&log_source, HashSet::new());
let log_source_entry = LogSourceEntry::new(log_source.clone(), HashSet::new());
PARSEABLE
.create_stream_if_not_exists(
&stream_name,
Expand Down Expand Up @@ -130,7 +130,7 @@ pub async fn handle_otel_logs_ingestion(
let stream_name = stream_name.to_str().unwrap().to_owned();

let log_source_entry = LogSourceEntry::new(
&log_source,
log_source.clone(),
OTEL_LOG_KNOWN_FIELD_LIST
.iter()
.map(|&s| s.to_string())
Expand Down Expand Up @@ -168,7 +168,7 @@ pub async fn handle_otel_metrics_ingestion(
}
let stream_name = stream_name.to_str().unwrap().to_owned();
let log_source_entry = LogSourceEntry::new(
&log_source,
log_source.clone(),
OTEL_METRICS_KNOWN_FIELD_LIST
.iter()
.map(|&s| s.to_string())
Expand Down Expand Up @@ -207,7 +207,7 @@ pub async fn handle_otel_traces_ingestion(
}
let stream_name = stream_name.to_str().unwrap().to_owned();
let log_source_entry = LogSourceEntry::new(
&log_source,
log_source.clone(),
OTEL_TRACES_KNOWN_FIELD_LIST
.iter()
.map(|&s| s.to_string())
Expand Down
9 changes: 4 additions & 5 deletions src/migration/stream_metadata_migration.rs
Original file line number Diff line number Diff line change
Expand Up @@ -191,14 +191,13 @@ pub fn v5_v6(mut stream_metadata: Value) -> Value {
"version".to_owned(),
Value::String(storage::CURRENT_SCHEMA_VERSION.into()),
);
let log_source = stream_metadata_map.get("log_source");
let mut log_source_entry = LogSourceEntry::default();
if log_source.is_some() {
if let Ok(log_source) = serde_json::from_value::<LogSource>(log_source.unwrap().clone()) {
log_source_entry.add_log_source(log_source, HashSet::new());
if let Some(log_source) = stream_metadata_map.get("log_source") {
if let Ok(log_source) = serde_json::from_value::<LogSource>(log_source.clone()) {
log_source_entry = LogSourceEntry::new(log_source, HashSet::new());
}
}
stream_metadata_map.insert("log_source".to_owned(), log_source_entry.to_value());
stream_metadata_map.insert("log_source".to_owned(), json!(log_source_entry));
stream_metadata
}

Expand Down
4 changes: 2 additions & 2 deletions src/parseable/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -352,7 +352,7 @@ impl Parseable {
}

pub async fn create_internal_stream_if_not_exists(&self) -> Result<(), StreamError> {
let log_source_entry = LogSourceEntry::new(&LogSource::Pmeta, HashSet::new());
let log_source_entry = LogSourceEntry::new(LogSource::Pmeta, HashSet::new());
match self
.create_stream_if_not_exists(
INTERNAL_STREAM_NAME,
Expand Down Expand Up @@ -532,7 +532,7 @@ impl Parseable {
custom_partition.as_ref(),
static_schema_flag,
)?;
let log_source_entry = LogSourceEntry::new(&log_source, HashSet::new());
let log_source_entry = LogSourceEntry::new(log_source, HashSet::new());
self.create_stream(
stream_name.to_string(),
&time_partition,
Expand Down
Loading