feat(config): Global Default Log Schemas #1769

Merged
merged 18 commits into from Feb 14, 2020

Changes from 1 commit
61 changes: 61 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -143,6 +143,7 @@ evmap = { version = "7", features = ["bytes"] }
logfmt = "0.0.2"
notify = "4.0.14"
once_cell = "1.3"
getset = "0.1.0"
Contributor Author

When you get to import your own library!


[target.'cfg(unix)'.dependencies]
atty = "0.2"
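For context on the new dependency: getset derives per-field accessor methods at compile time. Below is a minimal sketch of what its Getters/Setters derives produce, assuming getset 0.1; String stands in here for Vector's Atom type, and the struct is trimmed to one field for illustration.

use getset::{Getters, Setters};

#[derive(Getters, Setters)]
struct LogSchema {
    // `get = "pub"` derives `pub fn message_key(&self) -> &String`;
    // `set = "pub"` derives `pub fn set_message_key(&mut self, val: String) -> &mut Self`.
    #[getset(get = "pub", set = "pub")]
    message_key: String,
}

fn main() {
    let mut schema = LogSchema { message_key: "message".to_string() };
    schema.set_message_key("msg".to_string());
    assert_eq!(schema.message_key(), "msg");
}

This is why call sites throughout this diff change from field access (.message_key) to method calls (.message_key()).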
4 changes: 2 additions & 2 deletions benches/bench.rs
@@ -623,7 +623,7 @@ fn bench_elasticsearch_index(c: &mut Criterion) {
let mut event = Event::from("hello world");
event
.as_mut_log()
.insert(event::log_schema().timestamp_key.clone(), Utc::now());
.insert(event::log_schema().timestamp_key().clone(), Utc::now());

(Template::from("index-%Y.%m.%d"), event)
},
@@ -640,7 +640,7 @@ fn bench_elasticsearch_index(c: &mut Criterion) {
let mut event = Event::from("hello world");
event
.as_mut_log()
.insert(event::log_schema().timestamp_key.clone(), Utc::now());
.insert(event::log_schema().timestamp_key().clone(), Utc::now());

(Template::from("index"), event)
},
2 changes: 1 addition & 1 deletion benches/event.rs
@@ -75,7 +75,7 @@ fn create_event(json: Value) -> LogEvent {
let mut event = Event::new_empty_log();
event
.as_mut_log()
.insert(event::log_schema().message_key.clone(), s);
.insert(event::log_schema().message_key().clone(), s);

let mut parser = JsonParser::from(JsonParserConfig::default());
parser.transform(event).unwrap().into_log()
25 changes: 15 additions & 10 deletions src/event/mod.rs
@@ -1,6 +1,7 @@
use self::proto::{event_wrapper::Event as EventProto, metric::Value as MetricProto, Log};
use bytes::Bytes;
use chrono::{DateTime, SecondsFormat, TimeZone, Utc};
use getset::{Getters, Setters};
use lazy_static::lazy_static;
use metric::{MetricKind, MetricValue};
use once_cell::sync::OnceCell;
@@ -23,8 +24,9 @@ pub mod proto {
include!(concat!(env!("OUT_DIR"), "/event.proto.rs"));
}

pub static LOG_SCHEMA: OnceCell<LogSchema> = OnceCell::new();

lazy_static! {
pub static ref LOG_SCHEMA: OnceCell<LogSchema> = OnceCell::new();
pub static ref PARTIAL: Atom = Atom::from("_partial");
}

@@ -158,7 +160,7 @@ impl<K: Into<Atom>, V: Into<Value>> FromIterator<(K, V)> for LogEvent {
}
}

pub fn log_schema<'a>() -> &'a LogSchema {
pub fn log_schema() -> &'static LogSchema {
// TODO: Help Rust project support before_each
// Support uninitialized schemas in tests to help our contributors.
// Don't do it in release because that is scary.
@@ -172,11 +174,14 @@ pub fn log_schema<'a>() -> &'a LogSchema {
LOG_SCHEMA.get().expect("Schema was not initialized")
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Getters, Setters)]
pub struct LogSchema {
pub message_key: Atom,
pub timestamp_key: Atom,
pub host_key: Atom,
#[getset(get = "pub", set = "pub")]
message_key: Atom,
#[getset(get = "pub", set = "pub")]
timestamp_key: Atom,
#[getset(get = "pub", set = "pub")]
host_key: Atom,
}

impl Default for LogSchema {
@@ -513,7 +518,7 @@ impl From<Event> for Vec<u8> {
fn from(event: Event) -> Vec<u8> {
event
.into_log()
.remove(&log_schema().message_key)
.remove(&log_schema().message_key())
.unwrap()
.as_bytes()
.to_vec()
@@ -528,10 +533,10 @@ impl From<Bytes> for Event {

event
.as_mut_log()
.insert(log_schema().message_key.clone(), message);
.insert(log_schema().message_key().clone(), message);
event
.as_mut_log()
.insert(log_schema().timestamp_key.clone(), Utc::now());
.insert(log_schema().timestamp_key().clone(), Utc::now());

event
}
@@ -599,7 +604,7 @@ mod test {
"message": "raw log line",
"foo": "bar",
"bar": "baz",
"timestamp": event.as_log().get(&super::log_schema().timestamp_key),
"timestamp": event.as_log().get(&super::log_schema().timestamp_key()),
});

let actual_all = serde_json::to_value(event.as_log().all_fields()).unwrap();
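Taken together, the changes in this file move LOG_SCHEMA from a lazy_static block to a plain OnceCell static, let log_schema() return a &'static reference, and make the LogSchema fields private behind derived getters and setters. A minimal sketch of the resulting set-once/read-everywhere pattern, assuming once_cell 1.x; the actual set call happens in Vector's config loading, which is not part of this file:

use once_cell::sync::OnceCell;

#[derive(Debug)]
struct LogSchema {
    message_key: String,
}

static LOG_SCHEMA: OnceCell<LogSchema> = OnceCell::new();

fn log_schema() -> &'static LogSchema {
    LOG_SCHEMA.get().expect("Schema was not initialized")
}

fn main() {
    // Done once at startup; a second `set` returns Err with the rejected value.
    LOG_SCHEMA
        .set(LogSchema { message_key: "message".to_string() })
        .expect("schema already initialized");

    // Every later caller sees the same &'static value.
    assert_eq!(log_schema().message_key, "message");
}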
2 changes: 1 addition & 1 deletion src/event/unflatten.rs
@@ -398,7 +398,7 @@ mod tests {
fn unflatten_arbitrary(json in prop::json()) {
let s = serde_json::to_string(&json).unwrap();
let mut event = Event::new_empty_log();
event.as_mut_log().insert(event::log_schema().message_key.clone(), s);
event.as_mut_log().insert(event::log_schema().message_key().clone(), s);

let mut parser = JsonParser::from(JsonParserConfig::default());
let event = parser.transform(event).unwrap().into_log();
8 changes: 4 additions & 4 deletions src/sinks/aws_cloudwatch_logs/mod.rs
@@ -248,7 +248,7 @@ impl CloudwatchLogsSvc {

pub fn encode_log(&self, mut log: LogEvent) -> InputLogEvent {
let timestamp =
if let Some(Value::Timestamp(ts)) = log.remove(&event::log_schema().timestamp_key) {
if let Some(Value::Timestamp(ts)) = log.remove(&event::log_schema().timestamp_key()) {
ts.timestamp_millis()
} else {
chrono::Utc::now().timestamp_millis()
@@ -261,7 +261,7 @@ impl CloudwatchLogsSvc {
}
Encoding::Text => {
let message = log
.get(&event::log_schema().message_key)
.get(&event::log_schema().message_key())
.map(|v| v.to_string_lossy())
.unwrap_or_else(|| "".into());
InputLogEvent { message, timestamp }
@@ -689,7 +689,7 @@ mod tests {
event.insert("key", "value");
let encoded = svc(Default::default()).encode_log(event.clone());

let ts = if let Value::Timestamp(ts) = event[&event::log_schema().timestamp_key] {
let ts = if let Value::Timestamp(ts) = event[&event::log_schema().timestamp_key()] {
ts.timestamp_millis()
} else {
panic!()
@@ -708,7 +708,7 @@ mod tests {
event.insert("key", "value");
let encoded = svc(config).encode_log(event.clone());
let map: HashMap<Atom, String> = serde_json::from_str(&encoded.message[..]).unwrap();
assert!(map.get(&event::log_schema().timestamp_key).is_none());
assert!(map.get(&event::log_schema().timestamp_key()).is_none());
}

#[test]
4 changes: 2 additions & 2 deletions src/sinks/aws_kinesis_firehose.rs
@@ -213,7 +213,7 @@ fn encode_event(event: Event, encoding: &Encoding) -> Option<Record> {
}

Encoding::Text => log
.get(&event::log_schema().message_key)
.get(&event::log_schema().message_key())
.map(|v| v.as_bytes().to_vec())
.unwrap_or_default(),
};
@@ -246,7 +246,7 @@ mod tests {

let map: HashMap<String, String> = serde_json::from_slice(&event.data[..]).unwrap();

assert_eq!(map[&event::log_schema().message_key.to_string()], message);
assert_eq!(map[&event::log_schema().message_key().to_string()], message);
assert_eq!(map["key"], "value".to_string());
}
}
4 changes: 2 additions & 2 deletions src/sinks/aws_kinesis_streams.rs
@@ -250,7 +250,7 @@ fn encode_event(
}

Encoding::Text => log
.get(&event::log_schema().message_key)
.get(&event::log_schema().message_key())
.map(|v| v.as_bytes().to_vec())
.unwrap_or_default(),
};
@@ -299,7 +299,7 @@ mod tests {

let map: HashMap<String, String> = serde_json::from_slice(&event.data[..]).unwrap();

assert_eq!(map[&event::log_schema().message_key.to_string()], message);
assert_eq!(map[&event::log_schema().message_key().to_string()], message);
assert_eq!(map["key"], "value".to_string());
}

4 changes: 2 additions & 2 deletions src/sinks/aws_s3.rs
@@ -400,7 +400,7 @@ fn encode_event(
.expect("Failed to encode event as json, this is a bug!"),
&Encoding::Text => {
let mut bytes = log
.get(&event::log_schema().message_key)
.get(&event::log_schema().message_key())
.map(|v| v.as_bytes().to_vec())
.unwrap_or_default();
bytes.push(b'\n');
@@ -442,7 +442,7 @@ mod tests {
let (bytes, _) = bytes.into_parts();
let map: HashMap<String, String> = serde_json::from_slice(&bytes[..]).unwrap();

assert_eq!(map[&event::log_schema().message_key.to_string()], message);
assert_eq!(map[&event::log_schema().message_key().to_string()], message);
assert_eq!(map["key"], "value".to_string());
}

2 changes: 1 addition & 1 deletion src/sinks/blackhole.rs
@@ -62,7 +62,7 @@ impl Sink for BlackholeSink {
fn start_send(&mut self, item: Self::SinkItem) -> StartSend<Self::SinkItem, Self::SinkError> {
let message_len = match item {
Event::Log(log) => log
.get(&event::log_schema().message_key)
.get(&event::log_schema().message_key())
.map(|v| v.as_bytes().len())
.unwrap_or(0),
Event::Metric(metric) => serde_json::to_string(&metric).map(|v| v.len()).unwrap_or(0),
7 changes: 1 addition & 6 deletions src/sinks/console.rs
@@ -77,12 +77,7 @@ fn encode_event(event: Event, encoding: &Encoding) -> Result<String, ()> {
}
Encoding::Text => {
let s = log
.get(
&event::LOG_SCHEMA
.get()
.expect("schema is not initialized")
.message_key,
)
.get(&event::log_schema().message_key())
.map(|v| v.to_string_lossy())
.unwrap_or_else(|| "".into());
Ok(s)
2 changes: 1 addition & 1 deletion src/sinks/elasticsearch.rs
@@ -478,7 +478,7 @@ mod integration_tests {
"message": "raw log line",
"my_id": "42",
"foo": "bar",
"timestamp": input_event.as_log()[&event::log_schema().timestamp_key],
"timestamp": input_event.as_log()[&event::log_schema().timestamp_key()],
});
assert_eq!(expected, value);
}
2 changes: 1 addition & 1 deletion src/sinks/file/file.rs
@@ -64,7 +64,7 @@ impl File {
.map(Bytes::from)
.expect("Unable to encode event as JSON."),
Encoding::Text => log
.get(&event::log_schema().message_key)
.get(&event::log_schema().message_key())
.map(|v| v.as_bytes())
.unwrap_or_default(),
}
16 changes: 8 additions & 8 deletions src/sinks/file/mod.rs
@@ -264,35 +264,35 @@ mod tests {
];

assert_eq!(
input[0].as_log()[&event::log_schema().message_key],
input[0].as_log()[&event::log_schema().message_key()],
From::<&str>::from(&output[0][0])
);
assert_eq!(
input[1].as_log()[&event::log_schema().message_key],
input[1].as_log()[&event::log_schema().message_key()],
From::<&str>::from(&output[1][0])
);
assert_eq!(
input[2].as_log()[&event::log_schema().message_key],
input[2].as_log()[&event::log_schema().message_key()],
From::<&str>::from(&output[0][1])
);
assert_eq!(
input[3].as_log()[&event::log_schema().message_key],
input[3].as_log()[&event::log_schema().message_key()],
From::<&str>::from(&output[3][0])
);
assert_eq!(
input[4].as_log()[&event::log_schema().message_key],
input[4].as_log()[&event::log_schema().message_key()],
From::<&str>::from(&output[2][0])
);
assert_eq!(
input[5].as_log()[&event::log_schema().message_key],
input[5].as_log()[&event::log_schema().message_key()],
From::<&str>::from(&output[2][1])
);
assert_eq!(
input[6].as_log()[&event::log_schema().message_key],
input[6].as_log()[&event::log_schema().message_key()],
From::<&str>::from(&output[4][0])
);
assert_eq!(
input[7].as_log()[&event::log_schema().message_key],
input[7].as_log()[&event::log_schema().message_key()],
From::<&str>::from(&output[5][0])
);
}