Skip to content

Instantly share code, notes, and snippets.

@NickLarsenNZ
Last active April 8, 2024 16:46
Show Gist options
  • Save NickLarsenNZ/fbbb477230e9992e9a4aa2603edcf54f to your computer and use it in GitHub Desktop.
Save NickLarsenNZ/fbbb477230e9992e9a4aa2603edcf54f to your computer and use it in GitHub Desktop.
OTLP Log Export (errors on shutdown)
[package]
name = "log-tracing"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# `log` facade; records are forwarded into `tracing` (via tracing's "log" feature below).
log = { version = "0.4.21", default-features = false }
# OpenTelemetry API; "logs" + "metrics" enable the corresponding signal APIs.
opentelemetry = { version = "0.22.0", features = ["logs", "metrics"] }
# OTLP exporter (gRPC/tonic by default) for shipping logs/metrics to a collector.
opentelemetry-otlp = { version = "0.15.0", features = ["logs", "metrics"] }
# Well-known resource/attribute keys (e.g. `service.name`).
opentelemetry-semantic-conventions = "0.14.0"
# SDK implementation; "rt-tokio" provides the async runtime for the batch processors.
opentelemetry_sdk = { version = "0.22.1", features = ["metrics", "rt-tokio", "logs"] }
tokio = { version = "1.37.0", features = ["full"] }
# "log" feature makes `log::Record`s visible as `tracing::Event`s.
tracing = { version = "0.1.40", features = ["log", "attributes", "std", "tracing-attributes"], default-features = false }
# "env-filter" enables RUST_LOG-style filtering via EnvFilter.
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tracing-opentelemetry = "0.23.0"
# Bridges `tracing` events into OpenTelemetry log records.
opentelemetry-appender-tracing = "0.3.0"
tracing-log = "0.2.0"
use std::time::Duration;
mod telemetry;
/// Minimal reproduction of OTLP log-exporter errors on shutdown.
///
/// The statement ORDER here is the point of the repro: telemetry is
/// initialized, two events are emitted (one via `log`, one via `tracing`),
/// then the global providers are shut down while the Tokio runtime (which
/// hosts the batch processors) is still alive but about to exit.
#[tokio::main]
async fn main() {
// Installs the console layer and the OTLP log pipeline (see telemetry.rs).
telemetry::init();
// Wrapped in an async block purely to mirror a real app's task structure;
// both events below are captured by the subscriber set up in init().
async move {
// `log` record — forwarded to `tracing` via tracing's "log" feature.
log::info!("Hello");
// Native `tracing` event.
tracing::info!("World!");
}
.await;
// Give the batch processors time to flush the events before shutdown.
tokio::time::sleep(Duration::from_secs(2)).await;
opentelemetry::global::shutdown_tracer_provider();
opentelemetry::global::shutdown_logger_provider();
// Without this sleep, I get a different error:
// OpenTelemetry log error occurred. Exporter otlp encountered the following errors: the grpc server returns error (Unknown error): , detailed error message: transport error
tokio::time::sleep(Duration::from_secs(2)).await;
println!("end of main");
// Errors generated here as Tokio shuts down:
// The hyper/h2 internals still emit DEBUG tracing events while the runtime
// tears down; the bridge layer tries to forward them to the (already shut
// down) batch processor, whose channel is closed — hence the repeats below.
// OpenTelemetry log error occurred. cannot send message to batch processor as the channel is closed
// OpenTelemetry log error occurred. cannot send message to batch processor as the channel is closed
// OpenTelemetry log error occurred. cannot send message to batch processor as the channel is closed
// OpenTelemetry log error occurred. cannot send message to batch processor as the channel is closed
// OpenTelemetry log error occurred. cannot send message to batch processor as the channel is closed
// OpenTelemetry log error occurred. cannot send message to batch processor as the channel is closed
}
use opentelemetry::KeyValue;
use opentelemetry_appender_tracing::layer::OpenTelemetryTracingBridge;
use opentelemetry_sdk::{
logs,
Resource,
};
use opentelemetry_semantic_conventions::resource;
use tracing::level_filters::LevelFilter;
use tracing_subscriber::{layer::SubscriberExt as _, EnvFilter, Layer};
/// Initialize global telemetry: a console `fmt` layer plus an OpenTelemetry
/// OTLP log pipeline that receives `tracing` events via the
/// `opentelemetry-appender-tracing` bridge.
///
/// Each layer gets its own `EnvFilter` (both honor `RUST_LOG`, with different
/// defaults: TRACE for the console, DEBUG for the OTLP export).
///
/// # Panics
/// Panics if the OTLP batch pipeline cannot be installed (no Tokio runtime,
/// exporter construction failure) or if a global `tracing` subscriber has
/// already been set.
pub fn init() {
    // Console output filter: default to TRACE, overridable via RUST_LOG.
    let console_env_filter = EnvFilter::builder()
        .with_default_directive(LevelFilter::TRACE.into())
        .from_env_lossy();

    let console_output_layer = tracing_subscriber::fmt::layer().with_filter(console_env_filter);
    let mut layers = vec![console_output_layer.boxed()];

    // // We want to convert log::Records to tracing::Events which will then be part of Spans, and also be able to be shipped as logs
    // tracing_log::LogTracer::init().expect(
    //     "unable to set the Log implementation that would convert log::Record as trace::Event",
    // );

    // OTLP export filter: default to DEBUG, overridable via RUST_LOG.
    let otlp_env_filter = EnvFilter::builder()
        .with_default_directive(LevelFilter::DEBUG.into())
        .from_env_lossy();

    // gRPC (tonic) exporter; endpoint defaults to localhost:4317 unless
    // overridden via the standard OTEL_EXPORTER_OTLP_* env vars.
    let log_exporter = opentelemetry_otlp::new_exporter().tonic();

    // Batch log pipeline on the Tokio runtime, tagged with the service name.
    let otel_log = opentelemetry_otlp::new_pipeline()
        .logging()
        .with_exporter(log_exporter)
        .with_log_config(
            logs::config().with_resource(Resource::new(vec![KeyValue::new(
                resource::SERVICE_NAME,
                "blah",
            )])),
        )
        .install_batch(opentelemetry_sdk::runtime::Tokio)
        .expect("install");

    // Convert `tracing::Event`s to OpenTelemetry logs. `log::Record`s will
    // already have been converted to `tracing::Event`s by the `tracing` crate's
    // "log" feature (the `tracing-log` LogTracer above is the alternative).
    layers.push(
        OpenTelemetryTracingBridge::new(otel_log.provider())
            .with_filter(otlp_env_filter)
            .boxed(),
    );

    tracing::subscriber::set_global_default(tracing_subscriber::registry().with(layers))
        .expect("set global default");
}
@NickLarsenNZ
Copy link
Author

I can see the trace events that occur after the println!("end of main").

❯  RUST_LOG=debug cargo run
   Compiling log-tracing v0.1.0 (.../log-tracing)
    Finished dev [unoptimized + debuginfo] target(s) in 1.27s
     Running `.../log-tracing`
2024-04-08T16:24:28.843685Z  INFO log_tracing: World!
2024-04-08T16:24:28.845053Z DEBUG hyper::client::connect::dns: resolving host="localhost"
2024-04-08T16:24:28.845490Z DEBUG hyper::client::connect::http: connecting to [::1]:4317
2024-04-08T16:24:28.845663Z DEBUG hyper::client::connect::http: connected to [::1]:4317
2024-04-08T16:24:28.845725Z DEBUG h2::client: binding client connection
2024-04-08T16:24:28.845772Z DEBUG h2::client: client connection bound
2024-04-08T16:24:28.845821Z DEBUG h2::codec::framed_write: send frame=Settings { flags: (0x0), enable_push: 0, initial_window_size: 2097152, max_frame_size: 16384 }
2024-04-08T16:24:28.845988Z DEBUG tower::buffer::worker: service.ready=true processing request
2024-04-08T16:24:28.846021Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=WindowUpdate { stream_id: StreamId(0), size_increment: 5177345 }
2024-04-08T16:24:28.846303Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Headers { stream_id: StreamId(1), flags: (0x4: END_HEADERS) }
2024-04-08T16:24:28.846370Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Data { stream_id: StreamId(1) }
2024-04-08T16:24:28.846401Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Data { stream_id: StreamId(1), flags: (0x1: END_STREAM) }
2024-04-08T16:24:28.847371Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Settings { flags: (0x0), max_frame_size: 16384 }
2024-04-08T16:24:28.847430Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Settings { flags: (0x1: ACK) }
2024-04-08T16:24:28.847477Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Settings { flags: (0x1: ACK) }
2024-04-08T16:24:28.847513Z DEBUG Connection{peer=Client}: h2::proto::settings: received settings ACK; applying Settings { flags: (0x0), enable_push: 0, initial_window_size: 2097152, max_frame_size: 16384 }
2024-04-08T16:24:28.847652Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=WindowUpdate { stream_id: StreamId(0), size_increment: 138 }
2024-04-08T16:24:28.847708Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Ping { ack: false, payload: [2, 4, 16, 16, 9, 14, 7, 7] }
2024-04-08T16:24:28.847752Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Ping { ack: true, payload: [2, 4, 16, 16, 9, 14, 7, 7] }
2024-04-08T16:24:28.847912Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Headers { stream_id: StreamId(1), flags: (0x4: END_HEADERS) }
2024-04-08T16:24:28.847981Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Data { stream_id: StreamId(1) }
2024-04-08T16:24:28.848152Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Headers { stream_id: StreamId(1), flags: (0x5: END_HEADERS | END_STREAM) }
2024-04-08T16:24:29.844885Z DEBUG tower::buffer::worker: service.ready=true processing request
2024-04-08T16:24:29.845526Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Headers { stream_id: StreamId(3), flags: (0x4: END_HEADERS) }
2024-04-08T16:24:29.845609Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Data { stream_id: StreamId(3) }
2024-04-08T16:24:29.845675Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Data { stream_id: StreamId(3), flags: (0x1: END_STREAM) }
2024-04-08T16:24:29.846702Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=WindowUpdate { stream_id: StreamId(0), size_increment: 5900 }
2024-04-08T16:24:29.846767Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Ping { ack: false, payload: [2, 4, 16, 16, 9, 14, 7, 7] }
2024-04-08T16:24:29.846816Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Ping { ack: true, payload: [2, 4, 16, 16, 9, 14, 7, 7] }
2024-04-08T16:24:29.846994Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Headers { stream_id: StreamId(3), flags: (0x4: END_HEADERS) }
2024-04-08T16:24:29.847073Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Data { stream_id: StreamId(3) }
2024-04-08T16:24:29.847132Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Headers { stream_id: StreamId(3), flags: (0x5: END_HEADERS | END_STREAM) }
2024-04-08T16:24:30.844987Z DEBUG tower::buffer::worker: service.ready=true processing request
2024-04-08T16:24:30.845554Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Headers { stream_id: StreamId(5), flags: (0x4: END_HEADERS) }
2024-04-08T16:24:30.845650Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Data { stream_id: StreamId(5) }
2024-04-08T16:24:30.845718Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Data { stream_id: StreamId(5), flags: (0x1: END_STREAM) }
2024-04-08T16:24:30.846567Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=WindowUpdate { stream_id: StreamId(0), size_increment: 2900 }
2024-04-08T16:24:30.846636Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Ping { ack: false, payload: [2, 4, 16, 16, 9, 14, 7, 7] }
2024-04-08T16:24:30.846689Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Ping { ack: true, payload: [2, 4, 16, 16, 9, 14, 7, 7] }
2024-04-08T16:24:30.846757Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Headers { stream_id: StreamId(5), flags: (0x4: END_HEADERS) }
2024-04-08T16:24:30.846818Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Data { stream_id: StreamId(5) }
2024-04-08T16:24:30.846868Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Headers { stream_id: StreamId(5), flags: (0x5: END_HEADERS | END_STREAM) }
2024-04-08T16:24:31.844776Z DEBUG tower::buffer::worker: service.ready=true processing request
2024-04-08T16:24:31.845223Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Headers { stream_id: StreamId(7), flags: (0x4: END_HEADERS) }
2024-04-08T16:24:31.845318Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Data { stream_id: StreamId(7) }
2024-04-08T16:24:31.845374Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Data { stream_id: StreamId(7), flags: (0x1: END_STREAM) }
2024-04-08T16:24:31.846337Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=WindowUpdate { stream_id: StreamId(0), size_increment: 2900 }
2024-04-08T16:24:31.846417Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Ping { ack: false, payload: [2, 4, 16, 16, 9, 14, 7, 7] }
2024-04-08T16:24:31.846456Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Ping { ack: true, payload: [2, 4, 16, 16, 9, 14, 7, 7] }
2024-04-08T16:24:31.846519Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Headers { stream_id: StreamId(7), flags: (0x4: END_HEADERS) }
2024-04-08T16:24:31.846583Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Data { stream_id: StreamId(7) }
2024-04-08T16:24:31.846634Z DEBUG Connection{peer=Client}: h2::codec::framed_read: received frame=Headers { stream_id: StreamId(7), flags: (0x5: END_HEADERS | END_STREAM) }
2024-04-08T16:24:32.845093Z DEBUG tower::buffer::worker: service.ready=true processing request
end of main
2024-04-08T16:24:32.846077Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Headers { stream_id: StreamId(9), flags: (0x4: END_HEADERS) }
2024-04-08T16:24:32.846326Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Data { stream_id: StreamId(9) }
2024-04-08T16:24:32.846353Z DEBUG tower::buffer::worker: buffer closing; waking pending tasks
OpenTelemetry log error occurred. cannot send message to batch processor as the channel is closed
OpenTelemetry log error occurred. cannot send message to batch processor as the channel is closed
2024-04-08T16:24:32.846663Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=Data { stream_id: StreamId(9), flags: (0x1: END_STREAM) }
OpenTelemetry log error occurred. cannot send message to batch processor as the channel is closed
2024-04-08T16:24:32.846979Z DEBUG Connection{peer=Client}: h2::codec::framed_write: send frame=GoAway { error_code: NO_ERROR, last_stream_id: StreamId(0) }
OpenTelemetry log error occurred. cannot send message to batch processor as the channel is closed
2024-04-08T16:24:32.847232Z DEBUG Connection{peer=Client}: h2::proto::connection: Connection::poll; connection error error=GoAway(b"", NO_ERROR, Library)
OpenTelemetry log error occurred. cannot send message to batch processor as the channel is closed

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment