Created October 18, 2017 07:05
Save rhysmccaig/6995f073a533d3db73cc18ad87e612cf to your computer and use it in GitHub Desktop.
node-rdkafka-210: Default build
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
make && make test && make e2e | |
Done processing src/binding.cc | |
Done processing src/callbacks.cc | |
Done processing src/common.cc | |
Done processing src/config.cc | |
Done processing src/connection.cc | |
Done processing src/errors.cc | |
Done processing src/kafka-consumer.cc | |
Done processing src/producer.cc | |
Done processing src/topic.cc | |
Done processing src/workers.cc | |
Done processing src/binding.h | |
Done processing src/callbacks.h | |
Done processing src/common.h | |
Done processing src/config.h | |
Done processing src/connection.h | |
Done processing src/errors.h | |
Done processing src/kafka-consumer.h | |
Done processing src/producer.h | |
Done processing src/topic.h | |
Done processing src/workers.h | |
Total errors found: 0 | |
gyp info it worked if it ends with ok | |
gyp info using node-gyp@3.6.2 | |
gyp info using node@8.5.0 | darwin | x64 | |
gyp info spawn /usr/local/bin/python2 | |
gyp info spawn args [ '/Users/mccaig/Development/node-rdkafka/node_modules/node-gyp/gyp/gyp_main.py', | |
gyp info spawn args 'binding.gyp', | |
gyp info spawn args '-f', | |
gyp info spawn args 'make', | |
gyp info spawn args '-I', | |
gyp info spawn args '/Users/mccaig/Development/node-rdkafka/build/config.gypi', | |
gyp info spawn args '-I', | |
gyp info spawn args '/Users/mccaig/Development/node-rdkafka/node_modules/node-gyp/addon.gypi', | |
gyp info spawn args '-I', | |
gyp info spawn args '/Users/mccaig/.node-gyp/8.5.0/include/node/common.gypi', | |
gyp info spawn args '-Dlibrary=shared_library', | |
gyp info spawn args '-Dvisibility=default', | |
gyp info spawn args '-Dnode_root_dir=/Users/mccaig/.node-gyp/8.5.0', | |
gyp info spawn args '-Dnode_gyp_dir=/Users/mccaig/Development/node-rdkafka/node_modules/node-gyp', | |
gyp info spawn args '-Dnode_lib_file=/Users/mccaig/.node-gyp/8.5.0/<(target_arch)/node.lib', | |
gyp info spawn args '-Dmodule_root_dir=/Users/mccaig/Development/node-rdkafka', | |
gyp info spawn args '-Dnode_engine=v8', | |
gyp info spawn args '--depth=.', | |
gyp info spawn args '--no-parallel', | |
gyp info spawn args '--generator-output', | |
gyp info spawn args 'build', | |
gyp info spawn args '-Goutput_dir=.' ] | |
gyp info ok | |
gyp info it worked if it ends with ok | |
gyp info using node-gyp@3.6.2 | |
gyp info using node@8.5.0 | darwin | x64 | |
gyp info spawn make | |
gyp info spawn args [ 'BUILDTYPE=Release', '-C', 'build' ] | |
ACTION configuring librdkafka... deps/librdkafka/config.h | |
using cache file config.cache | |
checking for OS or distribution... ok (osx) | |
checking for C compiler from CC env... failed | |
checking for gcc (by command)... ok (cached) | |
checking for C++ compiler from CXX env... failed | |
checking for C++ compiler (g++)... ok (cached) | |
checking executable ld... ok (cached) | |
checking executable nm... ok (cached) | |
checking executable objdump... ok (cached) | |
checking executable strip... ok (cached) | |
checking for debug symbols compiler flag (-g...)... ok (cached) | |
checking for pkgconfig (by command)... ok (cached) | |
checking for install (by command)... failed | |
checking for PIC (by compile)... ok (cached) | |
checking for GNU-compatible linker options... failed | |
checking for OSX linker options... ok (cached) | |
checking for GNU linker-script ld flag... failed | |
checking for Solaris linker-script ld flag... failed (ignore) | |
checking for __atomic_32 (by compile)... ok (cached) | |
checking for __atomic_64 (by compile)... ok (cached) | |
checking for socket (by compile)... ok (cached) | |
parsing version '0x000b01ff'... ok (0.11.1) | |
checking for libpthread (by pkg-config)... failed | |
checking for libpthread (by compile)... ok (cached) | |
checking for zlib (by pkg-config)... ok | |
checking for zlib (by compile)... ok (cached) | |
checking for libcrypto (by pkg-config)... failed | |
checking for libcrypto (by compile)... failed (disable) | |
checking for liblz4 (by pkg-config)... ok | |
checking for liblz4 (by compile)... ok (cached) | |
checking for libssl (by pkg-config)... ok | |
checking for libssl (by compile)... ok (cached) | |
checking for libsasl2 (by pkg-config)... failed | |
checking for libsasl2 (by compile)... ok (cached) | |
checking for crc32chw (by compile)... ok (cached) | |
checking for regex (by compile)... ok (cached) | |
checking for librt (by pkg-config)... failed | |
checking for librt (by compile)... failed | |
checking for strndup (by compile)... ok (cached) | |
checking for strerror_r (by compile)... ok (cached) | |
checking for libdl (by pkg-config)... failed | |
checking for libdl (by compile)... ok (cached) | |
checking for nm (by env NM)... ok (cached) | |
Generated Makefile.config | |
Generated config.h | |
Configuration summary: | |
prefix /usr/local | |
ARCH x86_64 | |
CPU generic | |
GEN_PKG_CONFIG y | |
ENABLE_DEVEL n | |
ENABLE_VALGRIND n | |
ENABLE_REFCNT_DEBUG n | |
ENABLE_SHAREDPTR_DEBUG n | |
ENABLE_LZ4_EXT y | |
ENABLE_SSL y | |
ENABLE_SASL y | |
MKL_APP_NAME librdkafka | |
MKL_APP_DESC_ONELINE The Apache Kafka C/C++ library | |
MKL_DISTRO osx | |
SOLIB_EXT .dylib | |
CC gcc | |
CXX g++ | |
LD ld | |
NM nm | |
OBJDUMP objdump | |
STRIP strip | |
CPPFLAGS -gstrict-dwarf -O2 -fPIC -Wall -Wsign-compare -Wfloat-equal -Wpointer-arith -Wcast-align | |
PKG_CONFIG pkg-config | |
INSTALL install | |
LIB_LDFLAGS -shared -dynamiclib -Wl,-install_name,$(DESTDIR)$(libdir)/$(LIBFILENAME) | |
RDKAFKA_VERSION_STR 0.11.1 | |
MKL_APP_VERSION 0.11.1 | |
LIBS -lpthread -lz -L/usr/local/Cellar/lz4/1.8.0/lib -llz4 -L/usr/local/Cellar/openssl/1.0.2l/lib -lssl -lsasl2 -ldl | |
CFLAGS -I/usr/local/Cellar/lz4/1.8.0/include -I/usr/local/Cellar/openssl/1.0.2l/include | |
CXXFLAGS -Wno-non-virtual-dtor | |
SYMDUMPER $(NM) -g | |
exec_prefix /usr/local | |
bindir /usr/local/bin | |
sbindir /usr/local/sbin | |
libexecdir /usr/local/libexec | |
datadir /usr/local/share | |
sysconfdir /usr/local/etc | |
sharedstatedir /usr/local/com | |
localstatedir /usr/local/var | |
libdir /usr/local/lib | |
includedir /usr/local/include | |
infodir /usr/local/info | |
mandir /usr/local/man | |
Generated config.cache | |
Now type 'make' to build | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/crc32c.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdaddr.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdavl.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdbuf.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdcrc32.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rddl.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdgz.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_assignor.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_broker.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_buf.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_cgrp.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_conf.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_event.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_feature.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_interceptor.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_lz4.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_metadata.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_metadata_cache.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_msg.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_msgset_reader.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_msgset_writer.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_offset.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_op.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_partition.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_pattern.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_plugin.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_queue.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_range_assignor.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_request.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_roundrobin_assignor.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_subscription.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_timer.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_topic.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_transport.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdlist.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdlog.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdports.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdrand.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdregex.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdstring.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdunittest.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdvarint.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/regexp.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/snappy.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/tinycthread.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_sasl.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_sasl_cyrus.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_sasl_plain.o | |
CC(target) Release/obj.target/librdkafka/deps/librdkafka/src/rdkafka_sasl_scram.o | |
LIBTOOL-STATIC Release/rdkafka.a | |
SOLINK_MODULE(target) Release/node-librdkafka.node | |
gyp info ok | |
✓ super_ | |
native addon | |
✓ exports something | |
✓ exports valid producer | |
✓ exports valid consumer | |
✓ exports version | |
Consumer | |
✓ cannot be set without a topic config | |
✓ can be given a topic config | |
✓ throws us an error if we provide an invalid configuration value | |
✓ throws us an error if topic config is given something invalid | |
✓ ignores function arguments for global configuration | |
✓ ignores function arguments for topic configuration | |
KafkaConsumer client | |
✓ is an object | |
✓ requires configuration | |
✓ has necessary methods from superclass | |
✓ has necessary bindings for librdkafka 1:1 binding | |
KafkaConsumerStream stream | |
✓ exports a stream class | |
✓ can be instantiated | |
✓ properly reads off the fake client | |
✓ properly reads off the fake with a topic function | |
✓ properly reads correct number of messages but does not stop | |
✓ can be piped around | |
✓ streams as batch when specified | |
✓ stops reading on unsubscribe | |
✓ calls the callback on destroy | |
ProducerStream stream | |
✓ exports a stream class | |
in buffer mode | |
✓ requires a topic be provided when running in buffer mode | |
✓ can be instantiated | |
✓ does not run connect if the client is already connected | |
✓ does run connect if the client is not already connected | |
✓ automatically disconnects when autoclose is not provided | |
✓ does not automatically disconnect when autoclose is set to false | |
✓ properly reads off the fake client | |
✓ passes a topic string if options are not provided | |
✓ properly handles queue errors (505ms) | |
✓ errors out when a non-queue related error occurs | |
✓ errors out when a non-queue related error occurs but does not disconnect if autoclose is false | |
✓ properly reads more than one message in order | |
✓ can be piped into a readable | |
in objectMode | |
✓ can be instantiated | |
✓ properly produces message objects | |
✓ properly handles queue errors (504ms) | |
✓ errors out when a non-queue related error occurs | |
✓ errors out when a non-queue related error occurs but does not disconnect if autoclose is false | |
✓ properly reads more than one message in order | |
✓ can be piped into a readable | |
Producer client | |
✓ is an object | |
✓ requires configuration | |
✓ has necessary methods from superclass | |
48 passing (1s) | |
Consumer/Producer | |
✓ should be able to produce, consume messages, read position: subscribe/consumeOnce (1571ms) | |
✓ should be able to produce and consume messages: consumeLoop (2007ms) | |
✓ should be able to produce and consume messages: empty key and empty value (3009ms) | |
✓ should be able to produce and consume messages: null key and null value (3012ms) | |
Exceptional case - offset_commit_cb true | |
✓ should async commit after consuming (10627ms) | |
Exceptional case - offset_commit_cb function | |
✓ should callback offset_commit_cb after commit (4015ms) | |
Consumer | |
committed and position | |
✓ before assign, committed offsets are empty | |
✓ before assign, position returns an empty array | |
✓ after assign, should get committed array without offsets (1022ms) | |
✓ after assign and commit, should get committed offsets | |
✓ after assign, before consume, position should return an array without offsets | |
✓ should obey the timeout | |
subscribe | |
✓ should be able to subscribe | |
✓ should be able to unsusbcribe | |
assign | |
✓ should be able to take an assignment | |
✓ should be able to take an empty assignment | |
disconnect | |
✓ should happen gracefully (111ms) | |
✓ should happen without issue after subscribing (118ms) | |
✓ should happen without issue after consuming (310ms) | |
✓ should happen without issue after consuming an error (112ms) | |
Consumer group/Producer | |
✓ should be able to commit, read committed and restart from the committed offset (13635ms) | |
Producer | |
with dr_cb | |
✓ should connect to Kafka | |
✓ should produce a message with a null payload and null key (1012ms) | |
✓ should produce a message with a payload and key (1015ms) | |
✓ should produce a message with a payload and key buffer (1015ms) | |
✓ should produce a message with an opaque (1005ms) | |
✓ should get 100% deliverability (1255ms) | |
with_dr_msg_cb | |
✓ should produce a message with a payload and key (1017ms) | |
✓ should produce a message with an empty payload and empty key (https://github.com/Blizzard/node-rdkafka/issues/117) (1013ms) | |
✓ should produce a message with a null payload and null key (https://github.com/Blizzard/node-rdkafka/issues/117) (1009ms) | |
✓ should produce an int64 key (https://github.com/Blizzard/node-rdkafka/issues/208) | |
31 passing (58s) | |
✓ super_ | |
native addon | |
✓ exports something | |
✓ exports valid producer | |
✓ exports valid consumer | |
✓ exports version | |
Consumer | |
✓ cannot be set without a topic config | |
✓ can be given a topic config | |
✓ throws us an error if we provide an invalid configuration value | |
✓ throws us an error if topic config is given something invalid | |
✓ ignores function arguments for global configuration | |
✓ ignores function arguments for topic configuration | |
KafkaConsumer client | |
✓ is an object | |
✓ requires configuration | |
✓ has necessary methods from superclass | |
✓ has necessary bindings for librdkafka 1:1 binding | |
KafkaConsumerStream stream | |
✓ exports a stream class | |
✓ can be instantiated | |
✓ properly reads off the fake client | |
✓ properly reads off the fake with a topic function | |
✓ properly reads correct number of messages but does not stop | |
✓ can be piped around | |
✓ streams as batch when specified | |
✓ stops reading on unsubscribe | |
✓ calls the callback on destroy | |
ProducerStream stream | |
✓ exports a stream class | |
in buffer mode | |
✓ requires a topic be provided when running in buffer mode | |
✓ can be instantiated | |
✓ does not run connect if the client is already connected | |
✓ does run connect if the client is not already connected | |
✓ automatically disconnects when autoclose is not provided | |
✓ does not automatically disconnect when autoclose is set to false | |
✓ properly reads off the fake client | |
✓ passes a topic string if options are not provided | |
✓ properly handles queue errors (503ms) | |
✓ errors out when a non-queue related error occurs | |
✓ errors out when a non-queue related error occurs but does not disconnect if autoclose is false | |
✓ properly reads more than one message in order | |
✓ can be piped into a readable | |
in objectMode | |
✓ can be instantiated | |
✓ properly produces message objects | |
✓ properly handles queue errors (504ms) | |
✓ errors out when a non-queue related error occurs | |
✓ errors out when a non-queue related error occurs but does not disconnect if autoclose is false | |
✓ properly reads more than one message in order | |
✓ can be piped into a readable | |
Producer client | |
✓ is an object | |
✓ requires configuration | |
✓ has necessary methods from superclass | |
48 passing (1s) | |
Consumer/Producer | |
✓ should be able to produce, consume messages, read position: subscribe/consumeOnce (2126ms) | |
✓ should be able to produce and consume messages: consumeLoop (2006ms) | |
✓ should be able to produce and consume messages: empty key and empty value (3010ms) | |
✓ should be able to produce and consume messages: null key and null value (3016ms) | |
Exceptional case - offset_commit_cb true | |
✓ should async commit after consuming (10627ms) | |
Exceptional case - offset_commit_cb function | |
✓ should callback offset_commit_cb after commit (4014ms) | |
Consumer | |
committed and position | |
✓ before assign, committed offsets are empty | |
✓ before assign, position returns an empty array | |
✓ after assign, should get committed array without offsets (1021ms) | |
✓ after assign and commit, should get committed offsets | |
✓ after assign, before consume, position should return an array without offsets | |
✓ should obey the timeout | |
subscribe | |
✓ should be able to subscribe | |
✓ should be able to unsusbcribe | |
assign | |
✓ should be able to take an assignment | |
✓ should be able to take an empty assignment | |
disconnect | |
✓ should happen gracefully (109ms) | |
✓ should happen without issue after subscribing (109ms) | |
✓ should happen without issue after consuming (242ms) | |
✓ should happen without issue after consuming an error (108ms) | |
Consumer group/Producer | |
✓ should be able to commit, read committed and restart from the committed offset (13679ms) | |
Producer | |
with dr_cb | |
✓ should connect to Kafka | |
✓ should produce a message with a null payload and null key (1014ms) | |
✓ should produce a message with a payload and key (1010ms) | |
✓ should produce a message with a payload and key buffer (1008ms) | |
✓ should produce a message with an opaque (1011ms) | |
✓ should get 100% deliverability (1075ms) | |
with_dr_msg_cb | |
✓ should produce a message with a payload and key (1006ms) | |
✓ should produce a message with an empty payload and empty key (https://github.com/Blizzard/node-rdkafka/issues/117) (1014ms) | |
✓ should produce a message with a null payload and null key (https://github.com/Blizzard/node-rdkafka/issues/117) (1009ms) | |
✓ should produce an int64 key (https://github.com/Blizzard/node-rdkafka/issues/208) | |
31 passing (58s) |
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.