package main

import (
	"encoding/base64"
	"encoding/json"
	"log"
	"strings"
	"sync"

	"github.com/Shopify/sarama"
)

import base64
import json

from confluent_kafka import Producer

msk_topic = 'my-msk-topic'
msk_broker_list = 'my-msk-broker-1:9092,my-msk-broker-2:9092'  # Replace with your own broker list

# Create a Kafka producer with the specified broker list
producer_conf = {
    'bootstrap.servers': msk_broker_list,
}
producer = Producer(producer_conf)
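
The confluent_kafka preview stops at the producer configuration. As a minimal usage sketch, the lines below publish one record through that producer and wait for broker acknowledgement via a delivery callback; the payload and callback name are illustrative assumptions, not taken from the gist.

def delivery_report(err, msg):
    # Called once per message by poll()/flush() with the delivery result.
    if err is not None:
        print(f'Delivery failed: {err}')
    else:
        print(f'Delivered to {msg.topic()} [{msg.partition()}] at offset {msg.offset()}')

record = {'source': 'example', 'payload': 'hello'}  # illustrative payload
producer.produce(msk_topic, value=json.dumps(record).encode('utf-8'), callback=delivery_report)
producer.poll(0)   # serve delivery callbacks for already-acknowledged messages
producer.flush()   # block until all outstanding messages are delivered
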
import base64
import json

import boto3
from kafka import KafkaProducer

msk_topic = 'my-msk-topic'
msk_broker_list = ['my-msk-broker-1:9092', 'my-msk-broker-2:9092']  # Replace with your own broker list

# Create a Kafka producer with the specified broker list
producer = KafkaProducer(bootstrap_servers=msk_broker_list)
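
The kafka-python snippet above only builds the producer; the base64 and json imports suggest it sits inside a handler that decodes incoming records before republishing them to MSK. A minimal sketch of that pattern, assuming a Kinesis-style event shape ('Records' entries carrying base64 data); the event layout and handler name are assumptions, not from the gist.

def handler(event, context):
    # Decode each base64-encoded record, parse it as JSON, and forward it to MSK.
    for record in event.get('Records', []):
        decoded = base64.b64decode(record['kinesis']['data'])
        payload = json.loads(decoded)
        producer.send(msk_topic, value=json.dumps(payload).encode('utf-8'))
    # Block until every buffered record has been acknowledged by the brokers.
    producer.flush()
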
::: {opensearch-node2}{NWq2Cpn5TBCXO1Lo4GNV6w}{NOUocwuXTX6MAwECgMFnAQ}{172.18.0.9}{172.18.0.9:9300}{dimr}{shard_indexing_pressure_enabled=true}
   Hot threads at 2022-07-07T17:41:31.918Z, interval=500ms, busiestThreads=3, ignoreIdleThreads=true:

   100.2% (501.1ms out of 500ms) cpu usage by thread 'opensearch[opensearch-node2][flush][T#1]'
     2/10 snapshots sharing following 23 elements
       app//org.apache.lucene.util.MSBRadixSorter.reorder(MSBRadixSorter.java:292)
       app//org.apache.lucene.util.MSBRadixSorter.radixSort(MSBRadixSorter.java:172)
       app//org.apache.lucene.util.MSBRadixSorter.sort(MSBRadixSorter.java:136)
       app//org.apache.lucene.util.MSBRadixSorter.sort(MSBRadixSorter.java:129)
       app//org.apache.lucene.util.BytesRefHash.sort(BytesRefHash.java:168)

::: {opensearch-node2}{NWq2Cpn5TBCXO1Lo4GNV6w}{NOUocwuXTX6MAwECgMFnAQ}{172.18.0.9}{172.18.0.9:9300}{dimr}{shard_indexing_pressure_enabled=true}
   Hot threads at 2022-07-07T17:44:34.878Z, interval=500ms, busiestThreads=3, ignoreIdleThreads=true:

::: {opensearch-node1}{Sa-IjmPeQcynu2ra8_DQqg}{574_SXKeRki69hovxeNVDg}{172.18.0.3}{172.18.0.3:9300}{dimr}{shard_indexing_pressure_enabled=true}
   Hot threads at 2022-07-07T17:44:34.877Z, interval=500ms, busiestThreads=3, ignoreIdleThreads=true:

   92.1% (460.5ms out of 500ms) cpu usage by thread 'opensearch[opensearch-node1][snapshot][T#3]'
     10/10 snapshots sharing following 18 elements
       java.base@17.0.3/javax.crypto.CipherInputStream.ensureCapacity(CipherInputStream.java:113)
       java.base@17.0.3/javax.crypto.CipherInputStream.getMoreData(CipherInputStream.java:156)

ycyr / README.md
Created February 11, 2022 21:23 — forked from danielperna84/README.md
Unseal HashiCorp Vault using systemd

Automatically unseal HashiCorp Vault via systemd

WARNING!

Automatically unsealing Vault drastically reduces the security of the stored secrets. That being said, there may be scenarios in which this simple approach could be useful or sufficient.

How it works / installation

This requires Vault to be started by a systemd unit named vault.service, which is typically the case when installing from a distribution package. The script vault-unseal.sh should be placed in /root and secured with 700 permissions.
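
The vault-unseal.sh script itself is not reproduced here. As a rough illustration of the call such a script makes, the sketch below submits stored unseal key shares to Vault's sys/unseal HTTP endpoint, written in Python for brevity; the Vault address, key file location, and key format are assumptions, not taken from the gist.

import json
import requests

VAULT_ADDR = 'http://127.0.0.1:8200'        # assumed local Vault address
KEY_FILE = '/root/vault-unseal-keys.json'   # assumed location of stored unseal keys

with open(KEY_FILE) as f:
    keys = json.load(f)                     # assumed format: ["key1", "key2", "key3"]

for key in keys:
    # Each call submits one unseal key share; Vault unseals once the threshold is met.
    resp = requests.put(f'{VAULT_ADDR}/v1/sys/unseal', json={'key': key})
    resp.raise_for_status()
    if not resp.json().get('sealed', True):
        print('Vault is unsealed')
        break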

ycyr / sone.conf
Last active January 12, 2022 13:45 — forked from tuckner/sone.conf
SentinelOne Cloudfunnel Logstash Input
input {
  kafka {
    bootstrap_servers => "" #configurable
    group_id => "" #configurable
    auto_offset_reset => "" #configurable
    security_protocol => "SASL_SSL"
    sasl_mechanism => "SCRAM-SHA-512"
    sasl_jaas_config => "org.apache.kafka.common.security.scram.ScramLoginModule required username='' password='';"
    ssl_endpoint_identification_algorithm => ""
    topics => [""] #configurable
  }
}
ycyr / README.md
Created July 13, 2021 13:08 — forked from akihikodaki/README.en.md
Linux Desktop on Apple Silicon/M1 in Practice

I bought an M1 MacBook Air. It is the fastest computer I have, and I have been a GNOME/GNU/Linux user for a long time. The obvious conclusion is that I need a practical Linux desktop environment on Apple Silicon/M1.

Fortunately, Linux already works on Apple Silicon/M1. But how practical is it?

ycyr / gist:551374a8e77cfa6464f2ac4c847d0eff
Created January 26, 2021 23:05
fluentd-prometeus.conf
<source>
  @type prometheus
  @id in_prometheus
  bind "#{ENV['FLUENTD_PROMETHEUS_BIND'] || '0.0.0.0'}"
  port "#{ENV['FLUENTD_PROMETHEUS_PORT'] || '24231'}"
  metrics_path "#{ENV['FLUENTD_PROMETHEUS_PATH'] || '/metrics'}"
</source>

<source>
  @type prometheus_output_monitor
</source>

local cortex = import "cortex/cortex.libsonnet";

cortex {
  _config+:: {
    namespace: "cortex",
    storage_engine: 'blocks',
    blocks_storage_backend: 's3',
    blocks_storage_bucket_name: 'test-buck-cortex-blocks',
    blocks_storage_s3_endpoint: 's3.dualstack.ca-central-1.amazonaws.com',
    storage_backend: 'aws',
  },
}