I hereby claim:
- I am hiimivantang on github.
- I am hiimivantang_yb (https://keybase.io/hiimivantang_yb) on keybase.
- I have a public key ASAoaTFMbSrqLzOVYqTVE_MZWSXlTlJLUieYJJnH4Y0K9Ao
To claim this, I am signing this object:
# View cluster resource utilization at cluster level.
# Reports total vs. assigned memory (scaled by /1024/1024 — presumably
# bytes->MB; confirm the column's unit against the server docs) and
# total vs. assigned CPU from the cluster-wide virtual stats table.
# (Removed trailing " | |" markdown-table residue that made the statement invalid.)
select mem_total/1024/1024, (mem_assigned)/1024/1024, cpu_total, cpu_assigned from __All_virtual_server_stat;
#View cluster resources utilization at node level | |
select | |
zone, | |
concat (svr_ip, | |
':', | |
svr_port) cpu_capacity, | |
cpu_total, |
# Install and initialize PostgreSQL 11 from the PGDG repository on an EL 9 host.
# (Removed the stray " | |" table residue that turned every command into an
# invalid shell pipeline.)

# Refresh all system packages first.
sudo dnf update -y
# Add the official PostgreSQL yum repository for EL 9 x86_64.
sudo dnf install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-9-x86_64/pgdg-redhat-repo-latest.noarch.rpm
# Disable the distribution's built-in postgresql module so the PGDG
# postgresql11 packages are the ones installed.
sudo dnf -y module disable postgresql
sudo dnf -y install postgresql11-server postgresql11
# Create the initial database cluster.
sudo /usr/pgsql-11/bin/postgresql-11-setup initdb
# Start the service now and enable it on boot.
sudo systemctl enable postgresql-11 && sudo systemctl start postgresql-11
# Set a password for the OS-level postgres account (interactive prompt).
sudo passwd postgres
--- | |
version: '3' | |
services: | |
yb-master-n1: | |
image: yugabytedb/yugabyte:latest | |
hostname: yb-master-n1 | |
container_name: yb-master-n1 | |
privileged: true | |
ports: |
### Keybase proof
I hereby claim:
* I am hiimivantang on github.
* I am wtang_yb (https://keybase.io/wtang_yb) on keybase.
* I have a public key ASDYZ3swY2prycykAc5peyrjRnBi8g-iYAhYhVFJ8X2CbQo
To claim this, I am signing this object:
I hereby claim:
To claim this, I am signing this object:
#!/bin/sh | |
wget https://github.com/prometheus/prometheus/releases/download/v2.42.0/prometheus-2.42.0.linux-amd64.tar.gz | |
tar xvfz prometheus-2.42.0.linux-amd64.tar.gz && cd prometheus-2.42.0.linux-amd64 | |
PRIVATE_IP=$(curl http://169.254.169.254/latest/meta-data/local-ipv4) | |
tee ./yugabytedb.yaml <<EOF | |
global: |
#!/bin/bash
# Require the master-addresses argument; print usage and exit 1 when missing.
# (Removed the stray " | |" table residue that broke every line, and fixed
# the usage message punctuation: "Usage $0" -> "Usage: $0".)
MASTER_ADDRESSES="$1"
if [ -z "$MASTER_ADDRESSES" ]; then
  echo "Usage: $0 <master addresses and their respective ports delimited by comma. E.g 192.168.0.22:7100,192.168.0.23:7100,192.168.0.24:7100>"
  echo ""
  exit 1
fi
from pyspark.sql.functions import regexp_replace, asc | |
input_df = spark.read.format("binaryFile").option("pathGlobFilter", "*{}".format(FORMAT)).load(INPUT_DIR_DBFS).withColumn("path", regexp_replace("path", "dbfs:/", "/dbfs/")).select("path", "modificationTime", "length") | |
display(input_df.orderBy(asc("path"))) | |
from pyspark.sql.functions import udf, col, lit, explode | |
@udf("array<string>") |
function sendEmail(e) { | |
var subject = "Confirmation Message" | |
var message = "Hello! this is a test email!" | |
var old_value = e.oldValue | |
var new_value = e.value | |
console.log(e.range.rowStart) | |
console.log(e.range.rowEnd) |
spark.hadoop.javax.jdo.option.ConnectionURL jdbc:sqlserver://hiimivantang-azure-sql.database.windows.net:1433;database=hivemetastore;encrypt=true;trustServerCertificate=false;hostNameInCertificate=*.database.windows.net;loginTimeout=30;
spark.hadoop.javax.jdo.option.ConnectionUserName {{secrets/external-hms/mssql-user}}
spark.hadoop.javax.jdo.option.ConnectionPassword {{secrets/external-hms/mssql-password}}
spark.hadoop.javax.jdo.option.ConnectionDriverName com.microsoft.sqlserver.jdbc.SQLServerDriver
spark.sql.hive.metastore.version 2.3.0
spark.sql.hive.metastore.jars maven