Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
The changes I made to the https://github.com/confluentinc/examples 5.3.0-post branch code on my local environment to be able to start Confluent Platform version 5.3.0
diff --git a/connect-streams-pipeline/jdbcavroksql-connector.properties b/connect-streams-pipeline/jdbcavroksql-connector.properties
index 5490d92..4c10e09 100644
--- a/connect-streams-pipeline/jdbcavroksql-connector.properties
+++ b/connect-streams-pipeline/jdbcavroksql-connector.properties
@@ -25,7 +25,7 @@ tasks.max=1
# SQLite database stored in the file test.db, use and auto-incrementing column called 'id' to
# detect new rows as they are added, and output to topics prefixed with 'test-sqlite-jdbc-', e.g.
# a table called 'users' will be written to the topic 'test-sqlite-jdbc-users'.
-connection.url=jdbc:sqlite:/usr/local/lib/retail.db
+connection.url=jdbc:sqlite:/home/user1/java/eclipse/19.08.06_kafka/sqlite_db_files/retail.db
mode=incrementing
incrementing.column.name=id
topic.prefix=jdbcavroksql-
diff --git a/connect-streams-pipeline/jdbcgenericavro-connector.properties b/connect-streams-pipeline/jdbcgenericavro-connector.properties
index ea3140f..5a403e4 100644
--- a/connect-streams-pipeline/jdbcgenericavro-connector.properties
+++ b/connect-streams-pipeline/jdbcgenericavro-connector.properties
@@ -25,7 +25,7 @@ tasks.max=1
# SQLite database stored in the file test.db, use and auto-incrementing column called 'id' to
# detect new rows as they are added, and output to topics prefixed with 'test-sqlite-jdbc-', e.g.
# a table called 'users' will be written to the topic 'test-sqlite-jdbc-users'.
-connection.url=jdbc:sqlite:/usr/local/lib/retail.db
+connection.url=jdbc:sqlite:/home/user1/java/eclipse/19.08.06_kafka/sqlite_db_files/retail.db
mode=incrementing
incrementing.column.name=id
topic.prefix=jdbcgenericavro-
diff --git a/connect-streams-pipeline/jdbcjson-connector.properties b/connect-streams-pipeline/jdbcjson-connector.properties
index 15124d4..d520197 100644
--- a/connect-streams-pipeline/jdbcjson-connector.properties
+++ b/connect-streams-pipeline/jdbcjson-connector.properties
@@ -25,7 +25,7 @@ tasks.max=1
# SQLite database stored in the file test.db, use and auto-incrementing column called 'id' to
# detect new rows as they are added, and output to topics prefixed with 'test-sqlite-jdbc-', e.g.
# a table called 'users' will be written to the topic 'test-sqlite-jdbc-users'.
-connection.url=jdbc:sqlite:/usr/local/lib/retail.db
+connection.url=jdbc:sqlite:/home/user1/java/eclipse/19.08.06_kafka/sqlite_db_files/retail.db
mode=incrementing
incrementing.column.name=id
topic.prefix=jdbcjson-
diff --git a/connect-streams-pipeline/jdbcspecificavro-connector.properties b/connect-streams-pipeline/jdbcspecificavro-connector.properties
index 336d13f..a3feb8f 100644
--- a/connect-streams-pipeline/jdbcspecificavro-connector.properties
+++ b/connect-streams-pipeline/jdbcspecificavro-connector.properties
@@ -25,7 +25,7 @@ tasks.max=1
# SQLite database stored in the file test.db, use and auto-incrementing column called 'id' to
# detect new rows as they are added, and output to topics prefixed with 'test-sqlite-jdbc-', e.g.
# a table called 'users' will be written to the topic 'test-sqlite-jdbc-users'.
-connection.url=jdbc:sqlite:/usr/local/lib/retail.db
+connection.url=jdbc:sqlite:/home/user1/java/eclipse/19.08.06_kafka/sqlite_db_files/retail.db
mode=incrementing
incrementing.column.name=id
topic.prefix=jdbcspecificavro-
diff --git a/microservices-orders/connectors/connector_jdbc_customers.config b/microservices-orders/connectors/connector_jdbc_customers.config
index c627b17..ad1505c 100644
--- a/microservices-orders/connectors/connector_jdbc_customers.config
+++ b/microservices-orders/connectors/connector_jdbc_customers.config
@@ -2,7 +2,7 @@
"name": "jdbc-customers",
"config": {
"connector.class": "io.confluent.connect.jdbc.JdbcSourceConnector",
- "connection.url": "jdbc:sqlite:/usr/local/lib/microservices.db",
+ "connection.url": "jdbc:sqlite:/home/user1/java/eclipse/19.08.06_kafka/sqlite_db_files/microservices.db",
"mode": "incrementing",
"incrementing.column.name": "id",
"topic.prefix": "",
diff --git a/microservices-orders/connectors/connector_jdbc_customers_oss.config b/microservices-orders/connectors/connector_jdbc_customers_oss.config
index c627b17..ad1505c 100644
--- a/microservices-orders/connectors/connector_jdbc_customers_oss.config
+++ b/microservices-orders/connectors/connector_jdbc_customers_oss.config
@@ -2,7 +2,7 @@
"name": "jdbc-customers",
"config": {
"connector.class": "io.confluent.connect.jdbc.JdbcSourceConnector",
- "connection.url": "jdbc:sqlite:/usr/local/lib/microservices.db",
+ "connection.url": "jdbc:sqlite:/home/user1/java/eclipse/19.08.06_kafka/sqlite_db_files/microservices.db",
"mode": "incrementing",
"incrementing.column.name": "id",
"topic.prefix": "",
diff --git a/microservices-orders/start.sh b/microservices-orders/start.sh
index ba93e98..b5d3ca7 100755
--- a/microservices-orders/start.sh
+++ b/microservices-orders/start.sh
@@ -1,5 +1,10 @@
#!/bin/bash
+# Victor's environment variables
+export CONFLUENT_HOME=/home/user1/java/eclipse/19.08.06_kafka/confluent-5.3.0-2.12
+export PATH=$PATH:/home/user1/java/eclipse/19.08.06_kafka/confluent-cli
+export PATH=$PATH:$CONFLUENT_HOME/bin
+
# Source library
. ../utils/helper.sh
@@ -8,7 +13,7 @@ check_mvn || exit 1
check_jot || exit 1
check_netstat || exit 1
check_running_elasticsearch 5.6.5 || exit 1
-check_running_kibana || exit 1
+#check_running_kibana || exit 1
check_running_cp 5.3 || exit 1
./stop.sh
diff --git a/microservices-orders/stop.sh b/microservices-orders/stop.sh
index 2c05cfb..7744bfa 100755
--- a/microservices-orders/stop.sh
+++ b/microservices-orders/stop.sh
@@ -1,5 +1,10 @@
#!/bin/bash
+# Victor's environment variables
+export CONFLUENT_HOME=/home/user1/java/eclipse/19.08.06_kafka/confluent-5.3.0-2.12
+export PATH=$PATH:/home/user1/java/eclipse/19.08.06_kafka/confluent-cli
+export PATH=$PATH:$CONFLUENT_HOME/bin
+
# Source library
. ../utils/helper.sh
diff --git a/utils/helper.sh b/utils/helper.sh
index a7d1606..a5eb78d 100755
--- a/utils/helper.sh
+++ b/utils/helper.sh
@@ -1,5 +1,7 @@
#!/bin/bash
+SQLITE_DB_FILES_DIR=../../../sqlite_db_files
+
function check_env() {
if [[ -z "$CONFLUENT_HOME" ]]; then
echo "\$CONFLUENT_HOME is not defined. Run 'export CONFLUENT_HOME=/path/to/confluentplatform' and try again"
@@ -262,10 +264,20 @@ function check_curl() {
function prep_sqltable_locations() {
TABLE="locations"
- TABLE_PATH=/usr/local/lib/table.$TABLE
+
+# /usr/local/lib is only writable by root
+# TABLE_PATH=/usr/local/lib/table.$TABLE
+
+ echo "SQLITE_DB_FILES_DIR=$SQLITE_DB_FILES_DIR"
+ TABLE_PATH=$SQLITE_DB_FILES_DIR/table.$TABLE
+
cp ../utils/table.$TABLE $TABLE_PATH
- DB=/usr/local/lib/retail.db
+# /usr/local/lib is only writable by root
+# DB=/usr/local/lib/retail.db
+
+ DB=$SQLITE_DB_FILES_DIR/retail.db
+
echo "DROP TABLE IF EXISTS $TABLE;" | sqlite3 $DB
echo "CREATE TABLE $TABLE(id INTEGER KEY NOT NULL, name VARCHAR(255), sale INTEGER);" | sqlite3 $DB
echo ".import $TABLE_PATH $TABLE" | sqlite3 $DB
@@ -281,10 +293,20 @@ function prep_sqltable_locations() {
function prep_sqltable_customers() {
TABLE="customers"
- TABLE_PATH=/usr/local/lib/table.$TABLE
+
+# /usr/local/lib is only writable by root
+# TABLE_PATH=/usr/local/lib/table.$TABLE
+
+ echo "SQLITE_DB_FILES_DIR=$SQLITE_DB_FILES_DIR"
+ TABLE_PATH=$SQLITE_DB_FILES_DIR/table.$TABLE
+
cp ../utils/table.$TABLE $TABLE_PATH
- DB=/usr/local/lib/microservices.db
+# /usr/local/lib is only writable by root
+# DB=/usr/local/lib/microservices.db
+
+ DB=$SQLITE_DB_FILES_DIR/microservices.db
+
echo "DROP TABLE IF EXISTS $TABLE;" | sqlite3 $DB
echo "CREATE TABLE $TABLE(id INTEGER KEY NOT NULL, firstName VARCHAR(255), lastName VARCHAR(255), email VARCHAR(255), address VARCHAR(255), level VARCHAR(255));" | sqlite3 $DB
echo ".import $TABLE_PATH $TABLE" | sqlite3 $DB
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.