Adam Fraser (adamcfraser), Couchbase, Vancouver, BC
2020-06-28T23:09:28.413Z [INF] SGCluster: Replication definitions changed - refreshing...
2020-06-28T23:09:28.413Z [INF] DCP: c:db-local-cfgSG cfg_sg: Get, key: sgrCluster, cas: 0
2020-06-28T23:09:28.413Z [INF] DCP: c:db-local-cfgSG cfg_sg: Get, key: sgrCluster, cas: 0
2020-06-28T23:09:28.414Z [INF] SGCluster: Stopping reassigned replication pushandpull-with-target-continuous
2020-06-28T23:09:28.416Z [ERR] Unexpected panic: runtime error: invalid memory address or nil pointer dereference - stopping process
goroutine 435 [running]:
runtime/debug.Stack(0xc000f75bf8, 0xdcee00, 0x18cb9a0)
/home/couchbase/jenkins/workspace/sgw-unix-build/go1.13.4/src/runtime/debug/stack.go:24 +0x9d
github.com/couchbase/sync_gateway/base.FatalPanicHandler()
/home/couchbase/jenkins/workspace/sgw-unix-build/2.8.0/enterprise/godeps/src/github.com/couchbase/sync_gateway/base/util.go:1293 +0x57
2019-11-26T13:23:29.636-08:00 [INF] DCP: janitor: awakes, op: kick, msg: start
2019-11-26T13:23:29.637-08:00 [INF] DCP: janitor: JanitorOnce, err: janitor: skipped on nil planPIndexes
2019-11-26T13:23:29.705-08:00 [INF] DCP: janitor: awakes, op: kick, msg: the plans have changed
2019-11-26T13:23:29.707-08:00 [INF] DCP: janitor: pindexes to remove: 0
2019-11-26T13:23:29.707-08:00 [INF] DCP: janitor: pindexes to add: 16
2019-11-26T13:23:29.707-08:00 [INF] DCP: janitor: pindexes to restart: 0
2019-11-26T13:23:29.710-08:00 [INF] DCP: janitor: feeds to remove: 0
2019-11-26T13:23:29.711-08:00 [INF] DCP: janitor: feeds to add: 16
2019-11-26T13:23:29.716-08:00 [INF] DCP: janitor: awakes, op: kick, msg: cfg changed, key: planPIndexes
2019-11-26T13:23:29.718-08:00 [INF] DCP: janitor: pindexes to remove: 0
package main

import (
    "encoding/json"
    "fmt"
    "io"
    "log"
    "strings"
    "testing"
    "time"
)
Create user
http --verbose PUT localhost:4985/default/_user/adam name=adam password=1234
Put a few docs as user
http --verbose -a adam:1234 PUT localhost:4984/default/doc1 channels:='["channel1", "channel2"]'
http --verbose -a adam:1234 PUT localhost:4984/default/doc2 channels:='["channel2"]'
Put a few docs as admin
http --verbose PUT localhost:4985/default/doc3 channels:='["channeladmin"]'
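To sanity-check where those documents landed, the admin API can be queried afterwards. A sketch, assuming the same ports and database name as above and the standard _raw, _changes, and _user admin endpoints:
Inspect a doc's sync metadata (including channels) as admin
http --verbose GET localhost:4985/default/_raw/doc1
List changes restricted to a single channel
http --verbose GET localhost:4985/default/_changes filter==sync_gateway/bychannel channels==channel1
Check which channels the user has access to
http --verbose GET localhost:4985/default/_user/adam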
#!/usr/bin/env bash
# Abort on errors
set -e
# Output all executed shell commands
set -x
adamcfraser / bulk_get_1.txt
Last active May 28, 2018 18:36
bulk get traces
POST /db/_bulk_get?revs=true HTTP/1.1
Host: 192.168.33.25:4985
Accept-Encoding: identity
Content-Type: application/json
Content-Length: 34935
{"docs": [{"id": "ls_db_upgrade_doc_0_0"}, {"id": "ls_db_upgrade_doc_0_1"}, {"id": "ls_db_upgrade_doc_0_2"}, {"id": "ls_db_upgrade_doc_0_3"}, {"id": "ls_db_upgrade_doc_0_4"}, {"id": "ls_db_upgrade_doc_0_5"}, {"id": "ls_db_upgrade_doc_0_6"}, {"id": "ls_db_upgrade_doc_0_7"}, {"id": "ls_db_upgrade_doc_0_8"}, {"id": "ls_db_upgrade_doc_0_9"}, {"id": "ls_db_upgrade_doc_0_10"}, {"id": "ls_db_upgrade_doc_0_11"}, {"id": "ls_db_upgrade_doc_0_12"}, {"id": "ls_db_upgrade_doc_0_13"}, {"id": "ls_db_upgrade_doc_0_14"}, {"id": "ls_db_upgrade_doc_0_15"}, {"id": "ls_db_upgrade_doc_0_16"}, {"id": "ls_db_upgrade_doc_0_17"}, {"id": "ls_db_upgrade_doc_0_18"}, {"id": "ls_db_upgrade_doc_0_19"}, {"id": "ls_db_upgrade_doc_0_20"}, {"id": "ls_db_upgrade_doc_0_21"}, {"id": "ls_db_upgrade_doc_0_22"}, {"id": "ls_db_upgrade_doc_0_23"}, {"id": "ls_db_upgrade_doc_0_24"}, {"id": "ls_db_upgrade_doc_0_25"
// Connect to a memcached server.
func Connect(prot, dest string) (rv *Client, err error) {
    conn, err := dialFun(prot, dest)
    if err != nil {
        return nil, err
    }
    tcpConn, isTcpConn := conn.(*net.TCPConn)
    if !isTcpConn {
        log.Printf("Not TCP Conn: %T", conn)
Adams-MacBook-Pro-2:bin adam$ ./sgload gateload --createreaders --createwriters --numreaders 100 --numwriters 100 --numupdaters 100 --numrevsperdoc 2 --numdocs 1000 --numchannels 10 --batchsize 100 --loglevel info --sg-url http://localhost:4984/sg_bucket/
INFO[01-04|22:46:56] Attempting to expose expvars port=9876
INFO[01-04|22:46:56] Setting loglevel level=info
INFO[01-04|22:46:56] Running gateload scenario gateLoadSpec="{LoadSpec:{SyncGatewayUrl:http://localhost:4984/sg_bucket/ SyncGatewayAdminPort:4985 MockDataStore:false StatsdEnabled:false StatsdEndpoint:localhost:8125 TestSessionID:1a1c2b9a-d789-4016-b2b4-b1ae4ff6c5ee BatchSize:100 NumChannels:10 DocSizeBytes:1024 NumDocs:1000 CompressionEnabled:false ExpvarProgressEnabled:false LogLevel:info} WriteLoadSpec:{LoadSpec:{SyncGatewayUrl:http://localhost:4984/sg_bucket/ SyncGatewayAdminPort:4985 MockDataStore:false StatsdEnabled:false StatsdEndpoint:localhost:8125 TestSessionID:1a1c2b9a-d789-4016-b2b4-b1ae4f
adamcfraser / console_test.txt
Last active October 31, 2016 22:02
Targeting Sync Gateway cookie to path
// Set Sync Gateway session cookies targeting specific paths
var cookie1String = "SyncGatewaySession=3cad4b95524179bf144fe0d92b8f09877bb86bf5;path=/db1/";
var cookie2String = "SyncGatewaySession=ead9488883b4e48f3f6140da212ffb45cdcd3e81;path=/db2/";
document.cookie = cookie1String;
document.cookie = cookie2String;
// Attempt to connect to db1.
var db1Req = new XMLHttpRequest();
db1Req.open("GET", "http://localhost:4984/db1/",false);
db1Req.send();
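The session IDs embedded in the cookie strings above would normally come from Sync Gateway's admin session endpoint. A sketch of how such sessions could be created, assuming databases db1 and db2 and an existing user named adam:
http --verbose POST localhost:4985/db1/_session name=adam ttl:=86400
http --verbose POST localhost:4985/db2/_session name=adam ttl:=86400
Each response includes the cookie name (SyncGatewaySession) and a session_id, which is what the path-scoped cookie strings above embed for each database.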
adamcfraser / shadowWithEventsConfig.json
Created August 30, 2016 19:51
SG config with shadowing, webhook
{
  "interface": ":4984",
  "adminInterface": ":4985",
  "databases": {
    "sgbucket": {
      "server": "http://localhost:8091",
      "bucket": "sgbucket",
      "shadow": {
        "server": "http://localhost:8091",
        "bucket": "sourcebucket"