Skip to content

Instantly share code, notes, and snippets.

@dchapes
Last active February 14, 2020 17:16
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save dchapes/6e0932cf17e9d479ae65f17acbd32db6 to your computer and use it in GitHub Desktop.
Save dchapes/6e0932cf17e9d479ae65f17acbd32db6 to your computer and use it in GitHub Desktop.
reddit help wrt Sprintf

From https://old.reddit.com/r/golang/comments/f3akld/alternative_to_fmtsprintf/ and https://gist.github.com/leearmstrong/f0110cf486068a962abb78089c72fa9c

For me benchmark output was:

BenchmarkSample/sample2         	     666	    953517 ns/op	  200211 B/op	   10012 allocs/op
BenchmarkSample/sample3         	     938	    666290 ns/op	   40032 B/op	    5006 allocs/op
BenchmarkSample/sample4         	    1699	    367997 ns/op	  159962 B/op	    5005 allocs/op
BenchmarkSample/sample5         	    3145	    190992 ns/op	      24 B/op	       4 allocs/op

That's differences of:

benchmark                       old ns/op     new ns/op     delta
BenchmarkSample/sample{2,3}     953517        666290        -30.12%
BenchmarkSample/sample{3,4}     666290        367997        -44.77%
BenchmarkSample/sample{4,5}     367997        190992        -48.10%
BenchmarkSample/sample{2,5}     953517        190992        -79.97%

benchmark                       old allocs     new allocs     delta
BenchmarkSample/sample{2,3}     10012          5006           -50.00%
BenchmarkSample/sample{3,4}     5006           5005           -0.02%
BenchmarkSample/sample{4,5}     5005           4              -99.92%
BenchmarkSample/sample{2,5}     10012          4              -99.96%

benchmark                       old bytes     new bytes     delta
BenchmarkSample/sample{2,3}     200211        40032         -80.01%
BenchmarkSample/sample{3,4}     40032         159962        +299.59%
BenchmarkSample/sample{4,5}     159962        24            -99.98%
BenchmarkSample/sample{2,5}     200211        24            -99.99%
package sample
import (
"fmt"
"log"
"os"
)
// GenerateESRIASC writes data to /tmp/esri.asc in ESRI ASCII grid format:
// a six-line header (ncols, nrows, xllcenter, yllcenter, cellsize,
// nodata_value) followed by one space-separated line of values per row.
//
// Changes from the original: write errors are no longer silently dropped
// (the first one is logged), fmt.Fprintf replaces the Sprintf+WriteString
// pairs (one fewer allocation per write), and Close is checked explicitly
// so delayed write errors surface instead of being lost by defer.
func GenerateESRIASC(data *input) {
	f, err := os.Create("/tmp/esri.asc")
	if err != nil {
		fmt.Println(err)
		return
	}
	// Remember only the first write error; once something has failed
	// the remaining writes are skipped.
	var werr error
	printf := func(format string, args ...interface{}) {
		if werr == nil {
			_, werr = fmt.Fprintf(f, format, args...)
		}
	}
	// yllcenter: y coordinate of the center of the bottom-most row,
	// derived from the top row position and the cell spacing.
	yBottom := data.FirstRowStartLine - data.IntervalBetweenColumns*(float32(data.NumberOfRowsInField)-1.0)
	printf("ncols %d\n", data.NumberOfColumsInField)
	printf("nrows %d\n", data.NumberOfRowsInField)
	printf("xllcenter %f\n", data.FirstPointFirstRow)
	printf("yllcenter %f\n", yBottom)
	printf("cellsize %f\n", data.IntervalBetweenColumns)
	printf("nodata_value %f\n", data.Float32MissingDataValue)
	log.Println(data.NumberOfRowsInField, "rows")
	log.Println(data.NumberOfColumsInField, "columns")
	log.Println(data.IntervalBetweenColumns, "pixel size")
	numRows := int(data.NumberOfRowsInField)
	numCols := int(data.NumberOfColumsInField)
	for i := 0; i < numRows; i++ {
		base := i * numCols
		for j := 0; j < numCols-1; j++ {
			printf("%d ", data.DataBlock[base+j])
		}
		printf("%d\n", data.DataBlock[base+numCols-1])
	}
	if werr != nil {
		log.Println("write /tmp/esri.asc:", werr)
	}
	// Close explicitly (not deferred) so its error can be reported;
	// Close can return write errors flushed from the OS buffers.
	if err := f.Close(); err != nil {
		log.Println("close /tmp/esri.asc:", err)
	}
}
package sample
import (
"bufio"
"fmt"
"io"
)
// WARNING, WARNING, WARNING
//
// None of these sample# functions return errors,
// they should be checking for write errors in all cases.
// However, especially if using bufio, errors don't need
// to be checked on each individual write (bufio will remember
// previous errors and return them each time; it'd be reasonable
// to expect success and just return errors from Flush).
// sample2 emits the ESRI ASCII grid for data by formatting every field
// with fmt.Sprintf and writing the resulting strings. If the destination
// cannot take strings directly, output goes through a bufio.Writer that
// is flushed on return. Write errors are deliberately not checked in
// these sample functions.
func sample2(ww io.Writer, data *input) {
	type stringWriter interface {
		io.Writer
		io.StringWriter
	}
	w, ok := ww.(stringWriter)
	if !ok {
		buffered := bufio.NewWriter(ww)
		defer buffered.Flush()
		w = buffered
	}
	// Center of the bottom-most row of cells.
	yBottom := data.FirstRowStartLine - data.IntervalBetweenColumns*(float32(data.NumberOfRowsInField)-1.0)
	w.WriteString(fmt.Sprintf("ncols %d\n", data.NumberOfColumsInField))
	w.WriteString(fmt.Sprintf("nrows %d\n", data.NumberOfRowsInField))
	w.WriteString(fmt.Sprintf("xllcenter %f\n", data.FirstPointFirstRow))
	w.WriteString(fmt.Sprintf("yllcenter %f\n", yBottom))
	w.WriteString(fmt.Sprintf("cellsize %f\n", data.IntervalBetweenColumns))
	w.WriteString(fmt.Sprintf("nodata_value %f\n", data.Float32MissingDataValue))
	rows := int(data.NumberOfRowsInField)
	cols := int(data.NumberOfColumsInField)
	for r := 0; r < rows; r++ {
		base := r * cols
		for c := 0; c < cols-1; c++ {
			w.WriteString(fmt.Sprintf("%d ", data.DataBlock[base+c]))
		}
		w.WriteString(fmt.Sprintf("%d\n", data.DataBlock[base+cols-1]))
	}
}
package sample
import (
"fmt"
"io"
)
// sample3 writes the grid with fmt.Fprintf directly on the writer,
// avoiding the intermediate string that Sprintf+WriteString builds.
// Callers that want buffering wrap w themselves (e.g. with bufio).
func sample3(w io.Writer, data *input) {
	// Center of the bottom-most row of cells.
	yBottom := data.FirstRowStartLine - data.IntervalBetweenColumns*(float32(data.NumberOfRowsInField)-1.0)
	fmt.Fprintf(w, "ncols %d\n", data.NumberOfColumsInField)
	fmt.Fprintf(w, "nrows %d\n", data.NumberOfRowsInField)
	fmt.Fprintf(w, "xllcenter %f\n", data.FirstPointFirstRow)
	fmt.Fprintf(w, "yllcenter %f\n", yBottom)
	fmt.Fprintf(w, "cellsize %f\n", data.IntervalBetweenColumns)
	fmt.Fprintf(w, "nodata_value %f\n", data.Float32MissingDataValue)
	rows := int(data.NumberOfRowsInField)
	cols := int(data.NumberOfColumsInField)
	for r := 0; r < rows; r++ {
		base := r * cols
		for c := 0; c < cols-1; c++ {
			fmt.Fprintf(w, "%d ", data.DataBlock[base+c])
		}
		fmt.Fprintf(w, "%d\n", data.DataBlock[base+cols-1])
	}
}
package sample
import (
"bufio"
"fmt"
"io"
"strconv"
)
// sample4 emits the same grid as the other sample functions, but
// converts the integer values with strconv instead of fmt, cutting
// per-value formatting overhead. The float header fields still go
// through fmt.Fprintf so the output matches %f formatting exactly.
func sample4(ww io.Writer, data *input) {
	// Use the destination directly when it can already take strings
	// and single bytes; otherwise interpose a bufio.Writer that is
	// flushed when the function returns.
	type stringByteWriter interface {
		io.Writer
		io.StringWriter
		io.ByteWriter
	}
	w, ok := ww.(stringByteWriter)
	if !ok {
		buffered := bufio.NewWriter(ww)
		defer buffered.Flush()
		w = buffered
	}
	// Center of the bottom-most row of cells.
	yBottom := data.FirstRowStartLine - data.IntervalBetweenColumns*(float32(data.NumberOfRowsInField)-1.0)
	w.WriteString("ncols ")
	w.WriteString(strconv.FormatInt(int64(data.NumberOfColumsInField), 10))
	w.WriteString("\nnrows ")
	w.WriteString(strconv.FormatInt(int64(data.NumberOfRowsInField), 10))
	// NOTE: strconv.FormatFloat(..., 'f', -1, 32) would avoid fmt for
	// the float fields too, but it can print a different number of
	// digits than %f (e.g. 0.6645601 instead of 0.664560), so the
	// floats stay with Fprintf to keep the output byte-identical.
	fmt.Fprintf(w, "\nxllcenter %f", data.FirstPointFirstRow)
	fmt.Fprintf(w, "\nyllcenter %f", yBottom)
	fmt.Fprintf(w, "\ncellsize %f", data.IntervalBetweenColumns)
	fmt.Fprintf(w, "\nnodata_value %f\n", data.Float32MissingDataValue)
	rows := int(data.NumberOfRowsInField)
	cols := int(data.NumberOfColumsInField)
	for r := 0; r < rows; r++ {
		base := r * cols
		for c := 0; c < cols-1; c++ {
			w.WriteString(strconv.FormatInt(int64(data.DataBlock[base+c]), 10))
			w.WriteByte(' ')
		}
		w.WriteString(strconv.FormatInt(int64(data.DataBlock[base+cols-1]), 10))
		w.WriteByte('\n')
	}
}
package sample
import (
"fmt"
"io"
"strconv"
)
// sample5 emits the grid like the other sample functions but batches
// output: values are appended to one reusable byte slice with strconv
// and written a header chunk / a whole row at a time, minimizing both
// allocations and Write calls.
func sample5(w io.Writer, data *input) {
	// Center of the bottom-most row of cells.
	yBottom := data.FirstRowStartLine - data.IntervalBetweenColumns*(float32(data.NumberOfRowsInField)-1.0)
	line := make([]byte, 0, 2000)
	line = append(line, "ncols "...)
	line = strconv.AppendInt(line, int64(data.NumberOfColumsInField), 10)
	line = append(line, "\nnrows "...)
	line = strconv.AppendInt(line, int64(data.NumberOfRowsInField), 10)
	w.Write(line)
	// NOTE: strconv.AppendFloat(..., 'f', -1, 32) would drop the fmt
	// dependency for the float fields, but it can print a different
	// number of digits than %f (e.g. 0.6645601 instead of 0.664560),
	// so the floats stay with Fprintf to keep the output identical.
	fmt.Fprintf(w, "\nxllcenter %f", data.FirstPointFirstRow)
	fmt.Fprintf(w, "\nyllcenter %f", yBottom)
	fmt.Fprintf(w, "\ncellsize %f", data.IntervalBetweenColumns)
	fmt.Fprintf(w, "\nnodata_value %f\n", data.Float32MissingDataValue)
	rows := int(data.NumberOfRowsInField)
	cols := int(data.NumberOfColumsInField)
	for r := 0; r < rows; r++ {
		// Build each row in the reused buffer, then write it in one
		// call. Buffering partial lines instead would only matter for
		// very, very long rows.
		line = line[:0]
		base := r * cols
		for c := 0; c < cols-1; c++ {
			line = strconv.AppendInt(line, int64(data.DataBlock[base+c]), 10)
			line = append(line, ' ')
		}
		line = strconv.AppendInt(line, int64(data.DataBlock[base+cols-1]), 10)
		line = append(line, '\n')
		w.Write(line)
	}
}
package sample
import (
"bytes"
"crypto/sha1"
"io"
"io/ioutil"
"math"
"math/rand"
"testing"
)
// input models the data structure from the original reddit thread
// (field types were guessed): grid geometry for an ESRI ASCII raster
// plus a flat row-major block of cell values.
type input struct {
	NumberOfRowsInField     int32
	NumberOfColumsInField   int32 // sic: Columns
	FirstRowStartLine       float32
	IntervalBetweenColumns  float32 // cell size, also used for row spacing
	FirstPointFirstRow      float32
	Float32MissingDataValue float64 // NOTE(review): float64 despite the name — confirm intended width
	DataBlock               []int64 // len = NumberOfRowsInField * NumberOfColumsInField
}
// testInput is a shared, deterministically generated 50x100 grid used
// by both the test and the benchmarks.
var testInput = func() *input {
	// Fixed seed so every run produces identical data (and hashes).
	r := rand.New(rand.NewSource(1))
	const n = 50
	// Draw the three float fields in the same order as before so the
	// fixed-seed sequence yields the same values.
	firstRow := r.Float32()
	interval := r.Float32()
	firstPoint := r.Float32()
	in := &input{
		NumberOfRowsInField:     n,
		NumberOfColumsInField:   2 * n,
		FirstRowStartLine:       firstRow,
		IntervalBetweenColumns:  interval,
		FirstPointFirstRow:      firstPoint,
		Float32MissingDataValue: math.Inf(1),
	}
	in.DataBlock = make([]int64, in.NumberOfRowsInField*in.NumberOfColumsInField)
	for i := range in.DataBlock {
		in.DataBlock[i] = r.Int63()
	}
	return in
}()
const expectedSize = 1e5
// testcases lists the sample implementations exercised by both
// TestSample and BenchmarkSample, ordered slowest to fastest.
var testcases = []struct {
	name string
	fn   func(io.Writer, *input)
}{
	{"sample2", sample2},
	{"sample3", sample3},
	{"sample4", sample4},
	{"sample5", sample5},
}
// TestSample just checks that all the functions produce the same exact
// buffer contents: each sample function writes into a SHA-1 hash, and
// every digest must equal the first function's digest.
func TestSample(t *testing.T) {
	h := sha1.New()
	var hashed, expected []byte
	for _, tc := range testcases {
		h.Reset()
		tc.fn(h, testInput)
		hashed = h.Sum(hashed[:0])
		t.Logf("%s hash: %X", tc.name, hashed)
		if expected == nil {
			// The first function establishes the expected digest.
			expected = hashed
			hashed = hashed[:0:0] // to force re-allocation
		} else if !bytes.Equal(hashed, expected) {
			t.Errorf("%s hash don't match", tc.name)
		}
	}
}
// BenchmarkSample times each sample function twice: first writing into
// a pre-grown bytes.Buffer ("buf/..."), then into ioutil.Discard
// ("discard/...").
func BenchmarkSample(b *testing.B) {
	// Pre-grow the output buffer so buffer re-allocation is not part
	// of what gets measured.
	var out bytes.Buffer
	out.Grow(expectedSize)
	for _, tc := range testcases {
		tc := tc
		b.Run("buf/"+tc.name, func(b *testing.B) {
			b.ReportAllocs()
			for n := 0; n < b.N; n++ {
				out.Reset()
				tc.fn(&out, testInput)
			}
		})
	}
	for _, tc := range testcases {
		tc := tc
		b.Run("discard/"+tc.name, func(b *testing.B) {
			b.ReportAllocs()
			for n := 0; n < b.N; n++ {
				tc.fn(ioutil.Discard, testInput)
			}
		})
	}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment