@u110
Last active February 3, 2020 07:07
package main

import (
	"context"
	"fmt"

	"cloud.google.com/go/bigquery"
	"google.golang.org/api/iterator"
	"google.golang.org/api/option"
)

func showBqDatasets() {
	ctx := context.Background()

	// Set your Google Cloud Platform project ID.
	projectID := "hogehoge"

	// Service-account key embedded as JSON (truncated here).
	creds := []byte(`
	{
		"type": "service_account",
		"project_id": "hogehoge",
		....
	}`)
	apiKey := option.WithCredentialsJSON(creds)

	// Create a client.
	client, err := bigquery.NewClient(ctx, projectID, apiKey)
	if err != nil {
		fmt.Printf("Failed to create client: %v\n", err)
		return
	}
	defer client.Close()

	// List the BigQuery datasets in the project.
	it := client.Datasets(ctx)
	for {
		ds, err := it.Next()
		if err == iterator.Done {
			break
		}
		if err != nil {
			fmt.Println(err)
			break
		}
		fmt.Println(ds.DatasetID)
	}
}

func main() {
	fmt.Println("start")
	showBqDatasets()
}
package main

import (
	"context"
	"fmt"
	"os"
	"time"

	"cloud.google.com/go/bigquery"
	"google.golang.org/api/iterator"
	"google.golang.org/api/option"
)

// GetClient creates a BigQuery client from an embedded service-account key.
func GetClient() *bigquery.Client {
	ctx := context.Background()

	// Set your Google Cloud Platform project ID.
	projectID := "*******************"

	// Service-account key embedded as JSON (truncated here).
	creds := []byte(`
	{
		"type": "service_account",
		...
	}`)
	apiKey := option.WithCredentialsJSON(creds)

	// Create a client.
	client, err := bigquery.NewClient(ctx, projectID, apiKey)
	if err != nil {
		message := fmt.Sprintf("Failed to create client: %v", err)
		panic(message)
	}
	return client
}

// showBqTables lists the datasets in the project.
func showBqTables() {
	ctx := context.Background()
	client := GetClient()

	it := client.Datasets(ctx)
	for {
		ds, err := it.Next()
		if err == iterator.Done {
			break
		}
		if err != nil {
			fmt.Println(err)
			break
		}
		fmt.Println(ds.DatasetID)
	}
}

func main() {
	fmt.Println("start")
	// showBqTables()

	ctx := context.Background()
	client := GetClient()

	// schema1 := bigquery.Schema{
	// 	{Name: "Name", Required: true, Type: bigquery.StringFieldType},
	// 	{Name: "Grades", Repeated: true, Type: bigquery.IntegerFieldType},
	// 	{Name: "Optional", Required: false, Type: bigquery.IntegerFieldType},
	// }

	// Open the local CSV file.
	fileRef, err := os.Open("./test.csv")
	if err != nil {
		fmt.Println(err)
		return
	}
	defer fileRef.Close()

	// Configure the local CSV file as the load source.
	srcRef := bigquery.NewReaderSource(fileRef)
	// srcRef.CSVOptions = bigquery.CSVOptions{
	// 	SkipLeadingRows: 0, // TODO:(u110) fix
	// }
	srcRef.FileConfig.IgnoreUnknownValues = true
	srcRef.FileConfig.SourceFormat = bigquery.CSV
	// srcRef.FileConfig.AutoDetect = true

	// Explicit schema for the destination table.
	schema := bigquery.Schema{}
	schema = append(schema,
		&bigquery.FieldSchema{Name: "name", Type: bigquery.StringFieldType},
		&bigquery.FieldSchema{Name: "col1", Type: bigquery.StringFieldType},
		&bigquery.FieldSchema{Name: "col2", Type: bigquery.StringFieldType},
		&bigquery.FieldSchema{Name: "col3", Type: bigquery.FloatFieldType},
	)
	srcRef.FileConfig.Schema = schema

	// Load the CSV into today's partition ("table$YYYYMMDD").
	current := time.Now().Format("20060102")
	targetTable := "from_csv$" + current
	fmt.Println("targetTable:", targetTable)

	loader := client.Dataset("test_yuuito").Table(targetTable).LoaderFrom(srcRef)
	loader.SchemaUpdateOptions = []string{"ALLOW_FIELD_ADDITION"}
	loader.LoadConfig.TimePartitioning = &bigquery.TimePartitioning{}
	loader.LoadConfig.CreateDisposition = bigquery.CreateIfNeeded
	loader.LoadConfig.WriteDisposition = bigquery.WriteAppend

	// Run the load job and wait for it to finish.
	job, err := loader.Run(ctx)
	if err != nil {
		fmt.Println("bq job run error")
		fmt.Printf("job: %v\n", job)
		fmt.Println(err)
		return
	}
	jobID := job.ID()
	fmt.Printf("The job ID is %s\n", jobID)

	status, err := job.Wait(ctx)
	if err != nil {
		fmt.Println("bq job wait error")
		fmt.Printf("job: %v\n", job)
		fmt.Println(err)
		return
	}
	if status.Err() != nil {
		fmt.Println("bq job status error")
		fmt.Println("errors:")
		for _, e := range status.Errors {
			fmt.Println(e.Message)
		}
		return
	}
	fmt.Println("end")
}
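
Because the destination is written as "from_csv$YYYYMMDD", the load targets that day's partition of the from_csv table. A small follow-up sketch, under the same assumptions as above (masked placeholder project ID, dataset test_yuuito, table from_csv, and Application Default Credentials instead of the inline key), reads the table metadata to confirm the partitioned table once the job has finished:

package main

import (
	"context"
	"fmt"

	"cloud.google.com/go/bigquery"
)

func main() {
	ctx := context.Background()
	// Placeholder project ID as in the snippet above; this sketch assumes
	// Application Default Credentials are available instead of inline JSON.
	client, err := bigquery.NewClient(ctx, "*******************")
	if err != nil {
		fmt.Printf("Failed to create client: %v\n", err)
		return
	}
	defer client.Close()

	// Read metadata of the base table (its partitions share the same schema).
	md, err := client.Dataset("test_yuuito").Table("from_csv").Metadata(ctx)
	if err != nil {
		fmt.Println("failed to read table metadata:", err)
		return
	}
	fmt.Println("total rows:", md.NumRows)
	if md.TimePartitioning != nil {
		fmt.Println("time-partitioned; partition expiration:", md.TimePartitioning.Expiration)
	}
}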