code.rb
require 'set'

# Collect `num` unique 6-character alphanumeric codes.
# `num` is assumed to be defined earlier in the file.
chars = '1234567890qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM'.chars
codes = Set[]
while codes.size != num
  code = (0...6).map { chars[rand(62)] }.join
  # Set#add? returns nil for duplicates, so only genuinely new codes count.
  next if codes.add?(code).nil?
end
delete_all_rows.go
// deleteAllRows removes every row from tableName with Partitioned DML,
// which Spanner runs in server-side batches instead of one large transaction.
func deleteAllRows(ctx context.Context, db *spanner.Client, tableName string) {
	stmt := spanner.Statement{
		SQL: "DELETE FROM `" + tableName + "` WHERE true",
	}
	_, err := db.PartitionedUpdate(ctx, stmt)
	if err != nil {
		panic(err)
	}
}
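A minimal sketch of how this helper might be wired up; the database path and table name below are placeholders, not values from the original file:

package main

import (
	"context"

	"cloud.google.com/go/spanner"
)

func main() {
	ctx := context.Background()
	// Placeholder path: substitute real project, instance, and database IDs.
	db, err := spanner.NewClient(ctx, "projects/my-project/instances/my-instance/databases/my-db")
	if err != nil {
		panic(err)
	}
	defer db.Close()
	deleteAllRows(ctx, db, "Singers") // hypothetical table name
}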
reverse_bit.go
// ReverseBit returns num with the order of all 64 bits reversed.
// bits is the standard library package math/bits.
func ReverseBit(num uint64) uint64 {
	return bits.Reverse64(num)
}
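A quick sanity check of the behavior; the printed values follow directly from the definition of bits.Reverse64:

fmt.Printf("%#x\n", ReverseBit(1))   // 0x8000000000000000: bit 0 moves to bit 63
fmt.Printf("%#x\n", ReverseBit(0xf)) // 0xf000000000000000: the low nibble becomes the high nibble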
spanner_delete_all_rows_by_multi_pk.go
// deleteAllRowsByMultiPK deletes every row from a table with a two-column
// composite primary key by reading the keys and collecting delete mutations.
func deleteAllRowsByMultiPK(ctx context.Context, db *spanner.Client, tableName string, primaryFirstKeyName string, primarySecondKeyName string) {
	var mutations []*spanner.Mutation
	stmt := spanner.Statement{SQL: "SELECT `" + primaryFirstKeyName + "`, `" + primarySecondKeyName + "` FROM `" + tableName + "`"}
	iter := db.Single().Query(ctx, stmt)
	defer iter.Stop()
	for {
		row, err := iter.Next()
		if err == iterator.Done {
			break
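		}
		if err != nil {
			panic(err)
		}
		// Assumed completion from here on: the key columns are scanned as
		// STRING; adjust the types to match the actual schema.
		var k1, k2 string
		if err := row.Columns(&k1, &k2); err != nil {
			panic(err)
		}
		mutations = append(mutations, spanner.Delete(tableName, spanner.Key{k1, k2}))
	}
	// Apply all collected delete mutations in a single commit.
	if _, err := db.Apply(ctx, mutations); err != nil {
		panic(err)
	}
}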
spanner_delete_all_rows.go
// deleteAllRows (single-key variant) collects each primary key, then deletes
// the rows by key rather than via Partitioned DML.
func deleteAllRows(ctx context.Context, db *spanner.Client, tableName string, primaryKeyName string) {
	var primaryKeys []string
	stmt := spanner.Statement{SQL: `SELECT ` + primaryKeyName + ` FROM ` + tableName}
	iter := db.Single().Query(ctx, stmt)
	defer iter.Stop()
	for {
		row, err := iter.Next()
		if err == iterator.Done {
			break
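		}
		if err != nil {
			panic(err)
		}
		// Assumed completion: primaryKeys is []string, so the key column is
		// scanned as STRING; adjust if the schema differs.
		var key string
		if err := row.Columns(&key); err != nil {
			panic(err)
		}
		primaryKeys = append(primaryKeys, key)
	}
	// Turn each key into a delete mutation and apply them in one commit.
	mutations := make([]*spanner.Mutation, 0, len(primaryKeys))
	for _, key := range primaryKeys {
		mutations = append(mutations, spanner.Delete(tableName, spanner.Key{key}))
	}
	if _, err := db.Apply(ctx, mutations); err != nil {
		panic(err)
	}
}

Worth noting: when the goal is simply to empty a table, the Go client can express that without reading keys at all, via a key set covering every row:

	_, err := db.Apply(ctx, []*spanner.Mutation{spanner.Delete(tableName, spanner.AllKeys())})

The scan-then-delete approach above still has its place when keys must be filtered or batched before deletion.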
more_csutom.py
import requests
import re
import json
from airflow import DAG
from airflow.contrib.sensors.gcs_sensor import GoogleCloudStoragePrefixSensor
from airflow.contrib.operators.gcs_to_bq import GoogleCloudStorageToBigQueryOperator
from airflow.exceptions import AirflowException
from airflow.hooks.http_hook import HttpHook
from airflow.operators.http_operator import SimpleHttpOperator
postprocessing.py
import requests
from airflow import DAG
from airflow.contrib.sensors.gcs_sensor import GoogleCloudStoragePrefixSensor
from airflow.contrib.operators.gcs_to_bq import GoogleCloudStorageToBigQueryOperator
from airflow.exceptions import AirflowException
from airflow.hooks.http_hook import HttpHook
from airflow.operators.http_operator import SimpleHttpOperator
from airflow.operators.python_operator import PythonOperator
from airflow.utils.dates import days_ago
import_bq.py
import requests
from airflow import DAG
from airflow.contrib.sensors.gcs_sensor import GoogleCloudStoragePrefixSensor
from airflow.contrib.operators.gcs_to_bq import GoogleCloudStorageToBigQueryOperator
from airflow.exceptions import AirflowException
from airflow.hooks.http_hook import HttpHook
from airflow.operators.http_operator import SimpleHttpOperator
from airflow.utils.dates import days_ago
from datetime import timedelta, datetime
preprocessing.py
import requests
from airflow import DAG
from airflow.contrib.sensors.gcs_sensor import GoogleCloudStoragePrefixSensor
from airflow.exceptions import AirflowException
from airflow.hooks.http_hook import HttpHook
from airflow.operators.http_operator import SimpleHttpOperator
from airflow.utils.dates import days_ago
from datetime import timedelta, datetime
csv_sensor.py
from airflow import DAG
from airflow.contrib.sensors.gcs_sensor import GoogleCloudStoragePrefixSensor
from airflow.utils.dates import days_ago
from datetime import timedelta, datetime
dag = DAG(
    'inference_pipeline',  # name of the DAG
    default_args={
        'start_date': days_ago(1),  # date from which the DAG is scheduled
        'retries': 1,  # number of retries when a task fails
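    },
)

# Assumed continuation, given the filename and the GoogleCloudStoragePrefixSensor
# import; the bucket and prefix literals below are placeholders, not values
# from the original file.
csv_sensor = GoogleCloudStoragePrefixSensor(
    task_id='wait_for_csv',
    bucket='your-input-bucket',
    prefix='input/',
    dag=dag,
)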