HP BL460
- 48 GB memory
- 2 x Intel Xeon X5675 @ 3.07 GHz
- 2 x 10 Gbps NICs
- 2 TB NetApp NFS volume for ES data
apiVersion: "serving.kubeflow.org/v1alpha2" | |
kind: "KFService" | |
metadata: | |
name: "mnist-s3" | |
spec: | |
default: | |
predictor: | |
serviceAccountName: kfserving-sa | |
tensorflow: | |
storageUri: "s3://kfserving-eks-demo/flowers" |
apiVersion: v1
kind: Secret
metadata:
  name: mysecret
  annotations:
    serving.kubeflow.org/s3-endpoint: s3.us-west-2.amazonaws.com
    serving.kubeflow.org/s3-usehttps: "1"
    serving.kubeflow.org/s3-verifyssl: "1"
    serving.kubeflow.org/s3-region: us-west-2
type: Opaque
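The KFService above points its predictor at serviceAccountName: kfserving-sa, so the S3-annotated secret typically has to be attached to a ServiceAccount of that name before KFServing will inject the credentials. A minimal sketch, assuming the kfserving-sa and mysecret names from the snippets above:

apiVersion: v1
kind: ServiceAccount
metadata:
  name: kfserving-sa
secrets:
  - name: mysecret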
package main

import (
	"net/http"
	"net/url"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
)
#!/public/spark-0.9.1/bin/pyspark
import os
import sys
# Set the path to the Spark installation
# (the path where Spark was built using sbt/sbt assembly)
os.environ['SPARK_HOME'] = "/public/spark-0.9.1"
# os.environ['SPARK_HOME'] = "/home/jie/d2/spark-0.9.1"
# Append to PYTHONPATH so that pyspark can be found
sys.path.append(os.path.join(os.environ['SPARK_HOME'], "python"))
# Delete the possibly existing autocomplete test index
curl -X DELETE localhost:9200/autocomplete_test
# Put the config of the autocomplete index
curl -X PUT localhost:9200/autocomplete_test -d '
{
    "settings" : {
        "index" : {
            "analysis" : {
                "analyzer" : {
# ========================================
# Testing n-gram analysis in ElasticSearch
# ========================================
curl -X DELETE localhost:9200/ngram_test
curl -X PUT localhost:9200/ngram_test -d '
{
    "settings" : {
        "index" : {
            "analysis" : {