gs://cloud-ml-data/img/flower_photos/dandelion/17388674711_6dca8a2e8b_n.jpg,dandelion gs://cloud-ml-data/img/flower_photos/sunflowers/9555824387_32b151e9b0_m.jpg,sunflowers gs://cloud-ml-data/img/flower_photos/daisy/14523675369_97c31d0b5b.jpg,daisy gs://cloud-ml-data/img/flower_photos/roses/512578026_f6e6f2ad26.jpg,roses gs://cloud-ml-data/img/flower_photos/tulips/497305666_b5d4348826_n.jpg,tulips...
daisy
0
tulips
4
daisy dandelion roses sunflowers tulips
--output_path
# Assign appropriate values.
# PROJECT is read from the active gcloud configuration; JOB_ID is timestamped
# so repeated runs never collide in GCS.
PROJECT=$(gcloud config list project --format "value(core.project)")
JOB_ID="flowers_${USER}_$(date +%Y%m%d_%H%M%S)"
BUCKET="gs://${PROJECT}-ml"
GCS_PATH="${BUCKET}/${USER}/${JOB_ID}"
# Mapping from label names to indices, shared by both preprocessing runs.
DICT_FILE=gs://cloud-ml-data/img/flower_photos/dict.txt

# Preprocess the eval set.
# --cloud runs the preprocessing pipeline remotely instead of locally.
python trainer/preprocess.py \
  --input_dict "$DICT_FILE" \
  --input_path "gs://cloud-ml-data/img/flower_photos/eval_set.csv" \
  --output_path "${GCS_PATH}/preproc/eval" \
  --cloud

# Preprocess the train set.
# Same pipeline as above; only the input CSV and output prefix differ.
python trainer/preprocess.py \
  --input_dict "$DICT_FILE" \
  --input_path "gs://cloud-ml-data/img/flower_photos/train_set.csv" \
  --output_path "${GCS_PATH}/preproc/train" \
  --cloud
# Submit training job.
# NOTE(review): relies on $JOB_ID, $BUCKET and $GCS_PATH being set by the
# preprocessing snippet above — run that first in the same shell.
# Everything after the bare `--` is passed through to trainer.task itself,
# not to gcloud; the preproc/eval* and preproc/train* globs match the
# output shards written by the preprocessing step.
gcloud beta ml jobs submit training "$JOB_ID" \
  --module-name trainer.task \
  --package-path trainer \
  --staging-bucket "$BUCKET" \
  --region us-central1 \
  -- \
  --output_path "${GCS_PATH}/training" \
  --eval_data_paths "${GCS_PATH}/preproc/eval*" \
  --train_data_paths "${GCS_PATH}/preproc/train*"

# Monitor training logs.
gcloud beta ml jobs stream-logs "$JOB_ID"
# NOTE(review): MODEL_NAME is intentionally left blank — choose a name and
# set it before running any of the commands below.
MODEL_NAME=
VERSION_NAME=v1 # for example

# Create the model resource (a container for one or more versions).
gcloud beta ml models create ${MODEL_NAME}

# Create a version from the exported model binaries.
# NOTE(review): ${GCS_PATH} must be the same path used by the training job.
gcloud beta ml versions create \
  --origin ${GCS_PATH}/training/model/ \
  --model ${MODEL_NAME} \
  ${VERSION_NAME}

# Make this version the one served when callers do not name a version.
gcloud beta ml versions set-default --model ${MODEL_NAME} ${VERSION_NAME}
# Copy the image to local disk.
gsutil cp gs://cloud-ml-data/img/flower_photos/tulips/4520577328_a94c11e806_n.jpg flower.jpg

# Create request message in json format.
# Fixed two defects in the original one-liner:
#   1. `print json.dumps(...)` is Python-2-only syntax, and on Python 3
#      base64.b64encode() returns bytes, which json.dumps rejects. Using
#      print(...) plus .decode() makes the command work on both versions.
#   2. `&> request.json` also redirected stderr into the file, so any
#      interpreter warning would corrupt the JSON; plain `>` captures
#      stdout only.
python -c 'import base64, sys, json; img = base64.b64encode(open(sys.argv[1], "rb").read()).decode(); print(json.dumps({"key":"0", "image_bytes": {"b64": img}}))' flower.jpg > request.json

# Call prediction service API to get classifications
gcloud beta ml predict --model ${MODEL_NAME} --json-instances request.json
predictions: - key: '0' prediction: 4 scores: - 8.11998e-09 - 2.64907e-08 - 1.10307e-06 - 3.69488e-11 - 0.999999 - 3.35913e-09
tulips
0.99
def addRequest(self, r):
    """Admit or shed an incoming request based on current load.

    Below the soft quota every request is accepted.  Between the soft
    and hard quotas an increasing fraction of requests is rejected,
    chosen deterministically by a round-robin counter rather than at
    random; once load reaches the hard quota everything is rejected.
    """
    hard_quota = 45
    soft_quota = 25
    steps = 10
    # Width of one load band between the soft and hard quotas.
    band_width = (hard_quota - soft_quota) / steps

    self.received += 1
    self.req_modulus = (self.req_modulus + 1) % steps

    # Number of round-robin slots (out of `steps`) still open at the
    # current load; shrinks to zero as load approaches the hard quota.
    open_slots = int((hard_quota - self.getLoad()) / band_width)

    if self.req_modulus >= open_slots:
        # This request's slot is closed at the current load: shed it.
        self.rejected += 1
    else:
        # Capacity remains: take the request.
        self.active_requests.append(r)
        self.accepted += 1
「Google Container Engine は、私たちが Docker コンテナの管理やオーケストレーションを行ううえで必要としているオープン性、安定性、スケーラビリティを提供してくれます。今年、私たちのお客様のところで、ブラック フライデー(米国における感謝祭翌日の金曜日。年末商戦の初日)とサイバー マンデー(感謝祭翌週の月曜日。ネット ショッピングが盛況)の間にサービスの停止やダウンタイム、中断はまったく発生しませんでした。Google Container Engine はそれに一役買っています」 - GroupBy の最高技術責任者(CTO)、Will Warren 氏
$ base64 -i ~/path/to/downloads/credentials.json
'BASE64_CREDENTIAL_STRING'
apiVersion: v1 kind: Secret metadata: name: google-services-secret type: Opaque data: google-services.json: BASE64_CREDENTIAL_STRING
$ oc create -f google-secret.yaml
"GOOGLE_APPLICATION_CREDENTIALS"
pubsub/bigquery-controller.yaml
pubsub/twitter-stream.yaml
apiVersion: v1 kind: ReplicationController metadata: name: bigquery-controller labels: name: bigquery-controller spec: containers: … env: … - name: GOOGLE_APPLICATION_CREDENTIALS value: /etc/secretspath/google-services.json volumeMounts: - name: secrets mountPath: /etc/secretspath readOnly: true volumes: - name: secrets secret: secretName: google-services-secret
# Fire up PowerShell. powershell # Import the Cloud Tools for PowerShell module on OS X. PS > Import-Module ~/Downloads/osx.10.11-x64/Google.PowerShell.dll # List all of the images in a GCS bucket. Get-GcsObject -Bucket "quoct-photos" | Select Name, Size | Format-Table