Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ sudo mkdir -p /data && sudo mount /dev/nvme0n1 /data
```
### Copies data from your GCS bucket created earlier to the local drive.
sudo mkdir -p /data/imagenet && sudo chmod -R 777 /data
gsutil -m cp -r gs://<your bucket with imagenet>/imagenet/* /data/imagenet/
gcloud storage cp --recursive gs://<your bucket with imagenet>/imagenet/* /data/imagenet/
```

4. Install TensorFlow 1.12 compiled with CUDA 10.0, cuDNN 7.3, and AVX2.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ export USER=weicheng # Your user name.
export STORAGE_BUCKET=gs://${USER}-data

# Create storage bucket.
gsutil mb $STORAGE_BUCKET
gcloud storage buckets create $STORAGE_BUCKET

# Download COCO data.
mkdir ~/data
Expand All @@ -66,16 +66,16 @@ bash download_and_preprocess_coco.sh ~/data/coco
# Create coco directory under the bucket.
mkdir coco
touch coco/empty.txt
gsutil cp -r coco $STORAGE_BUCKET
gcloud storage cp --recursive coco $STORAGE_BUCKET

# Move data over to bucket.
gsutil -m cp data/coco/*.tfrecord gs://${USER}-data/coco
gsutil -m cp data/coco/raw-data/annotations/*.json gs://${USER}-data/coco
gcloud storage cp data/coco/*.tfrecord gs://${USER}-data/coco
gcloud storage cp data/coco/raw-data/annotations/*.json gs://${USER}-data/coco

# Create shapemask directory under the bucket.
mkdir shapemask_exp
touch shapemask_exp/empty.txt
gsutil cp -r shapemask_exp gs://${USER}-data/
gcloud storage cp --recursive shapemask_exp gs://${USER}-data/

# Back to home directory.
cd ~
Expand Down
2 changes: 1 addition & 1 deletion models/experimental/inference/setup-pool.sh
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ function check_tpu_tf_versions() {
}

function check_model_exists() {
num_versions=$(expr $(gsutil ls ${MODEL_BASE_PATH?} | wc | awk '{print $1}')-1)
num_versions=$(( $(gcloud storage ls ${MODEL_BASE_PATH?} | wc -l) - 1 ))
if (( num_versions < 1 )); then
err "The MODEL_BASE_PATH provided is not valid."
fi
Expand Down
6 changes: 3 additions & 3 deletions models/experimental/mnist_jupyter/Cloud-TPU-Demo.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -108,10 +108,10 @@
"metadata": {},
"outputs": [],
"source": [
"!gsutil mb -c regional -l us-central1 gs://$GCS_DATA_BUCKET\n",
"!gsutil mb -c regional -l us-central1 gs://$GCS_CKPT_BUCKET\n",
"!gcloud storage buckets create --default-storage-class=regional --location=us-central1 gs://$GCS_DATA_BUCKET\n",
"!gcloud storage buckets create --default-storage-class=regional --location=us-central1 gs://$GCS_CKPT_BUCKET\n",
"\n",
"!gsutil iam ch serviceAccount:`gcloud alpha compute tpus describe $TPU_NAME | grep serviceAccount | cut -d' ' -f2`:admin gs://$GCS_DATA_BUCKET gs://$GCS_CKPT_BUCKET && echo 'Successfully set permissions!'"
"!gcloud storage buckets add-iam-policy-binding gs://$GCS_DATA_BUCKET --member=serviceAccount:`gcloud alpha compute tpus describe $TPU_NAME | grep serviceAccount | cut -d' ' -f2` --role=admin && gcloud storage buckets add-iam-policy-binding gs://$GCS_CKPT_BUCKET --member=serviceAccount:`gcloud alpha compute tpus describe $TPU_NAME | grep serviceAccount | cut -d' ' -f2` --role=admin && echo 'Successfully set permissions!'"
]
},
{
Expand Down
16 changes: 5 additions & 11 deletions models/official/detection/projects/openseg/OpenSeg_demo.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -204,10 +204,8 @@
"Copying gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/exported_model/graph_def.txt...\n",
"Copying gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/exported_model/saved_model.pb...\n",
"-\n",
"==\u003e NOTE: You are performing a sequence of gsutil operations that may\n",
"run significantly faster if you instead use gsutil -m cp ... Please\n",
"see the -m section under \"gsutil help options\" for further information\n",
"about when gsutil -m can be advantageous.\n",
"==\u003e NOTE: You are performing a sequence of gcloud storage operations that may\n",
"run significantly faster if you instead use gcloud storage cp ... Please\n", "see the -m section under \"gcloud storage --help\" for further information\n", "about when gsutil -m can be advantageous.\n",
"\n",
"Copying gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/exported_model/variables/variables.data-00000-of-00001...\n",
"Copying gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/exported_model/variables/variables.index...\n",
Expand All @@ -217,10 +215,7 @@
"Copying gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/examples/camera.jpg...\n",
"Copying gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/examples/police_car.jpg...\n",
"/ [3 files][465.1 KiB/465.1 KiB] \n",
"==\u003e NOTE: You are performing a sequence of gsutil operations that may\n",
"run significantly faster if you instead use gsutil -m cp ... Please\n",
"see the -m section under \"gsutil help options\" for further information\n",
"about when gsutil -m can be advantageous.\n",
"==\u003e NOTE: You are performing a sequence of gcloud storage operations that may\n", "run significantly faster if you instead use gcloud storage cp ... Please\n", "gcloud storage commands are parallel by default; a flag like -m is not needed.\n", "about when gsutil -m can be advantageous.\n",
"\n",
"Copying gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/examples/telephone_kiosk.jpg...\n",
"/ [4 files][665.2 KiB/665.2 KiB] \n",
Expand All @@ -230,9 +225,8 @@
],
"source": [
"#@title Download files \n",
"!gsutil cp -r gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/exported_model ./\n",
"!gsutil cp -r gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/examples ./"
]
"!gcloud storage cp --recursive gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/exported_model .\n",
"!gcloud storage cp --recursive gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/examples ./" ]
},
{
"cell_type": "code",
Expand Down
4 changes: 1 addition & 3 deletions models/official/detection/projects/vild/ViLD_demo.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -132,9 +132,7 @@
},
"source": [
"#@title Download files\n",
"!gsutil cp -r gs://cloud-tpu-checkpoints/detection/projects/vild/colab/image_path_v2 ./\n",
"!gsutil cp -r gs://cloud-tpu-checkpoints/detection/projects/vild/colab/examples ./"
],
"!gcloud storage cp --recursive gs://cloud-tpu-checkpoints/detection/projects/vild/colab/image_path_v2 ./\n", "!gcloud storage cp --recursive gs://cloud-tpu-checkpoints/detection/projects/vild/colab/examples ./" ],
"execution_count": null,
"outputs": [
{
Expand Down
2 changes: 1 addition & 1 deletion models/official/resnet/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ created by `ctpu up`, this argument may not be necessary.)
`$MODEL_DIR` is a GCS location (a URL starting with `gs://` where both the GCE
VM and the associated Cloud TPU have write access, something like `gs://userid-
dev-imagenet-output/model`. (TensorFlow can't create the bucket; you have to
create it with `gsutil mb <bucket>`.) This bucket is used to save checkpoints
create it with `gcloud storage buckets create <bucket>`.) This bucket is used to save checkpoints
and the training result, so that the training steps are cumulative when you
reuse the model directory. If you do 1000 steps, for example, and you reuse the
model directory, on a subsequent run, it will skip the first 1000 steps, because
Expand Down
8 changes: 4 additions & 4 deletions models/official/retinanet/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ You can create a bucket using the
the command line with gcloud storage:

```
gsutil mb ${GCS_BUCKET}
gcloud storage buckets create ${GCS_BUCKET}
```

## Preparing the COCO dataset
Expand Down Expand Up @@ -86,8 +86,8 @@ training. We can use `gsutil` to copy the files over. We also want to save the
annotation files: we use these to validate our model performance:

```
gsutil -m cp ./data/dir/coco/*.tfrecord ${GCS_BUCKET}/coco
gsutil cp ./data/dir/coco/raw-data/annotations/*.json ${GCS_BUCKET}/coco
gcloud storage cp ./data/dir/coco/*.tfrecord ${GCS_BUCKET}/coco
gcloud storage cp ./data/dir/coco/raw-data/annotations/*.json ${GCS_BUCKET}/coco
```

## Installing extra packages
Expand Down Expand Up @@ -221,7 +221,7 @@ test that we can read our model directory and validation files.
# export GCS_BUCKET as above

# Copy over the annotation file we created during preprocessing
gsutil cp ${GCS_BUCKET}/coco/instances_val2017.json .
gcloud storage cp ${GCS_BUCKET}/coco/instances_val2017.json .

python tpu/models/official/retinanet/retinanet_main.py \
--use_tpu=False \
Expand Down
3 changes: 1 addition & 2 deletions tools/colab/bert_finetuning_with_cloud_tpus.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -4236,8 +4236,7 @@
"# Setup task specific model and TPU running config.\n",
"BERT_PRETRAINED_DIR = 'gs://cloud-tpu-checkpoints/bert/' + BERT_MODEL \n",
"print('***** BERT pretrained directory: {} *****'.format(BERT_PRETRAINED_DIR))\n",
"!gsutil ls $BERT_PRETRAINED_DIR\n",
"\n",
"!gcloud storage ls $BERT_PRETRAINED_DIR\n", "\n",
"CONFIG_FILE = os.path.join(BERT_PRETRAINED_DIR, 'bert_config.json')\n",
"INIT_CHECKPOINT = os.path.join(BERT_PRETRAINED_DIR, 'bert_model.ckpt')\n",
"\n",
Expand Down
2 changes: 1 addition & 1 deletion tools/ctpu/tutorial.md
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ variable you set in the last step will be automatically substituted so you can
copy-paste the following commands unmodified):

```bash
gsutil cp -r ./data gs://$GCS_BUCKET_NAME/mnist/data
gcloud storage cp --recursive ./data gs://$GCS_BUCKET_NAME/mnist/data
```

### Train your model ###
Expand Down
4 changes: 2 additions & 2 deletions tools/datasets/download_and_preprocess_coco_k8s.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,8 @@ spec:
DEBIAN_FRONTEND=noninteractive apt-get update &&
cd /tensorflow_tpu_models/tools/datasets &&
bash download_and_preprocess_coco.sh /scratch-dir &&
gsutil -m cp /scratch-dir/*.tfrecord ${DATA_BUCKET}/coco &&
gsutil cp /scratch-dir/raw-data/annotations/*.json ${DATA_BUCKET}/coco
gcloud storage cp /scratch-dir/*.tfrecord ${DATA_BUCKET}/coco &&
gcloud storage cp /scratch-dir/raw-data/annotations/*.json ${DATA_BUCKET}/coco
env:
# [REQUIRED] Must specify the Google Cloud Storage location where the
# COCO dataset will be stored.
Expand Down
2 changes: 1 addition & 1 deletion tools/datasets/jpeg_to_tf_record.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,7 +204,7 @@ def convert_to_example(csvline, categories):
# and this could cause confusion if earlier run has 0000-of-0005, for eg
if on_cloud:
try:
subprocess.check_call('gsutil -m rm -r {}'.format(OUTPUT_DIR).split())
subprocess.check_call('gcloud storage rm --recursive {}'.format(OUTPUT_DIR).split())
except subprocess.CalledProcessError:
pass
else:
Expand Down