Skip to content

Commit 7dc921f

Browse files
Merge pull request #3537 from bhandarivijay-png/ai-gsutil-migration-4d09aa66c2ea4a668142f540fcf1570f
chore: Migrate gsutil usage to gcloud storage
2 parents c3186a4 + 77e7c99 commit 7dc921f

12 files changed

Lines changed: 26 additions & 35 deletions

File tree

benchmarks/ResNet-50_v1.5_Performance_Comparison_TensorFlow_1.12_GCP.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -156,7 +156,7 @@ sudo mkdir -p /data && sudo mount /dev/nvme0n1 /data
156156
```
157157
### Copies data from your GCS bucket created earlier to the local drive.
158158
sudo mkdir -p /data/imagenet && sudo chmod -R 777 /data
159-
gsutil -m cp -r gs://<your bucket with imagenet>/imagenet/* /data/imagenet/
159+
gcloud storage cp --recursive gs://<your bucket with imagenet>/imagenet/* /data/imagenet/
160160
```
161161

162162
4. Install TensorFlow 1.12 compiled with CUDA 10.0, cuDNN 7.3, and AVX2.

benchmarks/ShapeMask_Performance_Comparison_TensorFlow_1.14_GCP.md

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ export USER=weicheng # Your user name.
5454
export STORAGE_BUCKET=gs://${USER}-data
5555
5656
# Create storage bucket.
57-
gsutil mb $STORAGE_BUCKET
57+
gcloud storage buckets create $STORAGE_BUCKET
5858
5959
# Download COCO data.
6060
mkdir ~/data
@@ -66,16 +66,16 @@ bash download_and_preprocess_coco.sh ~/data/coco
6666
# Create coco directory under the bucket.
6767
mkdir coco
6868
touch coco/empty.txt
69-
gsutil cp -r coco $STORAGE_BUCKET
69+
gcloud storage cp --recursive coco $STORAGE_BUCKET
7070
7171
# Move data over to bucket.
72-
gsutil -m cp data/coco/*.tfrecord gs://${USER}-data/coco
73-
gsutil -m cp data/coco/raw-data/annotations/*.json gs://${USER}-data/coco
72+
gcloud storage cp data/coco/*.tfrecord gs://${USER}-data/coco
73+
gcloud storage cp data/coco/raw-data/annotations/*.json gs://${USER}-data/coco
7474
7575
# Create shapemask directory under the bucket.
7676
mkdir shapemask_exp
7777
touch shapemask_exp/empty.txt
78-
gsutil cp -r shapemask_exp gs://${USER}-data/
78+
gcloud storage cp --recursive shapemask_exp gs://${USER}-data/
7979
8080
# Back to home directory.
8181
cd ~

models/experimental/inference/setup-pool.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,7 @@ function check_tpu_tf_versions() {
6666
}
6767

6868
function check_model_exists() {
69-
num_versions=$(expr $(gsutil ls ${MODEL_BASE_PATH?} | wc | awk '{print $1}')-1)
69+
num_versions=$(( $(gcloud storage ls ${MODEL_BASE_PATH?} | wc -l) - 1 ))
7070
if [[ $num_versions < 1 ]]; then
7171
err "The MODEL_BASE_PATH provided is not valid."
7272
fi

models/experimental/mnist_jupyter/Cloud-TPU-Demo.ipynb

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -108,10 +108,10 @@
108108
"metadata": {},
109109
"outputs": [],
110110
"source": [
111-
"!gsutil mb -c regional -l us-central1 gs://$GCS_DATA_BUCKET\n",
112-
"!gsutil mb -c regional -l us-central1 gs://$GCS_CKPT_BUCKET\n",
111+
"!gcloud storage buckets create --default-storage-class=regional --location=us-central1 gs://$GCS_DATA_BUCKET\n",
112+
"!gcloud storage buckets create --default-storage-class=regional --location=us-central1 gs://$GCS_CKPT_BUCKET\n",
113113
"\n",
114-
"!gsutil iam ch serviceAccount:`gcloud alpha compute tpus describe $TPU_NAME | grep serviceAccount | cut -d' ' -f2`:admin gs://$GCS_DATA_BUCKET gs://$GCS_CKPT_BUCKET && echo 'Successfully set permissions!'"
114+
"!gcloud storage buckets add-iam-policy-binding gs://$GCS_DATA_BUCKET --member=serviceAccount:`gcloud alpha compute tpus describe $TPU_NAME | grep serviceAccount | cut -d' ' -f2` --role=roles/storage.admin && gcloud storage buckets add-iam-policy-binding gs://$GCS_CKPT_BUCKET --member=serviceAccount:`gcloud alpha compute tpus describe $TPU_NAME | grep serviceAccount | cut -d' ' -f2` --role=roles/storage.admin && echo 'Successfully set permissions!'"
115115
]
116116
},
117117
{

models/official/detection/projects/openseg/OpenSeg_demo.ipynb

Lines changed: 5 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -204,10 +204,8 @@
204204
"Copying gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/exported_model/graph_def.txt...\n",
205205
"Copying gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/exported_model/saved_model.pb...\n",
206206
"-\n",
207-
"==\u003e NOTE: You are performing a sequence of gsutil operations that may\n",
208-
"run significantly faster if you instead use gsutil -m cp ... Please\n",
209-
"see the -m section under \"gsutil help options\" for further information\n",
210-
"about when gsutil -m can be advantageous.\n",
207+
"==\u003e NOTE: You are performing a sequence of gcloud storage operations that may\n",
208+
"run significantly faster with parallel copies. gcloud storage cp is\n", "parallel by default, so no equivalent of the gsutil -m flag is needed.\n",
211209
"\n",
212210
"Copying gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/exported_model/variables/variables.data-00000-of-00001...\n",
213211
"Copying gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/exported_model/variables/variables.index...\n",
@@ -217,10 +215,7 @@
217215
"Copying gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/examples/camera.jpg...\n",
218216
"Copying gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/examples/police_car.jpg...\n",
219217
"/ [3 files][465.1 KiB/465.1 KiB] \n",
220-
"==\u003e NOTE: You are performing a sequence of gsutil operations that may\n",
221-
"run significantly faster if you instead use gsutil -m cp ... Please\n",
222-
"see the -m section under \"gsutil help options\" for further information\n",
223-
"about when gsutil -m can be advantageous.\n",
218+
"==\u003e NOTE: You are performing a sequence of gcloud storage operations that may\n", "run significantly faster with parallel copies. gcloud storage cp is\n", "parallel by default, so no equivalent of the gsutil -m flag is needed.\n",
224219
"\n",
225220
"Copying gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/examples/telephone_kiosk.jpg...\n",
226221
"/ [4 files][665.2 KiB/665.2 KiB] \n",
@@ -230,9 +225,8 @@
230225
],
231226
"source": [
232227
"#@title Download files \n",
233-
"!gsutil cp -r gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/exported_model ./\n",
234-
"!gsutil cp -r gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/examples ./"
235-
]
228+
"!gcloud storage cp --recursive gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/exported_model ./\n",
229+
"!gcloud storage cp --recursive gs://cloud-tpu-checkpoints/detection/projects/openseg/colab/examples ./"
]
236230
},
237231
{
238232
"cell_type": "code",

models/official/detection/projects/vild/ViLD_demo.ipynb

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -132,9 +132,7 @@
132132
},
133133
"source": [
134134
"#@title Download files\n",
135-
"!gsutil cp -r gs://cloud-tpu-checkpoints/detection/projects/vild/colab/image_path_v2 ./\n",
136-
"!gsutil cp -r gs://cloud-tpu-checkpoints/detection/projects/vild/colab/examples ./"
137-
],
135+
"!gcloud storage cp --recursive gs://cloud-tpu-checkpoints/detection/projects/vild/colab/image_path_v2 ./\n",
"!gcloud storage cp --recursive gs://cloud-tpu-checkpoints/detection/projects/vild/colab/examples ./"
],
138136
"execution_count": null,
139137
"outputs": [
140138
{

models/official/resnet/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@ created by `ctpu up`, this argument may not be necessary.)
6060
`$MODEL_DIR` is a GCS location (a URL starting with `gs://` where both the GCE
6161
VM and the associated Cloud TPU have write access, something like `gs://userid-
6262
dev-imagenet-output/model`. (TensorFlow can't create the bucket; you have to
63-
create it with `gsutil mb <bucket>`.) This bucket is used to save checkpoints
63+
create it with `gcloud storage buckets create <bucket>`.) This bucket is used to save checkpoints
6464
and the training result, so that the training steps are cumulative when you
6565
reuse the model directory. If you do 1000 steps, for example, and you reuse the
6666
model directory, on a subsequent run, it will skip the first 1000 steps, because

models/official/retinanet/README.md

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ You can create a bucket using the
5252
the command line with gsutil:
5353

5454
```
55-
gsutil mb ${GCS_BUCKET}
55+
gcloud storage buckets create ${GCS_BUCKET}
5656
```
5757

5858
## Preparing the COCO dataset
@@ -86,8 +86,8 @@ training. We can use `gsutil` to copy the files over. We also want to save the
8686
annotation files: we use these to validate our model performance:
8787

8888
```
89-
gsutil -m cp ./data/dir/coco/*.tfrecord ${GCS_BUCKET}/coco
90-
gsutil cp ./data/dir/coco/raw-data/annotations/*.json ${GCS_BUCKET}/coco
89+
gcloud storage cp ./data/dir/coco/*.tfrecord ${GCS_BUCKET}/coco
90+
gcloud storage cp ./data/dir/coco/raw-data/annotations/*.json ${GCS_BUCKET}/coco
9191
```
9292

9393
## Installing extra packages
@@ -221,7 +221,7 @@ test that we can read our model directory and validation files.
221221
# export GCS_BUCKET as above
222222
223223
# Copy over the annotation file we created during preprocessing
224-
gsutil cp ${GCS_BUCKET}/coco/instances_val2017.json .
224+
gcloud storage cp ${GCS_BUCKET}/coco/instances_val2017.json .
225225
226226
python tpu/models/official/retinanet/retinanet_main.py \
227227
--use_tpu=False \

tools/colab/bert_finetuning_with_cloud_tpus.ipynb

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4236,8 +4236,7 @@
42364236
"# Setup task specific model and TPU running config.\n",
42374237
"BERT_PRETRAINED_DIR = 'gs://cloud-tpu-checkpoints/bert/' + BERT_MODEL \n",
42384238
"print('***** BERT pretrained directory: {} *****'.format(BERT_PRETRAINED_DIR))\n",
4239-
"!gsutil ls $BERT_PRETRAINED_DIR\n",
4240-
"\n",
4239+
"!gcloud storage ls $BERT_PRETRAINED_DIR\n",
"\n",
42414240
"CONFIG_FILE = os.path.join(BERT_PRETRAINED_DIR, 'bert_config.json')\n",
42424241
"INIT_CHECKPOINT = os.path.join(BERT_PRETRAINED_DIR, 'bert_model.ckpt')\n",
42434242
"\n",

tools/ctpu/tutorial.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@ variable you set in the last step will be automatically substituted so you can
148148
copy-paste the following commands unmodified):
149149

150150
```bash
151-
gsutil cp -r ./data gs://$GCS_BUCKET_NAME/mnist/data
151+
gcloud storage cp --recursive ./data gs://$GCS_BUCKET_NAME/mnist/data
152152
```
153153

154154
### Train your model ###

0 commit comments

Comments
 (0)