Commit 8e01e48

Migrate gsutil usage to gcloud storage (#3010)
* Migrate gsutil usage to gcloud
* Manual Changes
1 parent b51cade commit 8e01e48
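
At a glance, the command substitutions this commit applies (drawn directly from the hunks below; angle-bracketed arguments are placeholders, not literal values):

  gsutil mb <bucket>                     ->  gcloud storage buckets create <bucket>
  gsutil ls                              ->  gcloud storage ls
  gsutil cp [-r] <src> <dst>             ->  gcloud storage cp [--recursive] <src> <dst>
  gsutil -m rm -rf <path>                ->  gcloud storage rm --recursive --continue-on-error <path>
  gsutil -m defacl ch -u <acct>:R <bkt>  ->  gcloud storage buckets update --add-default-object-acl-grant entity=user-<acct>,role=READER <bkt>
  gsutil -m acl ch -u <acct>:R -r <bkt>  ->  gcloud storage objects update --add-acl-grant entity=user-<acct>,role=READER --recursive <bkt>
  gsutil -m acl ch -u <acct>:W <bkt>     ->  gcloud storage buckets update --add-acl-grant entity=user-<acct>,role=WRITER <bkt>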

File tree

4 files changed (+15, -15 lines)


courses/ahybrid/v1.0/common/scripts/create_remote.sh

Lines changed: 3 additions & 3 deletions
@@ -14,12 +14,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-gsutil mb $KOPS_STORE
+gcloud storage buckets create $KOPS_STORE

 n=0
 until [ $n -ge 5 ]
 do
-gsutil ls | grep $KOPS_STORE && break
+gcloud storage ls | grep $KOPS_STORE && break
 n=$[$n+1]
 sleep 3
 done
@@ -61,7 +61,7 @@ fi

 echo "copying the kubeconfig file for later use..."
 kops export kubecfg --name $C2_FULLNAME --state=$KOPS_STORE
-gsutil cp $KF $KOPS_STORE
+gcloud storage cp $KF $KOPS_STORE

 echo "creating service account and granting role..."
 gcloud iam service-accounts create connect-sa-op
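
For context, the changed lines sit inside a small wait loop: the script creates the kOps state bucket and then polls the bucket listing until it shows up (up to five attempts, three seconds apart). A minimal standalone sketch of the same pattern with the new commands, assuming a hypothetical value for KOPS_STORE:

KOPS_STORE=gs://example-kops-state   # hypothetical; the real script takes this from its environment
gcloud storage buckets create $KOPS_STORE
n=0
until [ $n -ge 5 ]
do
  gcloud storage ls | grep $KOPS_STORE && break   # succeeds once the new bucket appears in the listing
  n=$[$n+1]
  sleep 3
done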

courses/bdml_fundamentals/demos/earthquakevm/README.md

Lines changed: 1 addition & 1 deletion
@@ -27,7 +27,7 @@ cd training-data-analyst/courses/bdml_fundamentals
 ```
 * Go to the Storage | Browser in the Cloud Console and create a new bucket
 * Copy the files to the bucket:
-```gsutil cp earthquakes.* gs://[YOURBUCKET]```
+```gcloud storage cp earthquakes.* gs://[YOURBUCKET]```
 * Refresh the Storage | Browser to verify that you have new files in Cloud Storage
 * Edit the bucket permissions and add a new member named ```allUsers```
 and give this member Cloud Storage Object Viewer permissions
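
For example, with a hypothetical bucket named gs://my-earthquake-demo standing in for [YOURBUCKET], the updated copy step from the README reads:

gcloud storage cp earthquakes.* gs://my-earthquake-demo   # hypothetical bucket name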

courses/machine_learning/cloudmle/cloudmle.ipynb

Lines changed: 10 additions & 10 deletions
@@ -84,9 +84,9 @@
 " print(response['serviceAccount'])\")\n",
 "\n",
 "echo \"Authorizing the Cloud ML Service account $SVC_ACCOUNT to access files in $BUCKET\"\n",
-"gsutil -m defacl ch -u $SVC_ACCOUNT:R gs://$BUCKET\n",
-"gsutil -m acl ch -u $SVC_ACCOUNT:R -r gs://$BUCKET # error message (if bucket is empty) can be ignored\n",
-"gsutil -m acl ch -u $SVC_ACCOUNT:W gs://$BUCKET"
+"gcloud storage buckets update --add-default-object-acl-grant entity=user-$SVC_ACCOUNT,role=READER gs://$BUCKET\n",
+"gcloud storage objects update --add-acl-grant entity=user-$SVC_ACCOUNT,role=READER --recursive gs://$BUCKET # error message (if bucket is empty) can be ignored\n",
+"gcloud storage buckets update --add-acl-grant entity=user-$SVC_ACCOUNT,role=WRITER gs://$BUCKET",
 ]
 },
 {
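
In the hunk above, the old gsutil grants map onto gcloud storage's entity=...,role=... form: defacl ch -u $SVC_ACCOUNT:R becomes a default-object-ACL grant with role=READER, acl ch -u $SVC_ACCOUNT:R -r becomes a recursive object-ACL grant with role=READER, and the :W grant on the bucket becomes a bucket-ACL grant with role=WRITER. A minimal sketch of the same three commands outside the notebook, with hypothetical values for the variables:

SVC_ACCOUNT=service-123@example.iam.gserviceaccount.com   # hypothetical service account
BUCKET=my-staging-bucket                                  # hypothetical bucket name
gcloud storage buckets update --add-default-object-acl-grant entity=user-$SVC_ACCOUNT,role=READER gs://$BUCKET
gcloud storage objects update --add-acl-grant entity=user-$SVC_ACCOUNT,role=READER --recursive gs://$BUCKET
gcloud storage buckets update --add-acl-grant entity=user-$SVC_ACCOUNT,role=WRITER gs://$BUCKET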
@@ -271,8 +271,8 @@
 "source": [
 "%%bash\n",
 "echo $BUCKET\n",
-"gsutil -m rm -rf gs://${BUCKET}/taxifare/smallinput/\n",
-"gsutil -m cp ${PWD}/*.csv gs://${BUCKET}/taxifare/smallinput/"
+"gcloud storage rm --recursive --continue-on-error gs://${BUCKET}/taxifare/smallinput/\n",
+"gcloud storage cp ${PWD}/*.csv gs://${BUCKET}/taxifare/smallinput/"
 ]
 },
 {
@@ -285,7 +285,7 @@
 "OUTDIR=gs://${BUCKET}/taxifare/smallinput/taxi_trained\n",
 "JOBNAME=lab3a_$(date -u +%y%m%d_%H%M%S)\n",
 "echo $OUTDIR $REGION $JOBNAME\n",
-"gsutil -m rm -rf $OUTDIR\n",
+"gcloud storage rm --recursive --continue-on-error $OUTDIR\n",
 "gcloud ai-platform jobs submit training $JOBNAME \\\n",
 " --region=$REGION \\\n",
 " --module-name=trainer.task \\\n",
@@ -327,8 +327,8 @@
 "outputs": [],
 "source": [
 "%%bash\n",
-"gsutil cp -r ${PWD}/taxi_trained gs://${BUCKET}/taxifare/smallinput/ \n",
-"gsutil ls gs://${BUCKET}/taxifare/smallinput/taxi_trained/export/exporter"
+"gcloud storage cp --recursive ${PWD}/taxi_trained gs://${BUCKET}/taxifare/smallinput/ \n",
+"gcloud storage ls gs://${BUCKET}/taxifare/smallinput/taxi_trained/export/exporter"
 ]
 },
 {
@@ -340,7 +340,7 @@
 "%%bash\n",
 "MODEL_NAME=\"taxifare\"\n",
 "MODEL_VERSION=\"v1\"\n",
-"MODEL_LOCATION=$(gsutil ls gs://${BUCKET}/taxifare/smallinput/taxi_trained/export/exporter | tail -1)\n",
+"MODEL_LOCATION=$(gcloud storage ls gs://${BUCKET}/taxifare/smallinput/taxi_trained/export/exporter | tail -1)\n",
 "echo \"Run these commands one-by-one (the very first time, you'll create a model and then create a version)\"\n",
 "#gcloud ai-platform versions delete ${MODEL_VERSION} --model ${MODEL_NAME}\n",
 "#gcloud ai-platform models delete ${MODEL_NAME}\n",
@@ -463,7 +463,7 @@
 "JOBNAME=lab3a_$(date -u +%y%m%d_%H%M%S)\n",
 "CRS_BUCKET=cloud-training-demos # use the already exported data\n",
 "echo $OUTDIR $REGION $JOBNAME\n",
-"gsutil -m rm -rf $OUTDIR\n",
+"gcloud storage rm --recursive --continue-on-error $OUTDIR\n",
 "gcloud ai-platform jobs submit training $JOBNAME \\\n",
 " --region=$REGION \\\n",
 " --module-name=trainer.task \\\n",

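The remaining hunks in this notebook all apply the same two substitutions: gsutil -m rm -rf becomes gcloud storage rm --recursive --continue-on-error (gcloud storage parallelizes by default, so no -m equivalent is needed, and --continue-on-error keeps going past failures much like gsutil's -f), and gsutil cp -r becomes gcloud storage cp --recursive. A minimal sketch of the clear-and-restage step, assuming BUCKET is already exported as in the notebook:

OUTDIR=gs://${BUCKET}/taxifare/smallinput/taxi_trained               # BUCKET assumed set in the environment
gcloud storage rm --recursive --continue-on-error $OUTDIR            # was: gsutil -m rm -rf $OUTDIR
gcloud storage cp ${PWD}/*.csv gs://${BUCKET}/taxifare/smallinput/   # was: gsutil -m cp ${PWD}/*.csv ...
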
courses/unstructured/ML-Tests-Solution.ipynb

Lines changed: 1 addition & 1 deletion
@@ -68,7 +68,7 @@
 ],
 "source": [
 "import subprocess\n",
-"images = subprocess.check_output([\"gsutil\", \"ls\", \"gs://{}/unstructured/photos\".format(BUCKET)])\n",
+"images = subprocess.check_output([\"gcloud\", \"storage\", \"ls\", \"gs://{}/unstructured/photos\".format(BUCKET)])\n",
 "images = list(filter(None,images.split('\\n')))\n",
 "print(images)"
 ]
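
This notebook cell shells out to the CLI via subprocess, so the change amounts to invoking the new listing command; assuming BUCKET is set, the underlying call it now runs is:

gcloud storage ls gs://$BUCKET/unstructured/photos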
