diff --git a/courses/machine_learning/asl/open_project/ASL_youtube8m_models/frame/youtube_8m_frame_level.ipynb b/courses/machine_learning/asl/open_project/ASL_youtube8m_models/frame/youtube_8m_frame_level.ipynb
index c60026bc4b..19bb5e8228 100644
--- a/courses/machine_learning/asl/open_project/ASL_youtube8m_models/frame/youtube_8m_frame_level.ipynb
+++ b/courses/machine_learning/asl/open_project/ASL_youtube8m_models/frame/youtube_8m_frame_level.ipynb
@@ -1057,8 +1057,7 @@
 "OUTDIR=gs://$BUCKET/youtube_8m_frame_level/trained_model\n",
 "JOBNAME=job_youtube_8m_frame_level$(date -u +%y%m%d_%H%M%S)\n",
 "echo $OUTDIR $REGION $JOBNAME\n",
- "gsutil -m rm -rf $OUTDIR\n",
- "gcloud ml-engine jobs submit training $JOBNAME \\\n",
+ "gcloud storage rm --recursive --continue-on-error $OUTDIR\n", "gcloud ml-engine jobs submit training $JOBNAME \\\n",
 " --region=$REGION \\\n",
 " --module-name=trainer.task \\\n",
 " --package-path=$PWD/trainer \\\n",
@@ -1156,8 +1155,7 @@
 "OUTDIR=gs://$BUCKET/youtube_8m_frame_level/hyperparam\n",
 "JOBNAME=job_youtube_8m_frame_level$(date -u +%y%m%d_%H%M%S)\n",
 "echo $OUTDIR $REGION $JOBNAME\n",
- "gsutil -m rm -rf $OUTDIR\n",
- "gcloud ml-engine jobs submit training $JOBNAME \\\n",
+ "gcloud storage rm --recursive --continue-on-error $OUTDIR\n", "gcloud ml-engine jobs submit training $JOBNAME \\\n",
 " --region=$REGION \\\n",
 " --module-name=trainer.task \\\n",
 " --package-path=$PWD/trainer \\\n",
@@ -1215,8 +1213,7 @@
 "%bash\n",
 "MODEL_NAME=\"youtube_8m_frame_level\"\n",
 "MODEL_VERSION=\"v1\"\n",
- "MODEL_LOCATION=$(gsutil ls gs://$BUCKET/youtube_8m_frame_level/trained_model/export/exporter/ | tail -1)\n",
- "echo \"Deleting and deploying $MODEL_NAME $MODEL_VERSION from $MODEL_LOCATION ... this will take a few minutes\"\n",
+ "MODEL_LOCATION=$(gcloud storage ls gs://$BUCKET/youtube_8m_frame_level/trained_model/export/exporter/ | tail -1)\n", "echo \"Deleting and deploying $MODEL_NAME $MODEL_VERSION from $MODEL_LOCATION ... this will take a few minutes\"\n",
 "#gcloud ml-engine versions delete ${MODEL_VERSION} --model ${MODEL_NAME}\n",
 "#gcloud ml-engine models delete ${MODEL_NAME}\n",
 "gcloud ml-engine models create $MODEL_NAME --regions $REGION\n",
diff --git a/courses/machine_learning/asl/open_project/ASL_youtube8m_models/video/youtube_8m_video_level.ipynb b/courses/machine_learning/asl/open_project/ASL_youtube8m_models/video/youtube_8m_video_level.ipynb
index 8fb73d0a56..5fefbef87f 100644
--- a/courses/machine_learning/asl/open_project/ASL_youtube8m_models/video/youtube_8m_video_level.ipynb
+++ b/courses/machine_learning/asl/open_project/ASL_youtube8m_models/video/youtube_8m_video_level.ipynb
@@ -749,8 +749,7 @@
 "OUTDIR=gs://$BUCKET/youtube_8m_video_level/trained_model\n",
 "JOBNAME=job_youtube_8m_video_level$(date -u +%y%m%d_%H%M%S)\n",
 "echo $OUTDIR $REGION $JOBNAME\n",
- "gsutil -m rm -rf $OUTDIR\n",
- "gcloud ml-engine jobs submit training $JOBNAME \\\n",
+ "gcloud storage rm --recursive --continue-on-error $OUTDIR\n", "gcloud ml-engine jobs submit training $JOBNAME \\\n",
 " --region=$REGION \\\n",
 " --module-name=trainer.task \\\n",
 " --package-path=$PWD/trainer \\\n",
@@ -855,8 +854,7 @@
 "OUTDIR=gs://$BUCKET/youtube_8m_video_level/hyperparam\n",
 "JOBNAME=job_youtube_8m_video_level$(date -u +%y%m%d_%H%M%S)\n",
 "echo $OUTDIR $REGION $JOBNAME\n",
- "gsutil -m rm -rf $OUTDIR\n",
- "gcloud ml-engine jobs submit training $JOBNAME \\\n",
+ "gcloud storage rm --recursive --continue-on-error $OUTDIR\n", "gcloud ml-engine jobs submit training $JOBNAME \\\n",
 " --region=$REGION \\\n",
 " --module-name=trainer.task \\\n",
 " --package-path=$PWD/trainer \\\n",
@@ -914,8 +912,7 @@
 "%bash\n",
 "MODEL_NAME=\"youtube_8m_video_level\"\n",
 "MODEL_VERSION=\"v1\"\n",
- "MODEL_LOCATION=$(gsutil ls gs://$BUCKET/youtube_8m_video_level/trained_model/export/exporter/ | tail -1)\n",
- "echo \"Deleting and deploying $MODEL_NAME $MODEL_VERSION from $MODEL_LOCATION ... this will take a few minutes\"\n",
+ "MODEL_LOCATION=$(gcloud storage ls gs://$BUCKET/youtube_8m_video_level/trained_model/export/exporter/ | tail -1)\n", "echo \"Deleting and deploying $MODEL_NAME $MODEL_VERSION from $MODEL_LOCATION ... this will take a few minutes\"\n",
 "#gcloud ml-engine versions delete ${MODEL_VERSION} --model ${MODEL_NAME}\n",
 "#gcloud ml-engine models delete ${MODEL_NAME}\n",
 "gcloud ml-engine models create $MODEL_NAME --regions $REGION\n",
diff --git a/courses/machine_learning/asl/open_project/ASL_youtube8m_models/video_using_datasets/youtube_8m_video_level_datasets.ipynb b/courses/machine_learning/asl/open_project/ASL_youtube8m_models/video_using_datasets/youtube_8m_video_level_datasets.ipynb
index 25647b057c..e7312d90e3 100644
--- a/courses/machine_learning/asl/open_project/ASL_youtube8m_models/video_using_datasets/youtube_8m_video_level_datasets.ipynb
+++ b/courses/machine_learning/asl/open_project/ASL_youtube8m_models/video_using_datasets/youtube_8m_video_level_datasets.ipynb
@@ -3783,8 +3783,7 @@
 "OUTDIR=gs://$BUCKET/youtube_8m_video_level_datasets/trained_model\n",
 "JOBNAME=job_youtube_8m_video_level_datasets$(date -u +%y%m%d_%H%M%S)\n",
 "echo $OUTDIR $REGION $JOBNAME\n",
- "gsutil -m rm -rf $OUTDIR\n",
- "gcloud ml-engine jobs submit training $JOBNAME \\\n",
+ "gcloud storage rm --recursive --continue-on-error $OUTDIR\n", "gcloud ml-engine jobs submit training $JOBNAME \\\n",
 " --region=$REGION \\\n",
 " --module-name=trainer.task \\\n",
 " --package-path=$PWD/trainer \\\n",
@@ -3882,8 +3881,7 @@
 "OUTDIR=gs://$BUCKET/youtube_8m_video_level_datasets/hyperparam\n",
 "JOBNAME=job_youtube_8m_video_level_datasets$(date -u +%y%m%d_%H%M%S)\n",
 "echo $OUTDIR $REGION $JOBNAME\n",
- "gsutil -m rm -rf $OUTDIR\n",
- "gcloud ml-engine jobs submit training $JOBNAME \\\n",
+ "gcloud storage rm --recursive --continue-on-error $OUTDIR\n", "gcloud ml-engine jobs submit training $JOBNAME \\\n",
 " --region=$REGION \\\n",
 " --module-name=trainer.task \\\n",
 " --package-path=$PWD/trainer \\\n",
@@ -3941,8 +3939,7 @@
 "%bash\n",
 "MODEL_NAME=\"youtube_8m_video_level_datasets\"\n",
 "MODEL_VERSION=\"v1\"\n",
- "MODEL_LOCATION=$(gsutil ls gs://$BUCKET/youtube_8m_video_level_datasets/trained_model/export/exporter/ | tail -1)\n",
- "echo \"Deleting and deploying $MODEL_NAME $MODEL_VERSION from $MODEL_LOCATION ... this will take a few minutes\"\n",
+ "MODEL_LOCATION=$(gcloud storage ls gs://$BUCKET/youtube_8m_video_level_datasets/trained_model/export/exporter/ | tail -1)\n", "echo \"Deleting and deploying $MODEL_NAME $MODEL_VERSION from $MODEL_LOCATION ... this will take a few minutes\"\n",
 "#gcloud ml-engine versions delete ${MODEL_VERSION} --model ${MODEL_NAME}\n",
 "#gcloud ml-engine models delete ${MODEL_NAME}\n",
 "gcloud ml-engine models create $MODEL_NAME --regions $REGION\n",
diff --git a/courses/machine_learning/asl/open_project/cloud_composer_automated_ml_pipeline_taxifare/airflow/dags/subdag/preprocess.py b/courses/machine_learning/asl/open_project/cloud_composer_automated_ml_pipeline_taxifare/airflow/dags/subdag/preprocess.py
index 30d5087ae9..887327a37c 100644
--- a/courses/machine_learning/asl/open_project/cloud_composer_automated_ml_pipeline_taxifare/airflow/dags/subdag/preprocess.py
+++ b/courses/machine_learning/asl/open_project/cloud_composer_automated_ml_pipeline_taxifare/airflow/dags/subdag/preprocess.py
@@ -98,7 +98,7 @@ def preprocess_tasks(model, parent_dag_name, child_dag_name, default_args, PROJE
 # BigQuery training data export to GCS
 bash_remove_old_data_op = BashOperator(
 task_id="bash_remove_old_data_{}_task".format(model.replace(".","_")),
- bash_command="if gsutil ls {0}/taxifare/data/{1} 2> /dev/null; then gsutil -m rm -rf {0}/taxifare/data/{1}/*; else true; fi".format(BUCKET, model.replace(".","_")),
+ bash_command="if gcloud storage ls {0}/taxifare/data/{1} 2> /dev/null; then gcloud storage rm --recursive --continue-on-error {0}/taxifare/data/{1}/*; else true; fi".format(BUCKET, model.replace(".","_")),
 dag=dag
 )
 
diff --git a/courses/machine_learning/asl/open_project/cloud_composer_automated_ml_pipeline_taxifare/airflow/dags/subdag/training.py b/courses/machine_learning/asl/open_project/cloud_composer_automated_ml_pipeline_taxifare/airflow/dags/subdag/training.py
index ba8e0a0ba9..7b0ab9131e 100644
--- a/courses/machine_learning/asl/open_project/cloud_composer_automated_ml_pipeline_taxifare/airflow/dags/subdag/training.py
+++ b/courses/machine_learning/asl/open_project/cloud_composer_automated_ml_pipeline_taxifare/airflow/dags/subdag/training.py
@@ -55,13 +55,13 @@ def training_tasks(model, parent_dag_name, child_dag_name, default_args, PROJECT
 
 bash_remove_old_saved_model_op = BashOperator(
 task_id="bash_remove_old_saved_model_{}_task".format(model.replace(".","_")),
- bash_command="if gsutil ls {0} 2> /dev/null; then gsutil -m rm -rf {0}/*; else true; fi".format(MODEL_LOCATION + model.replace(".","_")),
+ bash_command="if gcloud storage ls {0} 2> /dev/null; then gcloud storage rm --recursive --continue-on-error {0}/*; else true; fi".format(MODEL_LOCATION + model.replace(".","_")),
 dag=dag
 )
 
 bash_copy_new_saved_model_op = BashOperator(
 task_id="bash_copy_new_saved_model_{}_task".format(model.replace(".","_")),
- bash_command="gsutil -m rsync -d -r `gsutil ls {0}/export/exporter/ | tail -1` {1}".format(output_dir, MODEL_LOCATION + model.replace(".","_")),
+ bash_command="gcloud storage rsync --delete-unmatched-destination-objects --recursive `gcloud storage ls {0}/export/exporter/ | tail -1` {1}".format(output_dir, MODEL_LOCATION + model.replace(".","_")),
 dag=dag
 )
 
diff --git a/courses/machine_learning/feateng/asl_2.0_feat_eng-old.ipynb b/courses/machine_learning/feateng/asl_2.0_feat_eng-old.ipynb
index f003c5d2aa..9136e3a523 100644
--- a/courses/machine_learning/feateng/asl_2.0_feat_eng-old.ipynb
+++ b/courses/machine_learning/feateng/asl_2.0_feat_eng-old.ipynb
@@ -159,16 +159,16 @@
 "fi \n",
 " \n",
 "## Create GCS bucket if it doesn't exist already...\n",
- "exists=$(gsutil ls -d | grep -w gs://${PROJECT}/)\n",
+ "exists=$(gcloud storage ls | grep -w gs://${PROJECT}/)\n",
 "\n",
 "if [ -n \"$exists\" ]; then\n",
 " echo -e \"Bucket exists, let's not recreate it.\"\n",
 " \n",
 "else\n",
 " echo \"Creating a new GCS bucket.\"\n",
- " gsutil mb -l ${REGION} gs://${PROJECT}\n",
+ " gcloud storage buckets create --location ${REGION} gs://${PROJECT}\n",
 " echo \"\\nHere are your current buckets:\"\n",
- " gsutil ls\n",
+ " gcloud storage ls\n",
 "fi"
 ]
 },
@@ -3961,4 +3961,4 @@
 },
 "nbformat": 4,
 "nbformat_minor": 4
-}
+}
\ No newline at end of file
diff --git a/courses/machine_learning/feateng/asl_2.0_feat_eng.ipynb b/courses/machine_learning/feateng/asl_2.0_feat_eng.ipynb
index f003c5d2aa..8a1eaf6a5c 100644
--- a/courses/machine_learning/feateng/asl_2.0_feat_eng.ipynb
+++ b/courses/machine_learning/feateng/asl_2.0_feat_eng.ipynb
@@ -159,17 +159,14 @@
 "fi \n",
 " \n",
 "## Create GCS bucket if it doesn't exist already...\n",
- "exists=$(gsutil ls -d | grep -w gs://${PROJECT}/)\n",
- "\n",
+ "exists=$(gcloud storage ls | grep -w gs://${PROJECT}/)\n", "\n",
 "if [ -n \"$exists\" ]; then\n",
 " echo -e \"Bucket exists, let's not recreate it.\"\n",
 " \n",
 "else\n",
 " echo \"Creating a new GCS bucket.\"\n",
- " gsutil mb -l ${REGION} gs://${PROJECT}\n",
- " echo \"\\nHere are your current buckets:\"\n",
- " gsutil ls\n",
- "fi"
+ " gcloud storage buckets create --location ${REGION} gs://${PROJECT}\n", " echo \"\\nHere are your current buckets:\"\n",
+ " gcloud storage ls\n", "fi"
 ]
 },
 {
diff --git a/courses/machine_learning/feateng/feateng.ipynb b/courses/machine_learning/feateng/feateng.ipynb
index 075e93df26..a0fe308524 100644
--- a/courses/machine_learning/feateng/feateng.ipynb
+++ b/courses/machine_learning/feateng/feateng.ipynb
@@ -224,9 +224,7 @@
 "outputs": [],
 "source": [
 "%%bash\n",
- "if gsutil ls | grep -q gs://${BUCKET}/taxifare/ch4/taxi_preproc/; then\n",
- " gsutil -m rm -rf gs://$BUCKET/taxifare/ch4/taxi_preproc/\n",
- "fi"
+ "if gcloud storage ls | grep -q gs://${BUCKET}/taxifare/ch4/taxi_preproc/; then\n", " gcloud storage rm --recursive --continue-on-error gs://$BUCKET/taxifare/ch4/taxi_preproc/\n", "fi"
 ]
 },
 {
@@ -346,8 +344,7 @@
 ],
 "source": [
 "%%bash\n",
- "gsutil ls gs://$BUCKET/taxifare/ch4/taxi_preproc/"
- ]
+ "gcloud storage ls gs://$BUCKET/taxifare/ch4/taxi_preproc/" ]
 },
 {
 "cell_type": "markdown",
@@ -370,9 +367,7 @@
 "outputs": [],
 "source": [
 "%%bash\n",
- "if gsutil ls | grep -q gs://${BUCKET}/taxifare/ch4/taxi_preproc/; then\n",
- " gsutil -m rm -rf gs://$BUCKET/taxifare/ch4/taxi_preproc/\n",
- "fi"
+ "if gcloud storage ls | grep -q gs://${BUCKET}/taxifare/ch4/taxi_preproc/; then\n", " gcloud storage rm --recursive --continue-on-error gs://$BUCKET/taxifare/ch4/taxi_preproc/\n", "fi"
 ]
 },
 {
@@ -433,8 +428,7 @@
 ],
 "source": [
 "%%bash\n",
- "gsutil ls -l gs://$BUCKET/taxifare/ch4/taxi_preproc/"
- ]
+ "gcloud storage ls --long gs://$BUCKET/taxifare/ch4/taxi_preproc/" ]
 },
 {
 "cell_type": "code",
@@ -462,8 +456,7 @@
 "source": [
 "%%bash\n",
 "#print first 10 lines of first shard of train.csv\n",
- "gsutil cat \"gs://$BUCKET/taxifare/ch4/taxi_preproc/train.csv-00000-of-*\" | head"
- ]
+ "gcloud storage cat \"gs://$BUCKET/taxifare/ch4/taxi_preproc/train.csv-00000-of-*\" | head" ]
 },
 {
 "cell_type": "markdown",
@@ -485,9 +478,7 @@
 " rm -rf sample\n",
 "fi\n",
 "mkdir sample\n",
- "gsutil cat \"gs://$BUCKET/taxifare/ch4/taxi_preproc/train.csv-00000-of-*\" > sample/train.csv\n",
- "gsutil cat \"gs://$BUCKET/taxifare/ch4/taxi_preproc/valid.csv-00000-of-*\" > sample/valid.csv"
- ]
+ "gcloud storage cat \"gs://$BUCKET/taxifare/ch4/taxi_preproc/train.csv-00000-of-*\" > sample/train.csv\n", "gcloud storage cat \"gs://$BUCKET/taxifare/ch4/taxi_preproc/valid.csv-00000-of-*\" > sample/valid.csv" ]
 },
 {
 "cell_type": "markdown",
@@ -673,8 +664,7 @@
 "OUTDIR=gs://${BUCKET}/taxifare/ch4/taxi_trained\n",
 "JOBNAME=lab4a_$(date -u +%y%m%d_%H%M%S)\n",
 "echo $OUTDIR $REGION $JOBNAME\n",
- "gsutil -m rm -rf $OUTDIR\n",
- "gcloud ai-platform jobs submit training $JOBNAME \\\n",
+ "gcloud storage rm --recursive --continue-on-error $OUTDIR\n", "gcloud ai-platform jobs submit training $JOBNAME \\\n",
 " --region=$REGION \\\n",
 " --module-name=trainer.task \\\n",
 " --package-path=${PWD}/taxifare/trainer \\\n",
diff --git a/courses/machine_learning/feateng/hyperparam.ipynb b/courses/machine_learning/feateng/hyperparam.ipynb
index f33ccbf8ea..20420afd70 100644
--- a/courses/machine_learning/feateng/hyperparam.ipynb
+++ b/courses/machine_learning/feateng/hyperparam.ipynb
@@ -218,7 +218,7 @@
 "OUTDIR=gs://${BUCKET}/taxifare/ch4/taxi_trained\n",
 "JOBNAME=lab4a_$(date -u +%y%m%d_%H%M%S)\n",
 "echo $OUTDIR $REGION $JOBNAME\n",
- "gsutil -m rm -rf $OUTDIR\n",
+ "gcloud storage rm --recursive --continue-on-error $OUTDIR\n",
 "gcloud ml-engine jobs submit training $JOBNAME \\\n",
 " --region=$REGION \\\n",
 " --module-name=trainer.task \\\n",
diff --git a/courses/machine_learning/feateng/tftransform.ipynb b/courses/machine_learning/feateng/tftransform.ipynb
index db9b8df5b5..d1049bcd75 100644
--- a/courses/machine_learning/feateng/tftransform.ipynb
+++ b/courses/machine_learning/feateng/tftransform.ipynb
@@ -96,8 +96,8 @@
 "outputs": [],
 "source": [
 "%%bash\n",
- "if ! gsutil ls | grep -q gs://${BUCKET}/; then\n",
- " gsutil mb -l ${REGION} gs://${BUCKET}\n",
+ "if ! gcloud storage ls | grep -q gs://${BUCKET}/; then\n",
+ " gcloud storage buckets create --location=${REGION} gs://${BUCKET}\n",
 "fi"
 ]
 },
@@ -265,7 +265,7 @@
 " print 'Launching Dataflow job {} ... hang on'.format(job_name)\n",
 " OUTPUT_DIR = 'gs://{0}/taxifare/preproc_tft/'.format(BUCKET)\n",
 " import subprocess\n",
- " subprocess.call('gsutil rm -r {}'.format(OUTPUT_DIR).split())\n",
+ " subprocess.call('gcloud storage rm --recursive {}'.format(OUTPUT_DIR).split())\n",
 " EVERY_N = 10000\n",
 " \n",
 " options = {\n",
@@ -358,7 +358,7 @@
 "source": [
 "%%bash\n",
 "# ls preproc_tft\n",
- "gsutil ls gs://${BUCKET}/taxifare/preproc_tft/"
+ "gcloud storage ls gs://${BUCKET}/taxifare/preproc_tft/"
 ]
 },
 {