knitr::opts_chunk$set(echo = TRUE)
# options(googleAuthR.verbose = 0) # set when debugging

Setup

library(googleAuthR)
library(googleCloudVertexAIR)

options(googleAuthR.scopes.selected = "https://www.googleapis.com/auth/cloud-platform")

gar_auth_service(json_file = Sys.getenv("GAR_SERVICE_JSON"))
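
If authentication fails, a first thing to check is that the GAR_SERVICE_JSON environment variable points to an existing service account key file. A minimal sanity check, using base R only:

# GAR_SERVICE_JSON should contain the path to a service account key file
if (!file.exists(Sys.getenv("GAR_SERVICE_JSON"))) {
  stop("GAR_SERVICE_JSON is not set or does not point to a service account key file")
}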

Set global arguments

projectId <- Sys.getenv("GCVA_DEFAULT_PROJECT_ID")
gcva_region_set("us-central1")
gcva_project_set(projectId)
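
The project ID is read from the GCVA_DEFAULT_PROJECT_ID environment variable, so it is worth confirming it is non-empty before making API calls (base R only):

# fails early if the environment variable was not set
stopifnot(nzchar(projectId))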

Get training pipeline

training_pipeline <- gcva_trainingPipeline(
  trainingPipelineName = Sys.getenv("GCVA_TRAINING_PIPELINE")
)
training_pipeline
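
Assuming the returned object is the parsed TrainingPipeline resource (an assumption about the object structure), individual fields such as the display name and state can be inspected directly:

# field names follow the Vertex AI TrainingPipeline resource; adjust if the
# object returned by gcva_trainingPipeline() is structured differently
training_pipeline$displayName
training_pipeline$state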

Get model from training pipeline

The gcva_model() function parses the modelName from the training pipeline object retrieved above.

gcva_model(model = training_pipeline)
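
The returned model can also be assigned to an object for later reuse; the name below is just a placeholder chosen for this example:

# store the model parsed from the training pipeline for later steps
pipeline_model <- gcva_model(model = training_pipeline)
pipeline_model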

Create custom container training job

This is a two-step process: 1) create a TrainingPipeline that runs a CustomJob, and then 2) import the resulting artifacts as a Model. The call below defines the job specification; it is submitted for execution with gcva_run_job() in the next section.

job <- gcva_custom_container_training_job(
  stagingBucket = "gs://my-bucket-name", 
  displayName = "vertex-r",
  containerUri = "us-central1-docker.pkg.dev/gc-vertex-ai-r/my-docker-repo/vertex-r:latest",
  command = c("Rscript", "train.R"),
  modelServingContainerCommand = c("Rscript", "serve.R"),
  modelServingContainerImageUri = "us-central1-docker.pkg.dev/gc-vertex-ai-r/my-docker-repo/vertex-r:latest",
  machineType = "n1-standard-4"
)
job

Execute custom training job

# use the first dataset returned by gcva_list_datasets() for this example
dataset <- gcva_dataset(datasetName = gcva_list_datasets()[1,2])

model <- gcva_run_job(
  job = job,
  dataset = dataset,
  modelDisplayName = "vertex-r-model",
  machineType = "n1-standard-4"
)
model

