To perform fine-tuning or batch inference with Vertex AI, you need to upload files to Google Cloud Storage. With Obiguard, you can easily upload files to GCS and use them for fine-tuning or batch inference with Vertex AI models.

Uploading Files

from obiguard import Obiguard

# Configure the client with GCS upload targets for Vertex AI.
client = Obiguard(
    obiguard_api_key="OBIGUARD_API_KEY",  # Your Obiguard API key
    virtual_key="VERTEX_VIRTUAL_KEY",  # Add your Vertex virtual key
    vertex_storage_bucket_name="your_bucket_name",  # Specify the GCS bucket name
    provider_file_name="your_file_name.jsonl",  # Specify the file name in GCS
    provider_model="gemini-1.5-flash-001"  # Specify the model to use
)

# Use a context manager so the local file handle is closed after upload.
with open("dataset.jsonl", "rb") as dataset_file:
    upload_file_response = client.files.create(
        purpose="fine-tune",  # Can be "fine-tune" or "batch"
        file=dataset_file
    )

print(upload_file_response)

Get File

from obiguard import Obiguard

# Build a client authenticated against Obiguard and the Vertex provider.
client = Obiguard(
    obiguard_api_key="OBIGUARD_API_KEY",  # Your Obiguard API key
    virtual_key="VERTEX_VIRTUAL_KEY"  # Add your Vertex virtual key
)

# Fetch the metadata for a previously uploaded file by its identifier.
retrieved_file = client.files.retrieve(file_id="file_id")

print(retrieved_file)

Get File Content

from obiguard import Obiguard

# Configure the client; consistent 4-space indentation for keyword arguments.
client = Obiguard(
    obiguard_api_key="OBIGUARD_API_KEY",  # Your Obiguard API key
    virtual_key="VERTEX_VIRTUAL_KEY"  # Add your Vertex virtual key
)

# Download the raw contents of a previously uploaded file.
file_content = client.files.content(file_id="file_id")

print(file_content)

Note: The ListFiles endpoint is not supported for Vertex AI.