Ecosystem
LLMs
- Overview
- OpenAI
- Anthropic
- Google Gemini
- Google Vertex AI
- Azure
- Bedrock
- AWS SageMaker
- Ollama
- More
- Bring Your Own LLM
Google Vertex AI
Files
Upload files to Google Cloud Storage for Vertex AI fine-tuning and batch inference
To perform fine-tuning or batch inference with Vertex AI, you need to upload files to Google Cloud Storage. With Obiguard, you can easily upload files to GCS and use them for fine-tuning or batch inference with Vertex AI models.
Uploading Files
Copy
from obiguard import Obiguard

# Configure the Obiguard client with the Vertex AI virtual key plus the GCS
# storage details that determine where the uploaded file lands.
client = Obiguard(
    obiguard_api_key="OBIGUARD_API_KEY",  # Your Obiguard API key
    virtual_key="VERTEX_VIRTUAL_KEY",  # Add your Vertex virtual key
    vertex_storage_bucket_name="your_bucket_name",  # Specify the GCS bucket name
    provider_file_name="your_file_name.jsonl",  # Specify the file name in GCS
    provider_model="gemini-1.5-flash-001"  # Specify the model to use
)

# Open the dataset in a context manager so the file handle is always closed,
# even if the upload raises. The original snippet leaked the handle.
with open("dataset.jsonl", "rb") as dataset:
    upload_file_response = client.files.create(
        purpose="fine-tune",  # Can be "fine-tune" or "batch"
        file=dataset
    )
print(upload_file_response)
Copy
from obiguard import Obiguard

# Configure the Obiguard client with the Vertex AI virtual key plus the GCS
# storage details that determine where the uploaded file lands.
client = Obiguard(
    obiguard_api_key="OBIGUARD_API_KEY",  # Your Obiguard API key
    virtual_key="VERTEX_VIRTUAL_KEY",  # Add your Vertex virtual key
    vertex_storage_bucket_name="your_bucket_name",  # Specify the GCS bucket name
    provider_file_name="your_file_name.jsonl",  # Specify the file name in GCS
    provider_model="gemini-1.5-flash-001"  # Specify the model to use
)

# Open the dataset in a context manager so the file handle is always closed,
# even if the upload raises. The original snippet leaked the handle.
with open("dataset.jsonl", "rb") as dataset:
    upload_file_response = client.files.create(
        purpose="fine-tune",  # Can be "fine-tune" or "batch"
        file=dataset
    )
print(upload_file_response)
Copy
from openai import OpenAI
from obiguard import OBIGUARD_GATEWAY_URL, createHeaders

# Point the OpenAI SDK at the Obiguard gateway; the extra headers carry the
# Vertex virtual key and the GCS upload target for the file operation.
openai = OpenAI(
    api_key='OPENAI_API_KEY',
    base_url=OBIGUARD_GATEWAY_URL,
    default_headers=createHeaders(
        virtual_key="VERTEX_VIRTUAL_KEY",
        obiguard_api_key="sk-obg******",  # Your Obiguard API key
        vertex_storage_bucket_name="your_bucket_name",
        provider_file_name="your_file_name.jsonl",
        provider_model="gemini-1.5-flash-001"
    )
)

# Open the dataset in a context manager so the file handle is always closed,
# even if the upload raises. The original snippet leaked the handle.
with open("dataset.jsonl", "rb") as dataset:
    upload_file_response = openai.files.create(
        purpose="fine-tune",  # Can be "fine-tune" or "batch"
        file=dataset
    )
print(upload_file_response)
Get File
Copy
from obiguard import Obiguard

# Build an Obiguard client scoped to the Vertex AI virtual key.
client = Obiguard(
    obiguard_api_key="OBIGUARD_API_KEY",  # Your Obiguard API key
    virtual_key="VERTEX_VIRTUAL_KEY",  # Your Vertex virtual key
)

# Look up the uploaded file's metadata by its identifier and show it.
retrieved = client.files.retrieve(file_id="file_id")
print(retrieved)
Copy
from obiguard import Obiguard

# Build an Obiguard client scoped to the Vertex AI virtual key.
client = Obiguard(
    obiguard_api_key="OBIGUARD_API_KEY",  # Your Obiguard API key
    virtual_key="VERTEX_VIRTUAL_KEY",  # Your Vertex virtual key
)

# Look up the uploaded file's metadata by its identifier and show it.
retrieved = client.files.retrieve(file_id="file_id")
print(retrieved)
Copy
from openai import OpenAI
from obiguard import OBIGUARD_GATEWAY_URL, createHeaders

# Headers that route OpenAI SDK traffic through the Obiguard gateway.
gateway_headers = createHeaders(
    virtual_key="VERTEX_VIRTUAL_KEY",
    obiguard_api_key="sk-obg******",  # Your Obiguard API key
)

openai = OpenAI(
    api_key='OPENAI_API_KEY',
    base_url=OBIGUARD_GATEWAY_URL,
    default_headers=gateway_headers,
)

# Fetch metadata for a previously uploaded file and show it.
retrieved = openai.files.retrieve(file_id="file_id")
print(retrieved)
Get File Content
Copy
from obiguard import Obiguard

# Build an Obiguard client scoped to the Vertex AI virtual key.
client = Obiguard(
    obiguard_api_key="OBIGUARD_API_KEY",  # Your Obiguard API key
    virtual_key="VERTEX_VIRTUAL_KEY",  # Your Vertex virtual key
)

# Download the raw contents of the uploaded file and show them.
contents = client.files.content(file_id="file_id")
print(contents)
Copy
from obiguard import Obiguard

# Build an Obiguard client scoped to the Vertex AI virtual key.
client = Obiguard(
    obiguard_api_key="OBIGUARD_API_KEY",  # Your Obiguard API key
    virtual_key="VERTEX_VIRTUAL_KEY",  # Your Vertex virtual key
)

# Download the raw contents of the uploaded file and show them.
contents = client.files.content(file_id="file_id")
print(contents)
Copy
from openai import OpenAI
from obiguard import OBIGUARD_GATEWAY_URL, createHeaders

# Headers that route OpenAI SDK traffic through the Obiguard gateway.
gateway_headers = createHeaders(
    virtual_key="VERTEX_VIRTUAL_KEY",
    obiguard_api_key="sk-obg******",  # Your Obiguard API key
)

openai = OpenAI(
    api_key='OPENAI_API_KEY',
    base_url=OBIGUARD_GATEWAY_URL,
    default_headers=gateway_headers,
)

# Download the raw contents of the uploaded file and show them.
contents = openai.files.content(file_id="file_id")
print(contents)
Note: The ListFiles endpoint is not supported for Vertex AI.
On this page
Assistant
Responses are generated using AI and may contain mistakes.