Compressing images and saving them to Google Cloud Storage buckets is a common task for web applications. In this tutorial, we will use Python and the Google client libraries to perform these tasks.
A prerequisite for running the code below is a service account with the Storage Admin role; refer to How to create service account in GCP to create the service account and download its JSON key. This tutorial assumes that you have a bucket created in GCP and that the bucket contains some images to be compressed.
Install the Cloud Storage client library and the Python Pillow module:
pip install google-cloud-storage
pip install pillow
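With the libraries installed, you can quickly verify that the downloaded service account key works before going further. This is an optional sketch; the key path is just a placeholder for wherever you saved your JSON key:
from google.cloud import storage
# Build a client directly from the downloaded JSON key (path is an example).
client = storage.Client.from_service_account_json(r"C:\path\to\key.json")
# Listing buckets is a quick check that the service account is authorized.
for b in client.list_buckets():
    print(b.name)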
from google.cloud import storage
import os
from PIL import Image
# Point the client library at the downloaded service account key.
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = r"C:\Users\****\Desktop\key.json"
Create a generator object for the images in [my-bucket]:
storage_client = storage.Client()
my_bucket_name = "[my-bucket]" #Replace this with actual bucket name
bucket = storage_client.bucket(my_bucket_name)
object_generator = bucket.list_blobs()
for i in object_generator:
    # Download each object to a local file with the same name.
    with open(i.name, 'wb') as file:
        i.download_to_file(file)
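The loop above downloads every object in the bucket. If the bucket also holds non-image objects, you may want to skip them; the sketch below filters on each blob's content type (the image/ prefix check is an assumption about how your objects are tagged):
for i in bucket.list_blobs():
    # Skip anything that is not stored with an image content type.
    if not (i.content_type or "").startswith("image/"):
        continue
    with open(i.name, 'wb') as file:
        i.download_to_file(file)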
Next, we extend the loop to compress each downloaded image. We pass two parameters, optimize=True and quality=70, to Pillow's save method, which compresses the image according to the quality parameter.
for i in object_generator:
    with open(i.name, 'wb') as file:
        i.download_to_file(file)
    # Re-open the downloaded file and write a compressed JPEG copy.
    pic = Image.open(i.name)
    pic.save("compressed_" + i.name, "JPEG", optimize=True, quality=70)
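One caveat: the save call writes JPEG output, and Pillow will raise an error for source images that carry an alpha channel (for example, PNGs with transparency). A possible workaround is to convert such images to RGB before saving:
pic = Image.open(i.name)
# JPEG cannot store an alpha channel, so convert RGBA/P images to RGB first.
if pic.mode in ("RGBA", "P"):
    pic = pic.convert("RGB")
pic.save("compressed_" + i.name, "JPEG", optimize=True, quality=70)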
To see how much space compression saves, extend the loop to print the size of each file before and after:
for i in object_generator:
    with open(i.name, 'wb') as file:
        i.download_to_file(file)
    pic = Image.open(i.name)
    pic.save("compressed_" + i.name, "JPEG", optimize=True, quality=70)
    # Compare the file sizes on disk before and after compression.
    orig_size = os.stat(os.path.join(os.getcwd(), i.name)).st_size
    print("Original size of " + i.name + " is " + str(orig_size))
    compressed_size = os.stat(os.path.join(os.getcwd(), "compressed_" + i.name)).st_size
    print("Size after compression for " + i.name + " is " + str(compressed_size))
Finally, extend the loop to upload the compressed images back to the bucket:
for i in object_generator:
    with open(i.name, 'wb') as file:
        i.download_to_file(file)
    pic = Image.open(i.name)
    pic.save("compressed_" + i.name, "JPEG", optimize=True, quality=70)
    orig_size = os.stat(os.path.join(os.getcwd(), i.name)).st_size
    print("Original size of " + i.name + " is " + str(orig_size))
    compressed_size = os.stat(os.path.join(os.getcwd(), "compressed_" + i.name)).st_size
    print("Size after compression for " + i.name + " is " + str(compressed_size))
    # Upload the compressed copy back to the same bucket under a new name.
    blob = bucket.blob("compressed_" + i.name)
    blob.upload_from_filename("compressed_" + i.name)
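The script leaves both the original and the compressed copy on local disk. If the local files are only needed temporarily, they can be removed inside the same loop once the upload succeeds; a minimal sketch:
    # After blob.upload_from_filename(...), remove the local copies.
    os.remove(i.name)
    os.remove("compressed_" + i.name)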
Putting it all together, the complete code is:
from google.cloud import storage
import os
from PIL import Image
# Point the client library at the downloaded service account key.
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = r"C:\Users\*****\Desktop\key.json"
storage_client = storage.Client()
my_bucket_name = "[my-bucket]" #Replace with actual bucket name
bucket = storage_client.bucket(my_bucket_name)
object_generator = bucket.list_blobs()
for i in object_generator:
    # Download each object, compress it locally, and report the size change.
    with open(i.name, 'wb') as file:
        i.download_to_file(file)
    pic = Image.open(i.name)
    pic.save("compressed_" + i.name, "JPEG", optimize=True, quality=70)
    orig_size = os.stat(os.path.join(os.getcwd(), i.name)).st_size
    print("Original size of " + i.name + " is " + str(orig_size))
    compressed_size = os.stat(os.path.join(os.getcwd(), "compressed_" + i.name)).st_size
    print("Size after compression for " + i.name + " is " + str(compressed_size))
    # Upload the compressed copy back to the bucket.
    blob = bucket.blob("compressed_" + i.name)
    blob.upload_from_filename("compressed_" + i.name)
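If you would rather avoid writing the original images to disk at all, the same work can be done in memory with io.BytesIO buffers. This is a sketch under the same assumptions as above (credentials configured, [my-bucket] replaced with your bucket name), not part of the original flow:
import io
from google.cloud import storage
from PIL import Image

storage_client = storage.Client()
bucket = storage_client.bucket("[my-bucket]")  # Replace with actual bucket name
for blob in bucket.list_blobs():
    # Read the object into memory instead of downloading it to a file.
    data = blob.download_as_bytes()
    pic = Image.open(io.BytesIO(data))
    # JPEG cannot store an alpha channel, so convert RGBA/P images to RGB.
    if pic.mode in ("RGBA", "P"):
        pic = pic.convert("RGB")
    buffer = io.BytesIO()
    pic.save(buffer, "JPEG", optimize=True, quality=70)
    buffer.seek(0)
    # Upload the compressed bytes as a new object in the same bucket.
    bucket.blob("compressed_" + blob.name).upload_from_file(buffer, content_type="image/jpeg")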