Last active
October 30, 2025 22:55
-
-
Save gryzinsky/941ffb419b6ab132066970710cd15cf8 to your computer and use it in GitHub Desktop.
Example Website Component Resource in Pulumi
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import hashlib | |
| import os | |
| from typing import Optional, TypedDict | |
| from pulumi import ComponentResource, ResourceOptions, Output | |
| import pulumi | |
| import pulumi_gcp as gcp | |
| import pulumi_command as command | |
# Constructor arguments for the Website component, expressed with the
# functional TypedDict form. `folder_path` is the local directory holding
# the static site content to upload.
WebsiteArgs = TypedDict("WebsiteArgs", {"folder_path": str})
class Website(ComponentResource):
    """Static website on GCS behind a Cloud CDN-enabled HTTP load balancer.

    Creates a public storage bucket configured for website serving, fronts it
    with a BackendBucket / URLMap / TargetHttpProxy / GlobalForwardingRule
    chain, rsyncs a local folder into the bucket, and invalidates the CDN
    cache whenever the folder's content hash changes.

    Outputs registered on the component:
        origin_url / origin_hostname: direct storage.googleapis.com access.
        cdn_url / cdn_hostname: access via the load balancer's global IP.
    """

    # Local directory whose contents are synced to the bucket.
    _folder_path: str

    def __init__(self, name: str, args: WebsiteArgs, opts: Optional[ResourceOptions] = None):
        """Create the component.

        Args:
            name: Pulumi resource name for this component.
            args: See WebsiteArgs; `folder_path` selects the content folder.
            opts: Standard Pulumi resource options.
        """
        super().__init__('pkg:index:Website', name, None, opts)
        # .get() tolerates a missing key entirely (the original subscript
        # raised KeyError); `or` additionally maps "" / None to the default.
        self._folder_path = args.get('folder_path') or "./www"
        self._create_resources()
        self._deployment()
        self.register_outputs({
            "origin_url": self.origin_url,
            "origin_hostname": self.origin_hostname,
            "cdn_url": self.cdn_url,
            "cdn_hostname": self.cdn_hostname,
        })

    def _create_resources(self) -> None:
        """Provision the bucket, public IAM binding, and the LB/CDN chain.

        NOTE(review): child resources are not parented to this component
        (no opts=ResourceOptions(parent=self)). The canonical component
        pattern parents them, but adding it now would change existing URNs
        and recreate deployed resources — confirm before changing.
        """
        self.bucket = gcp.storage.Bucket(
            "bucket",
            location="US",
            website={
                "main_page_suffix": "index.html",
                "not_found_page": "error.html",
            },
            uniform_bucket_level_access=True,
            # Allow `pulumi destroy` to delete a non-empty bucket.
            force_destroy=True,
        )
        # Make every object publicly readable — required for website serving.
        self.bucket_iam_binding = gcp.storage.BucketIAMBinding(
            "bucket-iam-binding",
            bucket=self.bucket.name,
            role="roles/storage.objectViewer",
            members=["allUsers"],
        )
        # Expose the bucket as a load-balancer backend with Cloud CDN on.
        self.backend_bucket = gcp.compute.BackendBucket(
            "backend-bucket", bucket_name=self.bucket.name, enable_cdn=True
        )
        # Stable global anycast IP for the front end.
        self.ip = gcp.compute.GlobalAddress("ip")
        # Route all requests to the storage bucket backend.
        self.url_map = gcp.compute.URLMap("url-map", default_service=self.backend_bucket.self_link)
        # HTTP proxy that evaluates the URLMap.
        self.http_proxy = gcp.compute.TargetHttpProxy("http-proxy", url_map=self.url_map.self_link)
        # Forward port-80 TCP traffic on the global IP to the proxy.
        self.http_forwarding_rule = gcp.compute.GlobalForwardingRule(
            "http-forwarding-rule",
            ip_address=self.ip.address,
            ip_protocol="TCP",
            port_range="80",
            target=self.http_proxy.self_link,
        )
        self.origin_url = self.bucket.name.apply(lambda name: f"https://storage.googleapis.com/{name}/index.html")
        self.origin_hostname = self.bucket.name.apply(lambda name: f"storage.googleapis.com/{name}")
        self.cdn_url = self.ip.address.apply(lambda ip: f"http://{ip}")
        self.cdn_hostname = self.ip.address

    def _deployment(self) -> None:
        """Sync the local folder to the bucket and invalidate the CDN cache.

        Both commands re-run only when the folder content hash (their
        trigger) changes; cache invalidation is ordered after the sync.
        """
        folder_hash = self._calculate_folder_hash(self._folder_path)
        # One Output reused for create and update — they run the same command.
        sync_cmd = self.bucket.name.apply(
            lambda bucket_name: f"gsutil -m rsync -r -d {self._folder_path} gs://{bucket_name}/"
        )
        self.force_sync = command.local.Command(
            "force-sync",
            create=sync_cmd,
            update=sync_cmd,
            triggers=[folder_hash],
            opts=pulumi.ResourceOptions(depends_on=[self.bucket]),
        )
        invalidate_cmd = pulumi.Output.all(self.url_map.name, gcp.config.project).apply(
            lambda args: f"gcloud compute url-maps invalidate-cdn-cache {args[0]} --path='/*' --project={args[1]}"
        )
        self.invalidate_cache = command.local.Command(
            "invalidate-cache",
            create=invalidate_cmd,
            update=invalidate_cmd,
            triggers=[folder_hash],
            opts=pulumi.ResourceOptions(depends_on=[self.url_map, self.force_sync]),
        )

    def _calculate_folder_hash(self, folder_path: str) -> str:
        """Return a SHA-256 hex digest of all files under folder_path.

        Directories and file names are visited in sorted order and each
        file's folder-relative path is mixed into the digest, so the result
        is deterministic across platforms (os.walk order is otherwise
        filesystem-dependent) and changes when a file is renamed or moved,
        not only when its bytes change.
        """
        hasher = hashlib.sha256()
        for root, dirs, files in os.walk(folder_path):
            # Sorting in place forces os.walk to descend deterministically.
            dirs.sort()
            for file_name in sorted(files):
                file_path = os.path.join(root, file_name)
                # Include the relative path so renames/moves alter the hash.
                rel_path = os.path.relpath(file_path, folder_path)
                hasher.update(rel_path.encode("utf-8"))
                with open(file_path, 'rb') as f:
                    hasher.update(f.read())
        digest = hasher.hexdigest()
        print(f"Folder hash for '{folder_path}': {digest}")
        return digest
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment