Skip to content

Cloud Asset Discovery

Discovering and enumerating cloud-hosted infrastructure and resources.

Overview

Cloud platforms commonly targeted:

  • Amazon Web Services (AWS)
  • Microsoft Azure
  • Google Cloud Platform (GCP)
  • DigitalOcean
  • Oracle Cloud
  • Alibaba Cloud

AWS Enumeration

S3 Bucket Discovery

# Common naming patterns
http://COMPANY-backup.s3.amazonaws.com
http://COMPANY-prod.s3.amazonaws.com
http://COMPANY-data.s3.amazonaws.com
http://COMPANY-assets.s3.amazonaws.com
http://COMPANY-images.s3.amazonaws.com
http://COMPANY-logs.s3.amazonaws.com

# Region-specific URLs
http://BUCKET.s3-eu-west-1.amazonaws.com
http://BUCKET.s3-us-east-1.amazonaws.com

# Virtual hosted-style
http://BUCKET.s3.amazonaws.com
https://BUCKET.s3.amazonaws.com

# Path-style (deprecated but still works)
http://s3.amazonaws.com/BUCKET

S3 Bucket Enumeration Tools

AWSBucketDump

# Clone and install
git clone https://github.com/jordanpotti/AWSBucketDump.git
cd AWSBucketDump
pip install -r requirements.txt

# Search for buckets containing keyword
python AWSBucketDump.py -D -l keyword -g interesting_keywords.txt

# Download interesting files
python AWSBucketDump.py -D -l company -g interesting_keywords.txt

S3Scanner

# Install
pip install s3scanner

# Scan from list
s3scanner --buckets buckets.txt

# Scan and dump
s3scanner --buckets buckets.txt --dump

# Check specific bucket
s3scanner --bucket company-backup

Bucket Stream

# Monitor certificate transparency logs for S3 buckets
git clone https://github.com/eth0izzle/bucket-stream.git
cd bucket-stream
pip install -r requirements.txt

# Run
python bucket-stream.py

# With specific keywords
python bucket-stream.py --keywords keywords.txt

S3 Bucket Permissions Testing

# Using AWS CLI
aws s3 ls s3://bucket-name --no-sign-request

# List contents
aws s3 ls s3://bucket-name --recursive --no-sign-request

# Download file
aws s3 cp s3://bucket-name/file.txt . --no-sign-request

# Upload test (if writable)
echo "test" > test.txt
aws s3 cp test.txt s3://bucket-name/ --no-sign-request

# Using curl
curl http://bucket-name.s3.amazonaws.com
curl http://s3.amazonaws.com/bucket-name

# Check ACL
aws s3api get-bucket-acl --bucket bucket-name --no-sign-request

CloudFront Distribution Discovery

# Identify CloudFront
curl -I https://target.com | grep -i cloudfront
dig target.com | grep cloudfront

# CloudFront domain patterns
https://RANDOM.cloudfront.net

AWS Service Discovery

# Elastic Load Balancers
COMPANY-prod-lb.us-east-1.elb.amazonaws.com
COMPANY-prod.elb.amazonaws.com

# API Gateway
https://RANDOM.execute-api.REGION.amazonaws.com

# Lambda functions (via API Gateway)
https://RANDOM.execute-api.REGION.amazonaws.com/prod/function-name

# Elasticsearch
https://COMPANY-es.REGION.es.amazonaws.com

# RDS
COMPANY-prod.RANDOM.REGION.rds.amazonaws.com

Azure Enumeration

Storage Account Discovery

# Storage blob patterns
https://COMPANY.blob.core.windows.net
https://COMPANYstorage.blob.core.windows.net
https://COMPANY-prod.blob.core.windows.net

# Common container names
https://COMPANY.blob.core.windows.net/images
https://COMPANY.blob.core.windows.net/backup
https://COMPANY.blob.core.windows.net/data
https://COMPANY.blob.core.windows.net/files

MicroBurst - Azure Security Assessment

# Install
git clone https://github.com/NetSPI/MicroBurst.git
cd MicroBurst
Import-Module .\MicroBurst.psm1

# Enumerate storage accounts
Invoke-EnumerateAzureBlobs -Base company

# Enumerate subdomains
Invoke-EnumerateAzureSubDomains -Base company

Azure Services Discovery

# Azure Web Apps
https://COMPANY.azurewebsites.net
https://COMPANY-prod.azurewebsites.net

# Azure Functions
https://COMPANY.azurewebsites.net/api/function-name

# Azure Databases
COMPANY.database.windows.net
COMPANY-sql.database.windows.net

# Azure CDN
https://COMPANY.azureedge.net

# Azure API Management
https://COMPANY.azure-api.net

# Azure Key Vault
https://COMPANY.vault.azure.net

Azure Blob Enumeration

# Using az CLI
az storage blob list --account-name COMPANY --container-name CONTAINER

# Anonymous access test
curl "https://COMPANY.blob.core.windows.net/CONTAINER?restype=container&comp=list"
# Note: the URL must be quoted — an unquoted & sends the command to the
# background and silently drops the comp=list parameter.

# List containers
curl "https://COMPANY.blob.core.windows.net/?comp=list"

Google Cloud Platform (GCP)

GCP Storage Bucket Discovery

# Storage bucket patterns
https://storage.googleapis.com/COMPANY-backup
https://COMPANY.storage.googleapis.com

# Common bucket names
COMPANY-prod
COMPANY-backup
COMPANY-data
COMPANY-assets
COMPANY-logs

GCPBucketBrute

# Install
git clone https://github.com/RhinoSecurityLabs/GCPBucketBrute.git
cd GCPBucketBrute
pip3 install -r requirements.txt

# Enumerate buckets
python3 gcpbucketbrute.py -k keyword

# With wordlist
python3 gcpbucketbrute.py -w wordlist.txt

GCP Bucket Permissions

# List bucket contents
gsutil ls gs://BUCKET-NAME

# Anonymous access
curl https://storage.googleapis.com/BUCKET-NAME

# Download file
gsutil cp gs://BUCKET-NAME/file.txt .

# Test write access
gsutil cp test.txt gs://BUCKET-NAME/

GCP Services Discovery

# App Engine
https://COMPANY.appspot.com
https://PROJECT-ID.REGION.r.appspot.com

# Cloud Functions
https://REGION-PROJECT-ID.cloudfunctions.net/function-name

# Cloud Run
https://SERVICE-NAME-RANDOM.REGION.run.app

# Firebase
https://COMPANY.firebaseapp.com
https://COMPANY.firebaseio.com

Multi-Cloud Discovery

CloudBrute

# Install
git clone https://github.com/0xsha/CloudBrute.git
cd CloudBrute
go build

# Scan multiple cloud providers
./CloudBrute -d target.com -k keyword -w medium

# Specify providers
./CloudBrute -d target.com -k keyword -w medium -cloud aws,azure,gcp

cloud_enum

# Install
git clone https://github.com/initstring/cloud_enum.git
cd cloud_enum
pip3 install -r requirements.txt

# Enumerate all cloud providers
python3 cloud_enum.py -k company

# Specific keyword file
python3 cloud_enum.py -kf keywords.txt

# Specific provider
python3 cloud_enum.py -k company --aws
python3 cloud_enum.py -k company --azure
python3 cloud_enum.py -k company --gcp

CloudScraper

# Install
git clone https://github.com/jordanpotti/CloudScraper.git
cd CloudScraper
pip install -r requirements.txt

# Scrape cloud resources
python CloudScraper.py company

Subdomain Takeover

Vulnerable Cloud Services

# Azure
CNAME points to: *.azurewebsites.net (but site doesn't exist)
CNAME points to: *.cloudapp.net (but VM deleted)

# AWS
CNAME points to: *.s3.amazonaws.com (but bucket doesn't exist)
CNAME points to: *.elb.amazonaws.com (but ELB deleted)

# GCP
CNAME points to: *.appspot.com (but app doesn't exist)

# Check for takeover
subjack -w subdomains.txt -t 100 -timeout 30 -o results.txt -ssl

# Can-I-Take-Over-XYZ
# Reference: https://github.com/EdOverflow/can-i-take-over-xyz

SubOver

# Install
go install github.com/Ice3man543/SubOver@latest

# Check subdomains
SubOver -l subdomains.txt

# With threads
SubOver -l subdomains.txt -t 20

IP Range Discovery

AWS IP Ranges

# Download official AWS IP ranges
curl https://ip-ranges.amazonaws.com/ip-ranges.json | jq -r '.prefixes[] | select(.region=="us-east-1") | .ip_prefix'

# Filter by service
curl https://ip-ranges.amazonaws.com/ip-ranges.json | jq -r '.prefixes[] | select(.service=="S3") | .ip_prefix'

# All EC2 IPs
curl https://ip-ranges.amazonaws.com/ip-ranges.json | jq -r '.prefixes[] | select(.service=="EC2") | .ip_prefix'

Azure IP Ranges

# Download from Microsoft
# https://www.microsoft.com/en-us/download/details.aspx?id=56519

# Parse JSON
jq -r '.values[].properties.addressPrefixes[]' ServiceTags_Public.json

GCP IP Ranges

# GCP historically published netblocks via SPF-style TXT records
# (NOTE: this DNS mechanism has been deprecated by Google — prefer the
# published JSON at https://www.gstatic.com/ipranges/cloud.json)
dig txt _cloud-netblocks.googleusercontent.com
nslookup -type=TXT _cloud-netblocks.googleusercontent.com

# Current method: download and parse the official ranges
curl -s https://www.gstatic.com/ipranges/cloud.json | jq -r '.prefixes[].ipv4Prefix // empty'

# Parse results
dig txt _cloud-netblocks.googleusercontent.com | grep include | grep -oP 'include:_cloud-netblocks\d+\.googleusercontent\.com'

Container Registries

Docker Hub

# Public repositories
https://hub.docker.com/r/COMPANY/

# API
curl https://hub.docker.com/v2/repositories/COMPANY/

# List tags
curl https://hub.docker.com/v2/repositories/COMPANY/REPO/tags/

AWS ECR

# List repositories (requires credentials)
aws ecr describe-repositories

# List images
aws ecr list-images --repository-name REPO-NAME

# Public ECR
https://gallery.ecr.aws/COMPANY/

Azure Container Registry

# Registry pattern
https://COMPANY.azurecr.io

# List repositories (requires auth)
az acr repository list --name COMPANY

# Anonymous access test
curl https://COMPANY.azurecr.io/v2/_catalog

Google Container Registry

# GCR patterns
https://gcr.io/PROJECT-ID/
https://eu.gcr.io/PROJECT-ID/
https://us.gcr.io/PROJECT-ID/
https://asia.gcr.io/PROJECT-ID/

# List images (requires auth)
gcloud container images list --repository=gcr.io/PROJECT-ID

# Anonymous test
curl https://gcr.io/v2/PROJECT-ID/REPO/tags/list

SaaS Discovery

Common Patterns

# Slack
COMPANY.slack.com

# Jira
COMPANY.atlassian.net

# Confluence
COMPANY.atlassian.net/wiki

# GitHub
github.com/COMPANY

# GitLab
gitlab.com/COMPANY

# Bitbucket
bitbucket.org/COMPANY

# Salesforce
COMPANY.my.salesforce.com

# Zendesk
COMPANY.zendesk.com

# Intercom
COMPANY.intercom.com

# HubSpot
COMPANY.hubspot.com

Automated Cloud Reconnaissance

Comprehensive Script

#!/bin/bash
# Automated multi-cloud asset discovery for a single target keyword.
#
# Generates candidate bucket/storage-account names from common suffix
# patterns, probes AWS S3, Azure Blob Storage, and GCS anonymously, then
# runs cloud_enum for broader coverage.
#
# Requires: s3scanner, curl, cloud_enum.py (in the current directory or PATH).
# Outputs:  per-provider result files under $OUTPUT/.
set -euo pipefail

COMPANY="target"
OUTPUT="cloud_recon"

mkdir -p "$OUTPUT"

echo "[+] AWS S3 Buckets"
# Candidate bucket names built from common naming patterns.
cat > buckets.txt <<EOF
$COMPANY
$COMPANY-prod
$COMPANY-dev
$COMPANY-backup
$COMPANY-data
$COMPANY-assets
$COMPANY-logs
$COMPANY-storage
EOF

# '|| true': a scan with zero hits must not abort the whole run under set -e.
s3scanner --buckets buckets.txt > "$OUTPUT/s3_results.txt" || true

echo "[+] Azure Storage"
# Azure storage account names cannot contain dashes, hence the joined forms.
cat > azure_names.txt <<EOF
$COMPANY
${COMPANY}storage
${COMPANY}prod
${COMPANY}backup
EOF

# Probe each candidate host. Check the numeric status code via --write-out
# instead of grepping for "200 OK": HTTP/2 responses carry no reason phrase,
# so the old 'grep "200 OK"' test silently misses every HTTP/2 hit.
probe_url() {
  # $1 = URL; prints the HTTP status code, "000" on connection failure.
  curl -s -o /dev/null -w '%{http_code}' --max-time 10 "$1" || true
}

while IFS= read -r name; do
  code=$(probe_url "https://${name}.blob.core.windows.net")
  if [ "$code" = "200" ]; then
    echo "[+] Found: ${name}"
  fi
done < azure_names.txt > "$OUTPUT/azure_results.txt"

echo "[+] GCP Buckets"
while IFS= read -r name; do
  code=$(probe_url "https://storage.googleapis.com/${name}")
  if [ "$code" = "200" ]; then
    echo "[+] Found: ${name}"
  fi
done < buckets.txt > "$OUTPUT/gcp_results.txt"

echo "[+] Cloud Services"
# cloud_enum is a Python script, not an installed binary — invoke via python3
# (matches the install instructions earlier in this document).
python3 cloud_enum.py -k "$COMPANY" > "$OUTPUT/cloud_enum.txt" || true

echo "[+] Results in $OUTPUT/"

Quick Reference

# AWS S3 bucket check
aws s3 ls s3://company-backup --no-sign-request

# Azure blob check (URL must be quoted — an unquoted & backgrounds the command)
curl "https://company.blob.core.windows.net/backup?restype=container&comp=list"

# GCP bucket check
gsutil ls gs://company-backup

# Multi-cloud enumeration
python3 cloud_enum.py -k company

# Subdomain takeover check
subjack -w subdomains.txt -t 100 -o results.txt