Get hands-on experience with 20+ free Google Cloud products and $300 in free credit for new customers.

Get an inventory of all IP addresses in all projects

Hi everyone.

Thanks for your time. My ask: I need to get an inventory of all IP addresses in use across all projects — including VMs, load balancers, SQL instances, and every other component that uses an IP, internal or external — exported to a CSV file.

In the Asset Inventory I can see IPs for compute.address and sql.instance resources, but some of the SQL rows show null for the IP. Any suggestion on how to get all the IPs?

Kind regards

Solved Solved
6 4 2,478
1 ACCEPTED SOLUTION

@elholligan ,

This code is just an example for a single project. The output will look like the following:

DamianS_0-1712729959973.png

main.py file

import csv
from googleapiclient import discovery
from google.auth import default

# Authenticate and create service objects for Compute Engine and SQL Admin API
credentials, _ = default()
compute_service = discovery.build('compute', 'v1', credentials=credentials)
sqladmin_service = discovery.build('sqladmin', 'v1', credentials=credentials)

# Function to list all VM instances across all zones in a project and fetch their IPs
def list_instances(project_id):
    """Return one record per VM instance in *project_id* with its IPs.

    Each record is a dict with the keys Project, Name, Type, Internal IP
    and External IP.  Uses aggregatedList so every zone is covered, and
    follows pagination until all pages are consumed.
    """
    instances = []
    request = compute_service.instances().aggregatedList(project=project_id)
    while request is not None:
        response = request.execute()
        # 'items' maps each zone to a scoped list; zones with no instances
        # carry a 'warning' entry instead of an 'instances' key, and the
        # key itself may be absent for an empty project.
        for instances_scoped_list in response.get('items', {}).values():
            for instance in instances_scoped_list.get('instances', []):
                internal_ip, external_ip = 'N/A', 'N/A'
                # Guard: an instance can (rarely) expose no network
                # interfaces; the original code raised IndexError here.
                network_interfaces = instance.get('networkInterfaces', [])
                if network_interfaces:
                    nic = network_interfaces[0]
                    internal_ip = nic.get('networkIP', 'N/A')
                    access_configs = nic.get('accessConfigs', [])
                    if access_configs:
                        external_ip = access_configs[0].get('natIP', 'N/A')
                instances.append({
                    'Project': project_id,
                    'Name': instance['name'],
                    'Type': 'VM Instance',
                    'Internal IP': internal_ip,
                    'External IP': external_ip
                })
        request = compute_service.instances().aggregatedList_next(
            previous_request=request, previous_response=response)
    return instances

# Function to list all SQL instances in a project and fetch their IPs
def list_sql_instances(project_id):
    """Return one record per Cloud SQL instance in *project_id* with its IPs.

    Fixes two issues in the earlier version: results are now paginated via
    list_next (previously only the first page was read), and the IP types
    are mapped correctly — in the Cloud SQL Admin API, 'PRIMARY' is the
    *public* address, 'PRIVATE' is the VPC-internal address, and
    'OUTGOING' is the source address for outbound connections.
    """
    sql_instances = []
    request = sqladmin_service.instances().list(project=project_id)
    while request is not None:
        response = request.execute()
        for instance in response.get('items', []):
            internal_ip, external_ip = 'N/A', 'N/A'
            for ip in instance.get('ipAddresses', []):
                ip_type = ip.get('type')
                if ip_type == 'PRIVATE':
                    internal_ip = ip.get('ipAddress', 'N/A')
                elif ip_type == 'PRIMARY':
                    external_ip = ip.get('ipAddress', 'N/A')
                # 'OUTGOING' addresses are deliberately skipped — they are
                # not addresses the instance listens on.
            sql_instances.append({
                'Project': project_id,
                'Name': instance['name'],
                'Type': 'SQL Instance',
                'Internal IP': internal_ip,
                'External IP': external_ip
            })
        request = sqladmin_service.instances().list_next(
            previous_request=request, previous_response=response)
    return sql_instances

# Functions for listing global and regional forwarding rules (Load Balancers)
def list_global_forwarding_rules(project_id):
    """Collect every global forwarding rule (global LB frontend) in a project.

    Returns a list of dicts with the keys Project, Name, Type,
    Internal IP and External IP, following pagination to the last page.
    """
    rules = []
    page_request = compute_service.globalForwardingRules().list(project=project_id)
    while page_request is not None:
        page = page_request.execute()
        rules.extend(
            {
                'Project': project_id,
                'Name': fw_rule['name'],
                'Type': 'Global Load Balancer',
                'Internal IP': 'N/A',
                'External IP': fw_rule.get('IPAddress', 'N/A'),
            }
            for fw_rule in page.get('items', [])
        )
        page_request = compute_service.globalForwardingRules().list_next(
            previous_request=page_request, previous_response=page)
    return rules

def list_regional_forwarding_rules(project_id):
    """Collect every regional forwarding rule (regional LB frontend) in a project.

    Returns a list of dicts with the keys Project, Name, Type,
    Internal IP and External IP.  Unlike the earlier version, BOTH the
    regions listing and the per-region forwarding-rule listing are
    paginated via list_next, so no results are silently dropped on
    projects with many regions or rules.
    """
    rules = []
    regions_request = compute_service.regions().list(project=project_id)
    while regions_request is not None:
        regions_response = regions_request.execute()
        for region in regions_response.get('items', []):
            region_name = region['name']
            request = compute_service.forwardingRules().list(
                project=project_id, region=region_name)
            while request is not None:
                response = request.execute()
                for rule in response.get('items', []):
                    rules.append({
                        'Project': project_id,
                        'Name': rule['name'],
                        'Type': 'Regional Load Balancer',
                        'Internal IP': 'N/A',
                        'External IP': rule.get('IPAddress', 'N/A')
                    })
                request = compute_service.forwardingRules().list_next(
                    previous_request=request, previous_response=response)
        regions_request = compute_service.regions().list_next(
            previous_request=regions_request, previous_response=regions_response)
    return rules

# Main execution block
project_id = 'YOUR_PROJECT_ID'  # Replace YOUR_PROJECT_ID with your actual project ID

# Gather every resource type, then flatten into a single list for CSV export.
all_resources = (
    list_instances(project_id)
    + list_sql_instances(project_id)
    + list_global_forwarding_rules(project_id)
    + list_regional_forwarding_rules(project_id)
)

# Write one row per resource; newline='' avoids blank lines on Windows.
csv_file = 'gcp_resources_ip.csv'
fieldnames = ['Project', 'Name', 'Type', 'Internal IP', 'External IP']
with open(csv_file, mode='w', newline='') as file:
    writer = csv.DictWriter(file, fieldnames=fieldnames)
    writer.writeheader()
    writer.writerows(all_resources)

print(f"Data written to {csv_file}")

requirements.txt 

google-api-python-client
google-auth

Both files must be placed in the same location, ideally in a dedicated directory. The script will generate a file named gcp_resources_ip.csv. You can use any AI assistant, such as ChatGPT or Gemini, to adapt this for your needs.

cheers,
DamianS

View solution in original post

4 REPLIES 4

Hello @elholligan ,

Welcome to the Google Cloud Community. Asset Inventory will not show all the IPs. You can use the gcloud command with "--format=json"; however, you will have to deal with JSON formatting, which may differ for each resource type. What you could do instead is use a Python script and call the APIs directly.

cheers,
DamianS

@elholligan ,

This code is just an example for a single project. The output will look like the following:

DamianS_0-1712729959973.png

main.py file

import csv
from googleapiclient import discovery
from google.auth import default

# Authenticate and create service objects for Compute Engine and SQL Admin API
credentials, _ = default()
compute_service = discovery.build('compute', 'v1', credentials=credentials)
sqladmin_service = discovery.build('sqladmin', 'v1', credentials=credentials)

# Function to list all VM instances across all zones in a project and fetch their IPs
def list_instances(project_id):
    """Return one record per VM instance in *project_id* with its IPs.

    Each record is a dict with the keys Project, Name, Type, Internal IP
    and External IP.  Uses aggregatedList so every zone is covered, and
    follows pagination until all pages are consumed.
    """
    instances = []
    request = compute_service.instances().aggregatedList(project=project_id)
    while request is not None:
        response = request.execute()
        # 'items' maps each zone to a scoped list; zones with no instances
        # carry a 'warning' entry instead of an 'instances' key, and the
        # key itself may be absent for an empty project.
        for instances_scoped_list in response.get('items', {}).values():
            for instance in instances_scoped_list.get('instances', []):
                internal_ip, external_ip = 'N/A', 'N/A'
                # Guard: an instance can (rarely) expose no network
                # interfaces; the original code raised IndexError here.
                network_interfaces = instance.get('networkInterfaces', [])
                if network_interfaces:
                    nic = network_interfaces[0]
                    internal_ip = nic.get('networkIP', 'N/A')
                    access_configs = nic.get('accessConfigs', [])
                    if access_configs:
                        external_ip = access_configs[0].get('natIP', 'N/A')
                instances.append({
                    'Project': project_id,
                    'Name': instance['name'],
                    'Type': 'VM Instance',
                    'Internal IP': internal_ip,
                    'External IP': external_ip
                })
        request = compute_service.instances().aggregatedList_next(
            previous_request=request, previous_response=response)
    return instances

# Function to list all SQL instances in a project and fetch their IPs
def list_sql_instances(project_id):
    """Return one record per Cloud SQL instance in *project_id* with its IPs.

    Fixes two issues in the earlier version: results are now paginated via
    list_next (previously only the first page was read), and the IP types
    are mapped correctly — in the Cloud SQL Admin API, 'PRIMARY' is the
    *public* address, 'PRIVATE' is the VPC-internal address, and
    'OUTGOING' is the source address for outbound connections.
    """
    sql_instances = []
    request = sqladmin_service.instances().list(project=project_id)
    while request is not None:
        response = request.execute()
        for instance in response.get('items', []):
            internal_ip, external_ip = 'N/A', 'N/A'
            for ip in instance.get('ipAddresses', []):
                ip_type = ip.get('type')
                if ip_type == 'PRIVATE':
                    internal_ip = ip.get('ipAddress', 'N/A')
                elif ip_type == 'PRIMARY':
                    external_ip = ip.get('ipAddress', 'N/A')
                # 'OUTGOING' addresses are deliberately skipped — they are
                # not addresses the instance listens on.
            sql_instances.append({
                'Project': project_id,
                'Name': instance['name'],
                'Type': 'SQL Instance',
                'Internal IP': internal_ip,
                'External IP': external_ip
            })
        request = sqladmin_service.instances().list_next(
            previous_request=request, previous_response=response)
    return sql_instances

# Functions for listing global and regional forwarding rules (Load Balancers)
def list_global_forwarding_rules(project_id):
    """Collect every global forwarding rule (global LB frontend) in a project.

    Returns a list of dicts with the keys Project, Name, Type,
    Internal IP and External IP, following pagination to the last page.
    """
    rules = []
    page_request = compute_service.globalForwardingRules().list(project=project_id)
    while page_request is not None:
        page = page_request.execute()
        rules.extend(
            {
                'Project': project_id,
                'Name': fw_rule['name'],
                'Type': 'Global Load Balancer',
                'Internal IP': 'N/A',
                'External IP': fw_rule.get('IPAddress', 'N/A'),
            }
            for fw_rule in page.get('items', [])
        )
        page_request = compute_service.globalForwardingRules().list_next(
            previous_request=page_request, previous_response=page)
    return rules

def list_regional_forwarding_rules(project_id):
    """Collect every regional forwarding rule (regional LB frontend) in a project.

    Returns a list of dicts with the keys Project, Name, Type,
    Internal IP and External IP.  Unlike the earlier version, BOTH the
    regions listing and the per-region forwarding-rule listing are
    paginated via list_next, so no results are silently dropped on
    projects with many regions or rules.
    """
    rules = []
    regions_request = compute_service.regions().list(project=project_id)
    while regions_request is not None:
        regions_response = regions_request.execute()
        for region in regions_response.get('items', []):
            region_name = region['name']
            request = compute_service.forwardingRules().list(
                project=project_id, region=region_name)
            while request is not None:
                response = request.execute()
                for rule in response.get('items', []):
                    rules.append({
                        'Project': project_id,
                        'Name': rule['name'],
                        'Type': 'Regional Load Balancer',
                        'Internal IP': 'N/A',
                        'External IP': rule.get('IPAddress', 'N/A')
                    })
                request = compute_service.forwardingRules().list_next(
                    previous_request=request, previous_response=response)
        regions_request = compute_service.regions().list_next(
            previous_request=regions_request, previous_response=regions_response)
    return rules

# Main execution block
project_id = 'YOUR_PROJECT_ID'  # Replace YOUR_PROJECT_ID with your actual project ID

# Gather every resource type, then flatten into a single list for CSV export.
all_resources = (
    list_instances(project_id)
    + list_sql_instances(project_id)
    + list_global_forwarding_rules(project_id)
    + list_regional_forwarding_rules(project_id)
)

# Write one row per resource; newline='' avoids blank lines on Windows.
csv_file = 'gcp_resources_ip.csv'
fieldnames = ['Project', 'Name', 'Type', 'Internal IP', 'External IP']
with open(csv_file, mode='w', newline='') as file:
    writer = csv.DictWriter(file, fieldnames=fieldnames)
    writer.writeheader()
    writer.writerows(all_resources)

print(f"Data written to {csv_file}")

requirements.txt 

google-api-python-client
google-auth

Both files must be placed in the same location, ideally in a dedicated directory. The script will generate a file named gcp_resources_ip.csv. You can use any AI assistant, such as ChatGPT or Gemini, to adapt this for your needs.

cheers,
DamianS

Hi @DamianS 

Thank you so much for your help, I will work with this script.

Kind regards.

Happy to help 🙂 

cheers,
DamianS