@alias454
Created August 23, 2018 17:49
Get Carbon Black device inventory using the API
#!/usr/bin/env python3
import os
import sys
import json
import csv
import configparser
import datetime
import requests
from urllib.parse import urlparse, urlencode, urlunparse
from operator import itemgetter
from collections import OrderedDict
# Create an ini file to store your api key in ~/.credentials/api.key
# [carbon_black]
# api_key = 7h232gj2-23m2-2763-32f2-23r2fnsj2f2f5
credentials_file = os.path.join(os.path.expanduser("~"), '.credentials', 'api.key')
api_baseurl = "https://api.confer.net"
now = datetime.datetime.now().strftime("%a %b %d %H:%M:%S %Y")
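# 'now' looks like "Thu Aug 23 17:49:00 2018"; it is stamped on every exported row below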
def get_api_key(creds_file, section, key):
    """
    Return the api key from the given section of the file in the .credentials folder
    """
    try:
        config = configparser.ConfigParser()
        config.read(creds_file)
        api_key_value = config.get(section, key)
        return api_key_value
    except (configparser.NoSectionError, configparser.NoOptionError):
        print("Could not read from file {}.\n".format(creds_file))
        sys.exit(1)
def build_url(baseurl, path, args_dict):
    """
    Return a full URL string assembled from a base URL, a path, and a dict of query args
    """
    url_parts = list(urlparse(baseurl))
    url_parts[2] = path                 # path component
    url_parts[4] = urlencode(args_dict) # query string
    return urlunparse(url_parts)
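# For example, with the arguments used below:
#   build_url("https://api.confer.net", "/integrationServices/v3/device", {"start": "1", "rows": "10000"})
#   returns "https://api.confer.net/integrationServices/v3/device?start=1&rows=10000"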
def get_results(request_url, api_token=""):
    """
    Return the parsed json for the requested report.
    """
    response = requests.get(request_url, headers={"Content-Type": "application/json", "X-Auth-Token": api_token})
    response.raise_for_status()  # fail loudly on an HTTP error instead of choking on non-json output
    return response.json()
# Grab api key from file and return the value stored in section/key
api_key = get_api_key(credentials_file, 'carbon_black', 'api_key')
# Build url for request
request_args = {'start': '1', 'rows': '10000'}
url = build_url(api_baseurl, '/integrationServices/v3/device', request_args)
# Fetch the json for the report
data = get_results(url, api_key)
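# 'data' is the parsed response; the device records live in its 'results' list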
# Carve up the response and export the results to multiple files
# 1) Output a csv for Splunk consumption
header = ['Timestamp', 'Name', 'Status', 'PolicyGroup', 'DeviceID', 'OS', 'InternalIP', 'ExternalIP', 'LastLocation']
keys = ['timestamp', 'name', 'status', 'policyName', 'deviceId', 'osVersion', 'lastInternalIpAddress', 'lastExternalIpAddress', 'lastLocation']
with open('cbd_inventory_lookup.csv', 'w', newline='') as output_file:
    output = csv.writer(output_file)
    output.writerow(header)  # header row
    for item in data['results']:
        # Stamp each record with the collection time
        item['timestamp'] = now
        # Pull the values out in key order for the csv row
        out = OrderedDict(zip(keys, itemgetter(*keys)(item)))
        output.writerow(list(out.values()))  # values row
# 2) Dump out the full json since we can search it for items later
with open('cbd_device_data.json', 'w') as outfile:
    json.dump(data, outfile)
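
Because the full response is dumped to cbd_device_data.json, it can be searched later without another API call. A minimal sketch of such a lookup, assuming the file layout produced above; the "REGISTERED" status value is just an illustrative filter, swap in whatever field and value you care about:

#!/usr/bin/env python3
import json

with open('cbd_device_data.json') as infile:
    data = json.load(infile)

# Print the name of every device whose status is not the (hypothetical) "REGISTERED"
for device in data['results']:
    if device.get('status') != 'REGISTERED':
        print(device.get('name'))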