#!/usr/bin/env python
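# Bulk-scan a queue of URLs with the Probely API: each URL becomes a temporary
# target, a scan is started, and up to `pool_size` scans run concurrently. The
# script polls every `sleep_time` seconds; when a scan completes, its findings
# are appended to a CSV file and the target is deleted.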
import csv
import logging
import sys
from time import sleep
from urllib.parse import urlparse, urljoin

import requests

token = ""
base_url = "https://api.probely.com"
pool_size = 25
target_list_url = urljoin(base_url, "targets/")
target_detail_url = urljoin(base_url, "targets/{target_id}/")
start_scan_url = urljoin(base_url, "targets/{target_id}/scan_now/")
scan_detail_url = urljoin(base_url, "targets/{target_id}/scans/{scan_id}/")
finding_list_url = urljoin(base_url, "targets/{target_id}/findings/")
session = requests.Session()
sleep_time = 5 * 60  # 5 minutes
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(levelname)s] %(message)s",
    handlers=[
        logging.StreamHandler(sys.stdout),
    ],
)

def create_target(url):
    hostname = urlparse(url).hostname
    logging.info("[%s] Creating target", hostname)
    target_payload = {
        "site": {
            "url": url,
            "name": hostname,
        },
        "labels": [
            {"name": "Test"},
        ],
    }
    response = session.post(target_list_url, json=target_payload)
    response.raise_for_status()
    return response.json()

def delete_target(target):
    logging.info("[%s] Deleting target", target["site"]["name"])
    response = session.delete(target_detail_url.format(target_id=target["id"]))
    response.raise_for_status()

def start_scan(target):
    logging.info("[%s] Starting scan", target["site"]["name"])
    response = session.post(start_scan_url.format(target_id=target["id"]))
    response.raise_for_status()
    return response.json()


def get_scan(target, scan):
    logging.info("[%s] Retrieving scan", target["site"]["name"])
    response = session.get(
        scan_detail_url.format(target_id=target["id"], scan_id=scan["id"])
    )
    response.raise_for_status()
    return response.json()

def get_scan_findings(target, scan):
    """Collect all findings for a scan, following the API's pagination."""
    page = 1
    page_total = 1
    findings = []
    while page <= page_total:
        response = session.get(
            finding_list_url.format(target_id=target["id"]),
            params={"scan": scan["id"], "length": 10000, "page": page},
        )
        response.raise_for_status()
        response = response.json()
        findings.extend(response["results"])
        page_total = response["page_total"]
        page += 1
    return findings

def create_and_start_scan(target_url, running_scans):
    """Create a target for target_url, start a scan, and track it in running_scans."""
    global pool_size
    logging.info("Starting %s", target_url)
    try:
        target = create_target(target_url)
        scan = start_scan(target)
    except requests.HTTPError as exc:
        errors = exc.response.json().get("non_field_errors") or []
        if (
            errors
            and "The target pool of your subscription has no available slots"
            in errors[0]
        ):
            logging.error(
                "Reached pool limit before filling queue! Reducing pool size."
            )
            pool_size -= 1
            if pool_size == 0:
                logging.error("Unable to create any more targets")
                sys.exit(1)
        else:
            logging.error(exc)
            if exc.response is not None:
                logging.error(exc.response.content)
    else:
        running_scans[target["id"]] = (target, scan)

def save_and_delete(target, scan, output_file):
    """Append the scan's findings to the CSV file, then delete the target."""
    with open(output_file, "at", newline="") as csv_file:
        csv_writer = csv.writer(
            csv_file, delimiter=",", quotechar='"', quoting=csv.QUOTE_ALL
        )
        findings = get_scan_findings(target, scan)
        for finding in findings:
            csv_writer.writerow(to_csv(finding))
    delete_target(target)

def to_csv(result):
    """Flatten a finding into a CSV row: id, severity, definition name, URL,
    last found date, state, assignee email, and labels."""
    labels = result["labels"] if result.get("labels") else []
    labels_name = [label["name"] for label in labels]
    row = [
        result["id"],
        result["severity"],
        result["definition"]["name"],
        result["url"],
        result["last_found"],
        result["state"],
        result.get("assignee")["email"] if result.get("assignee") else " ",
        *labels_name,
    ]
    return row

def main(target_list, output_file, token):
    """Scan every URL in target_list, keeping at most pool_size scans running."""
    # Add token to all requests
    global pool_size
    session.headers.update({"Authorization": f"JWT {token}"})

    running_scans = {}
    while True:
        # Create targets and start scans up to pool_size
        while len(running_scans) < pool_size:
            try:
                target_url = target_list.pop()  # Remove from queue
            except IndexError:
                break
            else:
                create_and_start_scan(target_url, running_scans)
        logging.info("%s running scans", len(running_scans))

        sleep(sleep_time)  # Wait before checking scans

        # Check running scans
        logging.info("Checking on %s running scans", len(running_scans))
        finished_scans = []
        for target, scan in running_scans.values():
            scan = get_scan(target, scan)
            logging.info("[%s] Scan status %s", target["site"]["name"], scan["status"])
            if scan["status"] == "queued":
                logging.info("[%s] Scan hasn't started yet", target["site"]["name"])
            elif scan["status"] in ("cancelled", "failed"):
                logging.error(
                    "[%s] Scan has unexpectedly stopped", target["site"]["name"]
                )
                # Stop tracking scans that will never finish; the target is left
                # in place for inspection
                finished_scans.append(target["id"])
            elif scan["status"] == "completed":
                logging.info("[%s] Scan has finished", target["site"]["name"])
                save_and_delete(target, scan, output_file)
                finished_scans.append(target["id"])
                logging.info(
                    "[%s] Results stored and target deleted", target["site"]["name"]
                )
            else:
                logging.info("[%s] Scan still running", target["site"]["name"])

        # Remove from running scans
        for id_ in finished_scans:
            del running_scans[id_]

        if not target_list and not running_scans:
            sys.exit(0)

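# Example configuration: set `token` above to a Probely API token, list the URLs
# to scan, and choose the output path; findings are appended to that CSV file.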
if __name__ == "__main__":
    target_list = [
        "https://example.org",
    ]
    output_file = "/tmp/out.csv"
    main(target_list, output_file, token)