Skip to content

Commit 1876ffc

Browse files
authored
Merge pull request #9 from suskind/targets_scheduled_scans
Add script to export targets with scheduled scans
2 parents 4995dbb + 8f928c0 commit 1876ffc

1 file changed

Lines changed: 50 additions & 0 deletions

File tree

targets_scheduled_scans_csv.py

Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
#!/usr/bin/env python
2+
3+
import csv
4+
import requests
5+
from urllib.parse import urljoin
6+
7+
# Human-readable labels for the API's one-letter recurrence codes.
# The empty string means the scheduled scan does not repeat.
recurrence_map = dict([
    ('', 'N/A'),
    ('d', 'daily'),
    ('w', 'weekly'),
    ('m', 'monthly'),
    ('q', 'quarterly'),
])
14+
15+
def main():
    """Export every target that has a scheduled scan to a CSV file.

    Prompts for a Probely API token and an output path (default
    ``./scheduled.csv``), fetches all targets from the API, and writes
    one row per target with a pending scheduled scan:
    ID, NAME, URL, NEXT SCAN DATE, RECURRENCE.

    Raises:
        requests.HTTPError: if the API responds with an error status
            (e.g. an invalid token).
    """
    token = input("API Token:")
    csv_filename = input("CSV path to filename (default: ./scheduled.csv):")
    if csv_filename == "":
        csv_filename = "./scheduled.csv"

    headers = {"Authorization": "JWT {}".format(token)}

    api_base_url = "https://api.probely.com"
    # length=10000 effectively disables pagination for typical accounts.
    targets_endpoint = urljoin(
        api_base_url, "targets/?include=compliance&length=10000"
    )

    # timeout prevents the script from hanging forever on a dead connection.
    response = requests.get(targets_endpoint, headers=headers, timeout=30)
    # Fail loudly on auth/server errors instead of crashing later with an
    # opaque KeyError when "results" is absent from an error payload.
    response.raise_for_status()
    results = response.json()["results"]

    # newline="" is required by the csv module; without it the writer
    # emits blank lines between rows on Windows.
    with open(csv_filename, "w", newline="") as csv_file:
        csv_writer = csv.writer(
            csv_file, delimiter=",", quotechar='"', quoting=csv.QUOTE_ALL
        )
        csv_writer.writerow(["ID", "NAME", "URL", "NEXT SCAN DATE", "RECURRENCE"])
        for result in results:
            next_scan = result.get("next_scan")
            if not next_scan:
                # Target has no scheduled scan -- nothing to export.
                continue
            # "recurrence" may be missing, None, or an unknown code;
            # normalize to "" and fall back to "N/A" rather than letting
            # a direct dict lookup raise KeyError.
            recurrence_code = next_scan.get("recurrence") or ""
            row = [
                result["id"],
                result["site"]["name"],
                result["site"]["url"],
                next_scan.get("date_time", ""),
                recurrence_map.get(recurrence_code, "N/A"),
            ]
            csv_writer.writerow(row)
48+
49+
# Run the export only when invoked as a script, so the module can be
# imported without triggering prompts or network calls.
if __name__ == "__main__":
    main()

0 commit comments

Comments
 (0)