-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathnmap_scan.py
More file actions
142 lines (109 loc) · 4.78 KB
/
nmap_scan.py
File metadata and controls
142 lines (109 loc) · 4.78 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
import argparse
import json
import os
import socket
import subprocess
import xml.etree.ElementTree as ET
from datetime import datetime

import pandas as pd
import requests
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Send data to the Splunk HTTP Event Collector (HEC)
def send_to_splunk(durr, col_headers):
    """Post one scan record to Splunk's HTTP Event Collector.

    durr        -- list of string field values (parallel to col_headers)
    col_headers -- list of field names
    Returns 0; the HTTP response is discarded.
    """
    this_hostname = socket.gethostbyaddr(socket.gethostname())[0]
    sourcetype = 'external_scans'
    source = "nmap_scanV1"
    # Build the event as "name=value, name=value". zip() stops at the shorter
    # list, matching the original's silent skip of missing values, and fixes
    # the original separator test (header string vs. int) that always
    # appended a stray trailing ", " after the last field.
    event = ', '.join(name + '=' + value for name, value in zip(col_headers, durr))
    # json.dumps escapes quotes/backslashes in field values that the old
    # hand-built string concatenation would have corrupted.
    data = json.dumps({
        'sourcetype': sourcetype,
        'source': source,
        'host': this_hostname,
        'event': event,
    })
    # Splunk Event colector & source type. Change to your URL
    url = 'https://http-inputs-XYZ.splunkcloud.com:443/services/collector'
    headers = {'Authorization': 'Splunk 12345678-ABCD-4321-EFGH-1234567890XY'}
    # NOTE(review): verify=False disables TLS certificate validation for the
    # HEC endpoint -- confirm this is intentional for your collector.
    response = requests.post(url=url, headers=headers, data=data, verify=False)
    return 0
# function will just parse out xml format into a single line/txt file of online IPs only
def def_JustIps(curr_date, fn_phase, result=None):
    """Extract the IPs of all 'up' hosts from an nmap XML report.

    curr_date -- unused here; kept for interface compatibility
    fn_phase  -- name of the nmap XML file in the current directory
    result    -- optional list to accumulate rows into; a fresh list is
                 created per call (the old mutable default ``result=[]``
                 leaked IPs from previous calls into later output files)
    Writes <fn_phase>.txt (one IP per line) and returns that filename.
    """
    if result is None:
        result = []
    # Open file for reading
    root = ET.parse("./" + fn_phase).getroot()
    # split it by each host
    for host in root.findall('host'):
        # Ignore hosts that are not 'up'
        if host.findall('status')[0].attrib['state'] != 'up':
            continue
        # Get IP address only
        result.append([host.findall('address')[0].attrib['addr']])
    fn = fn_phase + ".txt"
    pd.DataFrame(result).to_csv(fn, index=False, header=False)
    return fn
# function to create a CSV of result scans
def create_csv(curr_date, fn_phase, result=None):
    """Parse an nmap XML report into a CSV of open ports on live hosts.

    curr_date -- unused here; kept for interface compatibility
    fn_phase  -- name of the nmap XML file in the current directory
    result    -- optional list to accumulate rows into; a fresh list is
                 created per call (the old mutable default ``result=[]``
                 leaked rows from previous calls into later CSVs)
    Each row is also forwarded to Splunk via send_to_splunk().
    Writes <fn_phase>.csv and returns 0.
    """
    if result is None:
        result = []
    col_headers = ['IP', 'host_name', 'OS_name', 'protocol', 'port', 'service', 'product']
    # Open file for reading
    root = ET.parse("./" + fn_phase).getroot()
    # split it by each host
    for host in root.findall('host'):
        # Ignore hosts that are not 'up'
        if host.findall('status')[0].attrib['state'] != 'up':
            continue
        # Get IP address and host info.
        ip_address = host.findall('address')[0].attrib['addr']
        # If no hostname, then ''
        try:
            host_name = host.findall('hostnames')[0].findall('hostname')[0].attrib['name']
        except IndexError:
            host_name = ''
        # Get the OS information if available, else ''
        try:
            os_name = host.findall('os')[0].findall('osmatch')[0].attrib['name']
        except IndexError:
            os_name = ''
        # Get information on ports and services
        try:
            # loop through each port found
            ports = host.findall('ports')[0].findall('port')
            for port in ports:
                # Ignore ports that are not 'open', skip to the next one
                if port.findall('state')[0].attrib['state'] != 'open':
                    continue
                proto = port.attrib['protocol']
                port_id = port.attrib['portid']
                # step inside the service tag, assume the name attrib will always be there
                service = port.findall('service')[0].attrib['name']
                try:
                    product = port.findall('service')[0].attrib['product']
                except (IndexError, KeyError):
                    product = ''
                port_data = [ip_address, host_name, os_name, proto, port_id, service, product]
                send_to_splunk(port_data, col_headers)
                result.append(port_data)
        # If no port information, just record the host identity. The original
        # called temp.extend(a, b, c), which raises TypeError -- list.extend
        # takes a single iterable -- so this fallback path always crashed.
        except IndexError:
            temp = [ip_address, host_name, os_name]
            send_to_splunk(temp, col_headers)
            result.append(temp)
    df = pd.DataFrame(result)
    fn = fn_phase + ".csv"
    df.to_csv(fn, index=False, header=col_headers)
    return 0
# main function to run the scans
def main():
    """Run a two-phase nmap sweep: discovery, then full port/service probe."""
    IPs = '123.12.2.1-12'
    curr_date = datetime.now().strftime("%m-%d-%Y")
    fn_phase1 = "phase1-online_IPs_only_" + curr_date
    fn_phase2 = "phase2-probe_" + curr_date
    # Phase 1: just a ping/discovery scan to see what's online.
    # def_JustIps() parses this file as XML, so the output must be written
    # with -oX (the original command had no output flag at all).
    # '[PERAMS]' is a placeholder -- replace with your real nmap options.
    # subprocess.run with an argument list avoids shell string interpolation.
    subprocess.run(['nmap', '[PERAMS]', '-oX', fn_phase1, IPs], check=False)
    fn_IP_list = def_JustIps(curr_date, fn_phase1)
    # Phase 2: probe all IPs from the list -- all ports (1-65535) plus
    # service/host info. Targets come from -iL, so the original's trailing
    # raw IPs argument (which re-added the whole range) is dropped.
    subprocess.run(['nmap', '[PERAMS]', '-iL', fn_IP_list, '-oX', fn_phase2], check=False)
    create_csv(curr_date, fn_phase2)
    return 0


# Guard so importing this module no longer triggers a scan as a side effect.
if __name__ == "__main__":
    main()