Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
if not os.path.exists(fname):
utils.LOGGER.error("File %r does not exist", fname)
continue
with BroFile(fname) as brof:
bulk = db.flow.start_bulk_insert()
utils.LOGGER.debug("Parsing %s\n\t%s", fname,
"Fields:\n%s\n" % "\n".join(
"%s: %s" % (f, t)
for f, t in brof.field_types
))
if brof.path in FUNCTIONS:
func = FUNCTIONS[brof.path]
elif brof.path in flow.META_DESC:
func = any2flow(brof.path)
else:
utils.LOGGER.debug("Log format not (yet) supported for %r",
fname)
continue
for line in brof:
if not line:
continue
func(bulk, _bro2flow(line))
db.flow.bulk_commit(bulk)
if brof.path == "conn" and not args.no_cleanup:
db.flow.cleanup_flows()
"aggs": {"patterns": base},
}},
}},
}
elif field.startswith('s7.'):
flt = self.flt_and(flt, self.searchscript(name="s7-info"))
subfield = field[3:]
field = {'field': 'ports.scripts.s7-info.' + subfield}
else:
field = {"field": field}
body = {"query": flt.to_dict()}
if nested is None:
body["aggs"] = {"patterns": {"terms": dict(baseterms, **field)}}
else:
body["aggs"] = {"patterns": nested}
utils.LOGGER.debug("DB: Elasticsearch aggregation: %r", body)
result = self.db_client.search(
body=body,
index=self.indexes[0],
ignore_unavailable=True,
size=0
)
result = result["aggregations"]
while 'patterns' in result:
result = result['patterns']
result = result['buckets']
if outputproc is None:
for res in result:
yield {'_id': res['key'], 'count': res['doc_count']}
else:
for res in result:
yield {'_id': outputproc(res['key']),
try:
port['state_reason_ip'] = utils.force_int2ip(
port['state_reason_ip']
)
except ValueError:
pass
for script in port.get('scripts', []):
if script['id'] == 'ssl-cert':
if 'pem' in script['ssl-cert']:
data = ''.join(
script['ssl-cert']['pem'].splitlines()[1:-1]
).encode()
try:
newout, newinfo = xmlnmap.create_ssl_cert(data)
except Exception:
utils.LOGGER.warning('Cannot parse certificate %r',
data,
exc_info=True)
else:
script['output'] = '\n'.join(newout)
script['ssl-cert'] = newinfo
continue
try:
pubkeytype = {
'rsaEncryption': 'rsa',
'id-ecPublicKey': 'ec',
'id-dsa': 'dsa',
'dhpublicnumber': 'dh',
}[script['ssl-cert'].pop('pubkeyalgo')]
except KeyError:
pass
else:
def masscan_parse_s7info(data):
fulldata = data
output_data = {}
output_text = [""]
state = 0
service_info = {
'service_name': 'iso-tsap',
'service_devicetype': 'specialized',
}
while data:
if data[:1] != b"\x03":
utils.LOGGER.warning(
"Masscan s7-info: invalid data [%r]",
data
)
return None
length = struct.unpack(">H", data[2:4])[0]
curdata, data = data[4:length], data[length:]
if len(curdata) < length - 4:
utils.LOGGER.warning(
"Masscan s7-info: record too short [%r] length %d, should be "
"%d", curdata, len(curdata), length - 4
)
datatype = curdata[1:2]
if state == 0: # Connect Confirm
if datatype == b"\xd0": # OK
state += 1
continue
def shutdown(signum, _):
    """Signal handler: request a clean stop.

    Flips the module-level `WANTDOWN` flag so the main loop halts once
    the files currently being processed are finished; nothing is
    interrupted mid-file.
    """
    global WANTDOWN
    # Log which signal triggered the shutdown request before flagging it.
    utils.LOGGER.info('SHUTDOWN: got signal %d, will halt after current file.',
                      signum)
    WANTDOWN = True
nmap_record_to_view(x)
)
else:
callback = None
for fileelt in files:
with tempfile.NamedTemporaryFile(delete=False) as fdesc:
fileelt.save(fdesc)
try:
if db.nmap.store_scan(fdesc.name, categories=categories,
source=source, callback=callback):
count += 1
os.unlink(fdesc.name)
else:
utils.LOGGER.warning("Could not import %s", fdesc.name)
except Exception:
utils.LOGGER.warning("Could not import %s", fdesc.name,
exc_info=True)
return count
db.nmap.searchsource(source))
)
count = 0
for fileelt in files:
if fileelt.startswith('\x1f\x8b'):
fileelt = zlib.decompress(fileelt, 16+zlib.MAX_WBITS)
elif fileelt.startswith('BZ'):
fileelt = bz2.decompress(fileelt)
with tempfile.NamedTemporaryFile(delete=False) as fdesc:
fdesc.write(fileelt)
if db.nmap.store_scan(fdesc.name, categories=list(categories),
source=source, gettoarchive=gettoarchive):
count += 1
fdesc.unlink(fdesc.name)
else:
utils.LOGGER.warning("Could not import %s" % fdesc.name)
return count
def migrate_schema(self, colname, version):
"""Process to schema migrations in column `colname` starting
from `version`.
"""
failed = 0
while version in self.schema_migrations[colname]:
updated = False
new_version, migration_function = self.schema_migrations[
colname][version]
utils.LOGGER.info(
"Migrating column %s from version %r to %r",
colname, version, new_version,
)
# unlimited find()!
for record in self.find(colname, self.searchversion(version)):
try:
update = migration_function(record)
except Exception:
utils.LOGGER.warning(
"Cannot migrate host %s", record['_id'], exc_info=True,
)
failed += 1
else:
if update is not None:
updated = True
self.db[colname].update({"_id": record["_id"]}, update)
def _getinfos_cert(spec):
    """Build an indexable/queriable summary of a certificate record.

    Decodes the base64 certificate stored under ``spec['value']`` and
    returns ``{'infos': ...}`` with the parsed data (hash values,
    issuer, subject, algorithm), or ``{}`` when the value cannot be
    decoded or yields no information.
    """
    # TODO: move to mongodb specific functions.
    try:
        raw_cert = utils.decode_b64(spec['value'].encode())
    except Exception:
        # Best-effort: a malformed record is logged and skipped, not fatal.
        utils.LOGGER.info("Cannot parse certificate for record %r", spec,
                          exc_info=True)
        return {}
    parsed = utils.get_cert_info(raw_cert)
    return {'infos': parsed} if parsed else {}
.select_from(join(join(self.tables.scan, self.tables.port),
self.tables.script))
.where(and_(
self.tables.scan.schema_version == 10,
self.tables.script.name == "ssl-cert",
)))
for rec in self.db.execute(req):
if 'ssl-cert' in rec.data:
if 'pem' in rec.data['ssl-cert']:
data = ''.join(
rec.data['ssl-cert']['pem'].splitlines()[1:-1]
).encode()
try:
newout, newinfo = xmlnmap.create_ssl_cert(data)
except Exception:
utils.LOGGER.warning('Cannot parse certificate %r',
data,
exc_info=True)
else:
self.db.execute(
update(self.tables.script)
.where(and_(self.tables.script.port == rec.port,
self.tables.script.name == "ssl-cert"))
.values(data={"ssl-cert": newinfo},
output='\n'.join(newout))
)
continue
try:
pubkeytype = {
'rsaEncryption': 'rsa',
'id-ecPublicKey': 'ec',
'id-dsa': 'dsa',