Skip to content

Commit b895f72

Browse files
committed
Improve Netflow padding detection
1 parent 37afb3f commit b895f72

File tree

2 files changed

+103
-61
lines changed

2 files changed

+103
-61
lines changed

scapy/layers/netflow.py

Lines changed: 89 additions & 61 deletions
Original file line numberDiff line numberDiff line change
@@ -35,12 +35,31 @@
3535
from scapy.config import conf
3636
from scapy.data import IP_PROTOS
3737
from scapy.error import warning, Scapy_Exception
38-
from scapy.fields import ByteEnumField, ByteField, Field, FieldLenField, \
39-
FlagsField, IPField, IntField, MACField, \
40-
PacketListField, PadField, SecondsIntField, ShortEnumField, ShortField, \
41-
StrField, StrFixedLenField, ThreeBytesField, UTCTimeField, XByteField, \
42-
XShortField, LongField, BitField, ConditionalField, BitEnumField, \
43-
StrLenField
38+
from scapy.fields import (
39+
BitEnumField,
40+
BitField,
41+
ByteEnumField,
42+
ByteField,
43+
ConditionalField,
44+
Field,
45+
FieldLenField,
46+
FlagsField,
47+
IPField,
48+
IntField,
49+
LongField,
50+
MACField,
51+
PacketListField,
52+
SecondsIntField,
53+
ShortEnumField,
54+
ShortField,
55+
StrField,
56+
StrFixedLenField,
57+
StrLenField,
58+
ThreeBytesField,
59+
UTCTimeField,
60+
XByteField,
61+
XShortField,
62+
)
4463
from scapy.packet import Packet, bind_layers, bind_bottom_up
4564
from scapy.plist import PacketList
4665
from scapy.sessions import IPSession, DefaultSession
@@ -1388,14 +1407,11 @@ def default_payload_class(self, p):
13881407
class NetflowDataflowsetV9(Packet):
13891408
name = "Netflow DataFlowSet V9/10"
13901409
fields_desc = [ShortField("templateID", 255),
1391-
FieldLenField("length", None, length_of="records",
1392-
adjust=lambda pkt, x: x + 4 + (-x % 4)),
1393-
PadField(
1394-
PacketListField(
1395-
"records", [],
1396-
NetflowRecordV9,
1397-
length_from=lambda pkt: pkt.length - 4
1398-
), 4, padwith=b"\x00")]
1410+
ShortField("length", None),
1411+
PacketListField(
1412+
"records", [],
1413+
NetflowRecordV9,
1414+
length_from=lambda pkt: pkt.length - 4)]
13991415

14001416
@classmethod
14011417
def dispatch_hook(cls, _pkt=None, *args, **kargs):
@@ -1413,6 +1429,15 @@ def dispatch_hook(cls, _pkt=None, *args, **kargs):
14131429
return NetflowOptionsFlowset10
14141430
return cls
14151431

1432+
def post_build(self, pkt, pay):
1433+
if self.length is None:
1434+
# Padding is optional, let's apply it on build
1435+
length = len(pkt)
1436+
pad = (-length) % 4
1437+
pkt = pkt[:2] + struct.pack("!H", length + pad) + pkt[4:]
1438+
pkt += b"\x00" * pad
1439+
return pkt + pay
1440+
14161441

14171442
def _netflowv9_defragment_packet(pkt, definitions, definitions_opts, ignored):
14181443
"""Used internally to process a single packet during defragmenting"""
@@ -1467,53 +1492,56 @@ def _netflowv9_defragment_packet(pkt, definitions, definitions_opts, ignored):
14671492
current = current.payload
14681493
# Dissect flowsets
14691494
if NetflowDataflowsetV9 in pkt:
1470-
datafl = pkt[NetflowDataflowsetV9]
1471-
tid = datafl.templateID
1472-
if tid not in definitions and tid not in definitions_opts:
1473-
ignored.add(tid)
1474-
return
1475-
# All data is stored in one record, awaiting to be split
1476-
# If fieldValue is available, the record has not been
1477-
# defragmented: pop it
1478-
try:
1479-
data = datafl.records[0].fieldValue
1480-
datafl.records.pop(0)
1481-
except (IndexError, AttributeError):
1482-
return
1483-
res = []
1484-
# Flowset record
1485-
# Now, according to the flow/option data,
1486-
# let's re-dissect NetflowDataflowsetV9
1487-
if tid in definitions:
1488-
tot_len, cls = definitions[tid]
1489-
while len(data) >= tot_len:
1490-
res.append(cls(data[:tot_len]))
1491-
data = data[tot_len:]
1492-
# Inject dissected data
1493-
datafl.records = res
1494-
if data:
1495-
if len(data) <= 4:
1496-
datafl.add_payload(conf.padding_layer(data))
1497-
else:
1498-
datafl.do_dissect_payload(data)
1499-
# Options
1500-
elif tid in definitions_opts:
1501-
(scope_len, scope_cls,
1502-
option_len, option_cls) = definitions_opts[tid]
1503-
# Dissect scopes
1504-
if scope_len:
1505-
res.append(scope_cls(data[:scope_len]))
1506-
if option_len:
1507-
res.append(
1508-
option_cls(data[scope_len:scope_len + option_len])
1509-
)
1510-
if len(data) > scope_len + option_len:
1511-
res.append(
1512-
conf.padding_layer(data[scope_len + option_len:])
1513-
)
1514-
# Inject dissected data
1515-
datafl.records = res
1516-
datafl.name = "Netflow DataFlowSet V9/10 - OPTIONS"
1495+
current = pkt
1496+
while NetflowDataflowsetV9 in current:
1497+
datafl = current[NetflowDataflowsetV9]
1498+
tid = datafl.templateID
1499+
if tid not in definitions and tid not in definitions_opts:
1500+
ignored.add(tid)
1501+
return
1502+
# All data is stored in one record, awaiting to be split
1503+
# If fieldValue is available, the record has not been
1504+
# defragmented: pop it
1505+
try:
1506+
data = datafl.records[0].fieldValue
1507+
datafl.records.pop(0)
1508+
except (IndexError, AttributeError):
1509+
return
1510+
res = []
1511+
# Flowset record
1512+
# Now, according to the flow/option data,
1513+
# let's re-dissect NetflowDataflowsetV9
1514+
if tid in definitions:
1515+
tot_len, cls = definitions[tid]
1516+
while len(data) >= tot_len:
1517+
res.append(cls(data[:tot_len]))
1518+
data = data[tot_len:]
1519+
# Inject dissected data
1520+
datafl.records = res
1521+
if data:
1522+
if len(data) <= 4:
1523+
datafl.add_payload(conf.padding_layer(data))
1524+
else:
1525+
datafl.do_dissect_payload(data)
1526+
# Options
1527+
elif tid in definitions_opts:
1528+
(scope_len, scope_cls,
1529+
option_len, option_cls) = definitions_opts[tid]
1530+
# Dissect scopes
1531+
if scope_len:
1532+
res.append(scope_cls(data[:scope_len]))
1533+
if option_len:
1534+
res.append(
1535+
option_cls(data[scope_len:scope_len + option_len])
1536+
)
1537+
if len(data) > scope_len + option_len:
1538+
res.append(
1539+
conf.padding_layer(data[scope_len + option_len:])
1540+
)
1541+
# Inject dissected data
1542+
datafl.records = res
1543+
datafl.name = "Netflow DataFlowSet V9/10 - OPTIONS"
1544+
current = datafl.payload
15171545

15181546

15191547
def netflowv9_defragment(plist, verb=1):

test/scapy/layers/netflow.uts

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -201,6 +201,20 @@ pkt4 = NetflowTemplateV9(s)
201201
assert len(pkt4.template_fields) == pkt4.fieldCount
202202
assert sum([template.fieldLength for template in pkt4.template_fields]) == 124
203203

204+
= NetflowV10/IPFIX - dissection without padding (GH3101)
205+
206+
s=b'\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x08\x00E\x00\x00f\x00\x01\x00\x00@\x11|\x84\x7f\x00\x00\x01\x7f\x00\x00\x01\x08\x07\x08\x07\x00R\xee\xa2\x00\n\x00H\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x18\x01\x01\x00\x04\x00\x02\x00\x04\x00\x04\x00\x01\x00\x08\x00\x04\x00\x0c\x00\x04\x01\x01\x00\x11\x00\x00\x00\x00\x06\xc0\xa8\x00\n\xc0\xa8\x00\x0b\x01\x01\x00\x11\x00\x00\x00\x00\x06\xc0\xa8\x00\n\xc0\xa8\x00\x0b'
207+
pkt = netflowv9_defragment(Ether(s))[0]
208+
209+
for i in range(1,3):
210+
assert pkt.getlayer(NetflowDataflowsetV9, i).templateID == 257
211+
assert pkt.getlayer(NetflowDataflowsetV9, i).records[0].IN_PKTS == b'\x00\x00\x00\x00'
212+
assert pkt.getlayer(NetflowDataflowsetV9, i).records[0].PROTOCOL == 6
213+
assert pkt.getlayer(NetflowDataflowsetV9, i).records[0].IPV4_SRC_ADDR == "192.168.0.10"
214+
assert pkt.getlayer(NetflowDataflowsetV9, i).records[0].IPV4_DST_ADDR == "192.168.0.11"
215+
216+
assert not pkt.getlayer(NetflowDataflowsetV9, 2).payload
217+
204218
= NetflowV10/IPFIX - build
205219

206220
netflow_header = NetflowHeader()/NetflowHeaderV10()

0 commit comments

Comments (0)