[Snort-devel] [PATCH 1/2] daq_nfq: snort defines its timeout in milliseconds, not seconds

Florian Westphal fwestphal at ...2119...
Fri Apr 29 08:36:04 EDT 2011


cfg->timeout is expressed in milliseconds, but it was assigned directly
to tv_sec, which interprets it as seconds. This turned snort's default
of 1000 ms into a roughly 16-minute timeout. Fix this by converting the
millisecond value into separate seconds and microseconds.
---
 both patches are against daq 0.5.

 os-daq-modules/daq_nfq.c |   11 +++++++----
 1 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/os-daq-modules/daq_nfq.c b/os-daq-modules/daq_nfq.c
index 87cbf6d..d55e9d6 100644
--- a/os-daq-modules/daq_nfq.c
+++ b/os-daq-modules/daq_nfq.c
@@ -74,7 +74,7 @@ typedef struct
     volatile int count;
     int passive;
     uint32_t snaplen;
-    unsigned timeout;
+    struct timeval timeout;
 
     char error[DAQ_ERRBUF_SIZE];
     DAQ_State state;
@@ -183,7 +183,11 @@ static int nfq_daq_get_setup (
     }
 
     impl->snaplen = cfg->snaplen ? cfg->snaplen : IP_MAXPACKET;
-    impl->timeout = cfg->timeout;
+
+    /* cfg->timeout is in milliseconds */
+    impl->timeout.tv_sec = cfg->timeout / 1000;
+    impl->timeout.tv_usec = (cfg->timeout % 1000) * 1000;
+
     impl->passive = ( cfg->mode == DAQ_MODE_PASSIVE );
 
     return DAQ_SUCCESS;
@@ -485,8 +489,7 @@ static int nfq_daq_acquire (
         FD_SET(impl->sock, &fdset);
 
         // set this per call
-        tv.tv_sec = impl->timeout;
-        tv.tv_usec = 0;
+        tv = impl->timeout;
 
         // at least ipq had a timeout!
         if ( select(impl->sock+1, &fdset, NULL, NULL, &tv) < 0 )
-- 
1.7.3.4





More information about the Snort-devel mailing list