Many updates
363  test/jobs/process_waf_event_job_test.rb  Normal file
@@ -0,0 +1,363 @@
require "test_helper"

class ProcessWafEventJobTest < ActiveJob::TestCase
  setup do
    @sample_event_data = {
      "request_id" => "test-event-123",
      "timestamp" => Time.current.iso8601,
      "request" => {
        "ip" => "192.168.1.100",
        "method" => "GET",
        "path" => "/api/test",
        "headers" => {
          "host" => "example.com",
          "user-agent" => "TestAgent/1.0"
        }
      },
      "response" => {
        "status_code" => 200,
        "duration_ms" => 150
      },
      "waf_action" => "allow",
      "server_name" => "test-server",
      "environment" => "test"
    }

    @headers = { "Content-Type" => "application/json" }
  end

  # Single Event Processing
  test "processes single event with request_id" do
    assert_difference 'Event.count', 1 do
      ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)
    end

    event = Event.last
    assert_equal "test-event-123", event.request_id
    assert_equal "192.168.1.100", event.ip_address
    assert_equal "/api/test", event.request_path
    assert_equal "get", event.request_method
    assert_equal "allow", event.waf_action
  end

  test "processes single event with legacy event_id" do
    event_data = @sample_event_data.dup
    event_data.delete("request_id")
    event_data["event_id"] = "legacy-event-456"

    assert_difference 'Event.count', 1 do
      ProcessWafEventJob.perform_now(event_data: event_data, headers: @headers)
    end

    event = Event.last
    assert_equal "legacy-event-456", event.request_id
  end

  test "processes single event with correlation_id" do
    event_data = @sample_event_data.dup
    event_data.delete("request_id")
    event_data["correlation_id"] = "correlation-789"

    assert_difference 'Event.count', 1 do
      ProcessWafEventJob.perform_now(event_data: event_data, headers: @headers)
    end

    event = Event.last
    assert_equal "correlation-789", event.request_id
  end

  test "generates UUID for events without ID" do
    event_data = @sample_event_data.dup
    event_data.delete("request_id")

    assert_difference 'Event.count', 1 do
      ProcessWafEventJob.perform_now(event_data: event_data, headers: @headers)
    end

    event = Event.last
    assert_not_nil event.request_id
    assert_match(/\A[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\z/, event.request_id)
  end
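
  # The four tests above pin down the ID fallback order the job is assumed to
  # apply: request_id, then the legacy event_id, then correlation_id, then a
  # freshly generated UUID. A minimal sketch of that resolution as an unused
  # test-local helper (illustrative only; the job's real method may differ):
  def assumed_request_id_for(event_data)
    event_data["request_id"] ||
      event_data["event_id"] ||
      event_data["correlation_id"] ||
      SecureRandom.uuid
  end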

  # Multiple Events Processing
  test "processes multiple events in events array" do
    # deep_dup so the two events do not share the nested "request" hash
    event1 = @sample_event_data.deep_dup
    event1["request_id"] = "event-1"
    event1["request"]["ip"] = "192.168.1.1"

    event2 = @sample_event_data.deep_dup
    event2["request_id"] = "event-2"
    event2["request"]["ip"] = "192.168.1.2"

    batch_data = {
      "events" => [event1, event2]
    }

    assert_difference 'Event.count', 2 do
      ProcessWafEventJob.perform_now(event_data: batch_data, headers: @headers)
    end

    request_ids = Event.last(2).pluck(:request_id)
    assert_includes request_ids, "event-1"
    assert_includes request_ids, "event-2"
  end

  # Duplicate Handling
  test "skips duplicate events" do
    # Create event first
    Event.create_from_waf_payload!("test-event-123", @sample_event_data)

    assert_no_difference 'Event.count' do
      ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)
    end
  end

  test "handles duplicates within batch" do
    event1 = @sample_event_data.dup
    event1["request_id"] = "duplicate-test"

    event2 = @sample_event_data.dup
    event2["request_id"] = "duplicate-test"

    batch_data = {
      "events" => [event1, event2]
    }

    assert_difference 'Event.count', 1 do
      ProcessWafEventJob.perform_now(event_data: batch_data, headers: @headers)
    end
  end
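
  # Both duplicate tests assume the job is idempotent on request_id. A sketch of
  # the kind of guard that would satisfy them (assumed; the job could equally
  # rely on a unique index and rescue ActiveRecord::RecordNotUnique):
  #
  #   next if Event.exists?(request_id: request_id)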

  # Network Range Processing
  test "creates tracking network for event IP" do
    ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)

    event = Event.last
    assert_not_nil event.network_range_id

    # Should create /24 tracking network for IPv4
    tracking_network = event.network_range
    assert_equal "192.168.1.0/24", tracking_network.network.to_s
    assert_equal "auto_generated", tracking_network.source
    assert_equal "IPAPI tracking network", tracking_network.creation_reason
  end
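
  # This test and the IPv6 test further down assume the job collapses each
  # client IP into a /24 (IPv4) or /64 (IPv6) tracking range. A minimal sketch
  # of that derivation with Ruby's stdlib IPAddr (an assumed approach, not
  # necessarily how the job computes it):
  def assumed_tracking_cidr_for(ip)
    addr = IPAddr.new(ip)
    prefix = addr.ipv4? ? 24 : 64
    "#{addr.mask(prefix)}/#{prefix}" # e.g. "192.168.1.100" => "192.168.1.0/24"
  end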

  test "queues IPAPI enrichment when needed" do
    # deep_dup so mutating the nested "request" hash does not leak into @sample_event_data
    event_data = @sample_event_data.deep_dup
    event_data["request"]["ip"] = "8.8.8.8" # Public IP that needs enrichment

    assert_enqueued_jobs 1, only: [FetchIpapiDataJob] do
      ProcessWafEventJob.perform_now(event_data: event_data, headers: @headers)
    end
  end

  test "skips IPAPI enrichment when recently queried" do
    # Create tracking network with recent query
    tracking_network = NetworkRange.create!(
      network: "192.168.1.0/24",
      source: "auto_generated",
      creation_reason: "IPAPI tracking network"
    )
    tracking_network.mark_ipapi_queried!("192.168.1.0/24")

    assert_no_enqueued_jobs only: [FetchIpapiDataJob] do
      ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)
    end
  end

  test "skips IPAPI enrichment when already fetching" do
    tracking_network = NetworkRange.create!(
      network: "192.168.1.0/24",
      source: "auto_generated",
      creation_reason: "IPAPI tracking network"
    )
    tracking_network.mark_as_fetching_api_data!(:ipapi)

    assert_no_enqueued_jobs only: [FetchIpapiDataJob] do
      ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)
    end
  end
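
  # Taken together, the three IPAPI tests assume enrichment is gated roughly as
  # follows before FetchIpapiDataJob is enqueued (hypothetical predicate names,
  # sketched from the test setup, not the job's actual code):
  #
  #   if publicly_routable?(ip) &&
  #      !tracking_network.recently_queried? &&
  #      !tracking_network.fetching_api_data?
  #     FetchIpapiDataJob.perform_later(tracking_network)
  #   end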

  # WAF Policy Evaluation
  test "evaluates WAF policies when needed" do
    tracking_network = NetworkRange.create!(
      network: "192.168.1.0/24",
      source: "auto_generated",
      creation_reason: "IPAPI tracking network"
    )

    # Mock WafPolicyMatcher
    WafPolicyMatcher.expects(:evaluate_and_mark!).with(tracking_network).returns({
      generated_rules: [],
      evaluated_policies: []
    })

    ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)
  end

  test "skips policy evaluation when not needed" do
    tracking_network = NetworkRange.create!(
      network: "192.168.1.0/24",
      source: "auto_generated",
      creation_reason: "IPAPI tracking network",
      policies_evaluated_at: 5.minutes.ago
    )

    # Should not call WafPolicyMatcher
    WafPolicyMatcher.expects(:evaluate_and_mark!).never

    ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)
  end

  # Error Handling
  test "handles invalid event data format gracefully" do
    invalid_data = {
      "invalid" => "data"
    }

    assert_no_difference 'Event.count' do
      assert_nothing_raised do
        ProcessWafEventJob.perform_now(event_data: invalid_data, headers: @headers)
      end
    end
  end

  test "handles event creation errors gracefully" do
    invalid_event_data = @sample_event_data.dup
    invalid_event_data.delete("request") # Missing required request data

    assert_no_difference 'Event.count' do
      assert_nothing_raised do
        ProcessWafEventJob.perform_now(event_data: invalid_event_data, headers: @headers)
      end
    end
  end

  test "handles network processing errors gracefully" do
    # Create the tracking network the job will look up
    tracking_network = NetworkRange.create!(
      network: "192.168.1.0/24",
      source: "auto_generated",
      creation_reason: "IPAPI tracking network"
    )

    # Mock WafPolicyMatcher to raise an error
    WafPolicyMatcher.expects(:evaluate_and_mark!)
                    .with(tracking_network)
                    .raises(StandardError.new("Policy evaluation failed"))

    # Event should still be created despite the policy evaluation error
    assert_difference 'Event.count', 1 do
      assert_nothing_raised do
        ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)
      end
    end
  end

  test "handles events without network ranges" do
    # deep_dup so the nested "request" hash of @sample_event_data is untouched
    event_data = @sample_event_data.deep_dup
    event_data["request"]["ip"] = "127.0.0.1" # Private/local IP

    assert_difference 'Event.count', 1 do
      assert_nothing_raised do
        ProcessWafEventJob.perform_now(event_data: event_data, headers: @headers)
      end
    end

    event = Event.last
    assert_nil event.network_range_id
  end

  # Performance Logging
  test "logs processing metrics" do
    log_output = StringIO.new
    original_logger = Rails.logger
    Rails.logger = Logger.new(log_output)

    begin
      ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)

      log_content = log_output.string
      assert_match(/Processed WAF event test-event-123 in \d+\.\d+ms/, log_content)
      assert_match(/Processed 1 WAF events/, log_content)
    ensure
      # Restore the global logger even if an assertion fails
      Rails.logger = original_logger
    end
  end

  test "logs IPAPI fetch decisions" do
    log_output = StringIO.new
    original_logger = Rails.logger
    Rails.logger = Logger.new(log_output)

    begin
      # Use a public IP to trigger IPAPI fetch
      event_data = @sample_event_data.deep_dup
      event_data["request"]["ip"] = "8.8.8.8"

      ProcessWafEventJob.perform_now(event_data: event_data, headers: @headers)

      log_content = log_output.string
      assert_match(/Queueing IPAPI fetch for IP 8\.8\.8\.8/, log_content)
    ensure
      Rails.logger = original_logger
    end
  end

  # IPv6 Support
  test "creates /64 tracking network for IPv6 addresses" do
    event_data = @sample_event_data.deep_dup
    event_data["request"]["ip"] = "2001:db8::1"

    ProcessWafEventJob.perform_now(event_data: event_data, headers: @headers)

    event = Event.last
    tracking_network = event.network_range
    assert_equal "2001:db8::/64", tracking_network.network.to_s
  end

  # Mixed Batch Processing
  test "processes mixed valid and invalid events in batch" do
    valid_event = @sample_event_data.dup
    valid_event["request_id"] = "valid-event"

    invalid_event = {
      "invalid" => "data",
      "request_id" => "invalid-event"
    }

    batch_data = {
      "events" => [valid_event, invalid_event]
    }

    # Should only create the valid event
    assert_difference 'Event.count', 1 do
      ProcessWafEventJob.perform_now(event_data: batch_data, headers: @headers)
    end

    assert_equal "valid-event", Event.last.request_id
  end

  test "handles very large batches efficiently" do
    events = []
    100.times do |i|
      # deep_dup so each event carries its own nested "request" hash (and IP)
      event = @sample_event_data.deep_dup
      event["request_id"] = "batch-event-#{i}"
      event["request"]["ip"] = "192.168.#{i / 256}.#{i % 256}"
      events << event
    end

    batch_data = {
      "events" => events
    }

    start_time = Time.current
    ProcessWafEventJob.perform_now(event_data: batch_data, headers: @headers)
    processing_time = Time.current - start_time

    assert_equal 100, Event.count
    assert processing_time < 5.seconds, "Processing 100 events should take less than 5 seconds"
  end

  # Integration with Other Jobs
  test "coordinates with BackfillRecentNetworkIntelligenceJob" do
    # Pending: depends on how this job enqueues and coordinates with other jobs.
    skip "Not yet implemented: depends on the job coordination logic"
  end
end