Rename the option to `url_from_event`, as suggested by @cantino

Akinori MUSHA 2015-06-19 17:43:16 +09:00
parent 1e336f029e
commit 130ca6c9af
2 changed files with 5 additions and 5 deletions


@@ -19,7 +19,7 @@ module Agents
       `url` can be a single url, or an array of urls (for example, for multiple pages with the exact same structure but different content to scrape)
-      The WebsiteAgent can also scrape based on incoming events. It will scrape the url contained in the `url` key of the incoming event payload, or if you set `url_on_receive` it is used as a Liquid template to generate the url to access. If you specify `merge` as the `mode`, it will retain the old payload and update it with the new values.
+      The WebsiteAgent can also scrape based on incoming events. It will scrape the url contained in the `url` key of the incoming event payload, or if you set `url_from_event` it is used as a Liquid template to generate the url to access. If you specify `merge` as the `mode`, it will retain the old payload and update it with the new values.
       # Supported Document Types
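After this rename, the option sits in an agent's options hash roughly as below. This is a minimal sketch only: the template URL, the `extract` selectors, and the assumed incoming payload are chosen for illustration and are not taken from this commit.

# Hypothetical WebsiteAgent options using the renamed key. When an event such as
# { 'url' => 'http://xkcd.com' } arrives, `url_from_event` is rendered as a Liquid
# template against that payload to build the url to fetch; `mode: merge` keeps the
# incoming payload and adds the extracted values to it.
options = {
  'expected_update_period_in_days' => '2',
  'url_from_event' => 'http://example.org/render?url={{ url | uri_escape }}',
  'type' => 'html',
  'mode' => 'merge',
  'extract' => {
    'title' => { 'css' => 'title', 'value' => 'string(.)' }  # selector chosen purely for illustration
  }
}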
@@ -135,7 +135,7 @@ module Agents
     def validate_options
       # Check for required fields
-      errors.add(:base, "either url or url_on_receive is required") unless options['url'].present? || options['url_on_receive'].present?
+      errors.add(:base, "either url or url_from_event is required") unless options['url'].present? || options['url_from_event'].present?
       errors.add(:base, "expected_update_period_in_days is required") unless options['expected_update_period_in_days'].present?
       if !options['extract'].present? && extraction_type != "json"
         errors.add(:base, "extract is required for all types except json")
@@ -259,7 +259,7 @@ module Agents
       incoming_events.each do |event|
         interpolate_with(event) do
           url_to_scrape =
-            if url_template = options['url_on_receive'].presence
+            if url_template = options['url_from_event'].presence
               interpolate_string(url_template)
             else
               event.payload['url']
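The fallback in `receive` can be sketched standalone like this, using the liquid gem directly in place of Huginn's `interpolate_with`/`interpolate_string` helpers; the method name and payload here are illustrative, not part of the codebase.

require 'liquid'  # the templating engine Huginn's interpolation is built on

# Illustrative stand-in for the branch above: prefer `url_from_event` rendered
# against the event payload, otherwise fall back to the event's own `url` value.
def url_to_scrape(options, payload)
  template = options['url_from_event']
  if template && !template.empty?
    Liquid::Template.parse(template).render(payload)
  else
    payload['url']
  end
end

payload = { 'url' => 'http://xkcd.com' }
url_to_scrape({ 'url_from_event' => 'http://example.org/?u={{ url }}' }, payload)
# => "http://example.org/?u=http://xkcd.com"
url_to_scrape({}, payload)
# => "http://xkcd.com"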


@@ -633,11 +633,11 @@ fire: hot
         }.to change { Event.count }.by(1)
       end
-      it "should use url_on_receive as url to scrape if it exists when receiving an event" do
+      it "should use url_from_event as url to scrape if it exists when receiving an event" do
        stub = stub_request(:any, 'http://example.org/?url=http%3A%2F%2Fxkcd.com')
        @checker.options = @valid_options.merge(
-         'url_on_receive' => 'http://example.org/?url={{url | uri_escape}}'
+         'url_from_event' => 'http://example.org/?url={{url | uri_escape}}'
        )
        @checker.receive([@event])
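The stubbed request URL in that spec is just the new option's template rendered against the incoming event, whose payload evidently contains 'url' => 'http://xkcd.com'. A rough reproduction with plain Liquid, registering a CGI-based stand-in for Huginn's `uri_escape` filter (which is not part of stock Liquid):

require 'liquid'
require 'cgi'

# Approximation of Huginn's `uri_escape` Liquid filter for this sketch only.
module UriEscapeFilter
  def uri_escape(input)
    CGI.escape(input.to_s)
  end
end
Liquid::Template.register_filter(UriEscapeFilter)

template = 'http://example.org/?url={{url | uri_escape}}'
Liquid::Template.parse(template).render('url' => 'http://xkcd.com')
# => "http://example.org/?url=http%3A%2F%2Fxkcd.com", matching the stubbed request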