Mirror of https://github.com/dkam/probot.git

Compare commits: v0.2.0...45c1b001cd (10 commits)

| SHA1 |
|---|
| 45c1b001cd |
| f1a0b74a97 |
| 2e91518fd6 |
| c4e1b876ce |
| a7291bdfc3 |
| 88c7dc67f2 |
| 36b6a29039 |
| 89432b2dac |
| ad48a4e335 |
| fea1e2009a |

CHANGELOG.md (15 changed lines)

@@ -1,5 +1,20 @@
## [Unreleased]

## [0.5.0] - 2024-12-24

- Fix bug with Disallow rule containing empty line

## [0.4.0] - 2024-10-31

- Ensure VERSION is available

## [0.3.0] - 2023-09-18

- Only return unique sitemaps.

## [0.2.0] - 2023-09-10

- Correctly handle multiple sitemaps + tests.
## [0.1.0] - 2023-09-09

- Initial release

probot.rb

@@ -2,6 +2,7 @@

require "uri"
require "net/http"
require_relative "probot/version"

# https://moz.com/learn/seo/robotstxt
# https://stackoverflow.com/questions/45293419/order-of-directives-in-robots-txt-do-they-overwrite-each-other-or-complement-ea

@@ -30,6 +31,7 @@ class Probot
@current_agents = ["*"]
@current_agents.each { |agent| @rules[agent] ||= {"disallow" => [], "allow" => [], "crawl_delay" => 0} }
@sitemaps = []

@site = URI(data) if data.start_with?("http")
@doc = @site.nil? ? data : fetch_robots_txt(@site)
parse(@doc)
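
For orientation, a minimal usage sketch suggested by the constructor hunk above: Probot appears to accept either raw robots.txt text or a URL string (anything starting with "http" is fetched via fetch_robots_txt). The example URL below is a placeholder, not taken from the diff.

    require "probot"

    # Parse robots.txt handed over as a plain string.
    r = Probot.new("User-agent: *\nDisallow: /private/")
    r.rules.dig("*", "disallow")   # => array of compiled Regexp rules

    # Or pass a URL; data.start_with?("http") routes it through fetch_robots_txt.
    # r = Probot.new("https://example.com/robots.txt")   # placeholder URL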

@@ -91,7 +93,9 @@ class Probot

# All Regex characters are escaped, then we unescape * and $ as they may be used in robots.txt
if data.allow? || data.disallow?
@current_agents.each { |agent| rules[agent][data.key] << Regexp.new(Regexp.escape(data.value).gsub('\*', ".*").gsub('\$', "$")) }
@current_agents.each do |agent|
rules[agent][data.key] << Regexp.new(Regexp.escape(data.value).gsub('\*', ".*").gsub('\$', "$")) unless data.value.nil?
end

# When user-agent strings are found on consecutive lines, they are considered to be part of the same record. Google ignores crawl_delay.
subsequent_agent = false
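
As a standalone illustration of the escape-then-unescape trick the comment above describes (the sample pattern is made up, not from the diff):

    pattern = "/private/*.php$"            # hypothetical Disallow value
    regexp  = Regexp.new(Regexp.escape(pattern).gsub('\*', ".*").gsub('\$', "$"))
    regexp.match?("/private/secret.php")   # => true  ("*" became ".*", "$" anchors the end)
    regexp.match?("/public/index.html")    # => false

    # The new `unless data.value.nil?` guard skips rule creation when an Allow/Disallow
    # line has no value, which lines up with the 0.5.0 changelog entry about empty Disallow lines.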

@@ -108,6 +112,7 @@ class Probot
sitemap_uri = URI(data.value)
sitemap_uri = sitemap_uri.host.nil? ? URI.join(*[site, sitemap_uri].compact) : sitemap_uri
@sitemaps << sitemap_uri.to_s
@sitemaps.uniq!
next
end
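
A quick sketch of how a relative Sitemap entry is absolutized here; the site URL is an assumed example, not taken from the diff:

    require "uri"

    site        = URI("https://example.com/robots.txt")   # assumed value of `site`
    sitemap_uri = URI("/sitemap.xml")                      # relative Sitemap: value, so .host is nil
    sitemap_uri = sitemap_uri.host.nil? ? URI.join(*[site, sitemap_uri].compact) : sitemap_uri
    sitemap_uri.to_s                                       # => "https://example.com/sitemap.xml"

    # The added @sitemaps.uniq! keeps repeated Sitemap: lines from producing duplicate entries,
    # matching the 0.3.0 "Only return unique sitemaps" changelog note.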

@@ -126,6 +131,8 @@ class Probot

def clean_value = raw_value.split("#").first&.strip

def clean_url = clean_value&.then { URI(_1).to_s }

def agent? = key == "user-agent"

def disallow? = key == "disallow"

@@ -138,11 +145,13 @@ class Probot

def value
return clean_value.to_f if crawl_delay?
return URI(clean_value).to_s if disallow? || allow?
return clean_url if disallow? || allow?

raw_value
rescue URI::InvalidURIError
raw_value
rescue ArgumentError
raw_value
end
end
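
The extra rescue reads like a guard against values URI() cannot handle at all; a standalone sketch of the same fallback pattern (the helper name is hypothetical):

    require "uri"

    def url_or_raw(raw)
      URI(raw).to_s
    rescue URI::InvalidURIError, ArgumentError
      raw                            # fall back to the raw text when it isn't a parseable URL
    end

    url_or_raw("/wp-admin/")         # => "/wp-admin/"
    url_or_raw("http://bad uri/")    # => "http://bad uri/"  (URI::InvalidURIError)
    url_or_raw(nil)                  # => nil                (URI(nil) raises ArgumentError)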

probot/version.rb

@@ -1,3 +1,3 @@
class Probot
VERSION = "0.2.0"
VERSION = "0.5.0"
end

probot.gemspec

@@ -13,6 +13,7 @@ Gem::Specification.new do |spec|
spec.homepage = "http://github.com/dkam/probot"
spec.license = "MIT"
spec.required_ruby_version = ">= 3.0"
spec.platform = Gem::Platform::RUBY

spec.metadata["homepage_uri"] = spec.homepage
spec.metadata["source_code_uri"] = "http://github.com/dkam/probot"

@@ -29,4 +30,5 @@ Gem::Specification.new do |spec|
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "debug"
end

test_helper.rb

@@ -2,6 +2,5 @@

$LOAD_PATH.unshift File.expand_path("../lib", __dir__)
require "probot"
require "probot/version" # for testing the version number - otherwise the gemspec does it.

require "minitest/autorun"

test_probot.rb

@@ -95,6 +95,19 @@ class TestProbot < Minitest::Test
crawl_delay: 0
}
]
},
{
txt: %("User-agent: *\nDisallow: /wp/wp-admin/\nAllow: /wp/wp-admin/admin-ajax.php\n\nUser-agent: *\nDisallow: /wp-content/uploads/wpo/wpo-plugins-tables-list.json\n\n# START YOAST BLOCK\n# ---------------------------\nUser-agent: *\nDisallow:\n\nSitemap: https://prhinternationalsales.com/sitemap_index.xml\n# ---------------------------\n# END YOAST BLOCK"),
sitemaps: ["https://prhinternationalsales.com/sitemap_index.xml"],
found_agents: ["*"],
tests: [
{
agent: "*",
allowed: ["/wp/wp-admin/admin-ajax.php"],
disallowed: ["/wp/wp-admin/", "/wp-content/uploads/wpo/wpo-plugins-tables-list.json"],
crawl_delay: 0
}
]
}
].freeze
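
For context, roughly how a fixture like the new one exercises the parser (a sketch; the assertion helpers used by this test file are not shown in the hunk):

    txt = "User-agent: *\nDisallow: /wp/wp-admin/\nAllow: /wp/wp-admin/admin-ajax.php\nSitemap: https://prhinternationalsales.com/sitemap_index.xml"
    r = Probot.new(txt)
    r.rules.dig("*", "disallow")   # one compiled Regexp built from /wp/wp-admin/
    r.rules.dig("*", "allow")      # one compiled Regexp built from /wp/wp-admin/admin-ajax.php
    # A sitemaps reader is implied by the fixture's :sitemaps key, e.g.
    # r.sitemaps                   # => ["https://prhinternationalsales.com/sitemap_index.xml"]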

@@ -131,7 +144,9 @@ class TestProbot < Minitest::Test

def test_empty_allow_disallow
assert Probot.new(%(User-agent: *\nAllow:)).rules.dig("*", "allow").empty?
assert Probot.new(%(User-agent: *\nAllow:\n\n)).rules.dig("*", "allow").empty?
assert Probot.new(%(User-agent: *\nDisallow:)).rules.dig("*", "disallow").empty?
assert Probot.new(%(User-agent: *\nDisallow:\n\n)).rules.dig("*", "disallow").empty?
end

def test_consecutive_user_agents
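
The truncated test_consecutive_user_agents presumably covers the behaviour described in the parser comment earlier in this diff: consecutive User-agent lines form one record, so every listed agent receives the rules that follow. A minimal illustration (agents chosen arbitrarily):

    txt = "User-agent: Googlebot\nUser-agent: Bingbot\nDisallow: /private/"
    r = Probot.new(txt)
    r.rules.keys                   # expected to include "Googlebot" and "Bingbot",
                                   # each carrying the same disallow rule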