Mirror of https://github.com/dkam/probot.git (synced 2025-12-28 09:14:53 +00:00)

Compare commits: 36b6a29039 ... main — 6 commits

- 45c1b001cd
- f1a0b74a97
- 2e91518fd6
- c4e1b876ce
- a7291bdfc3
- 88c7dc67f2
CHANGELOG:

```diff
@@ -1,5 +1,9 @@
 ## [Unreleased]
 
+## [0.5.0] - 2024-12-24
+
+- Fix bug with Disallow rule containing empty line
+
 ## [0.4.0] - 2024-10-31
 
 - Ensure VERISON is available
```
class Probot — constructor:

```diff
@@ -31,6 +31,7 @@ class Probot
     @current_agents = ["*"]
     @current_agents.each { |agent| @rules[agent] ||= {"disallow" => [], "allow" => [], "crawl_delay" => 0} }
     @sitemaps = []
+
     @site = URI(data) if data.start_with?("http")
     @doc = @site.nil? ? data : fetch_robots_txt(@site)
     parse(@doc)
```
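As the hunk shows, the constructor either parses the string it is given or, when the string starts with "http", fetches that site's robots.txt. A minimal usage sketch of the string path, using only `Probot.new` and the `rules` reader that appears elsewhere in this diff (the robots.txt content is made up):

```ruby
require "probot"

# A plain robots.txt string: it does not start with "http", so @site stays
# nil and the text is parsed directly rather than fetched.
robots = Probot.new("User-agent: *\nDisallow: /private/")
robots.rules.dig("*", "disallow")  # => one compiled Regexp for "/private/"

# Probot.new("https://example.com") would instead take the
# fetch_robots_txt(@site) branch and parse that site's robots.txt.
```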
class Probot — Allow/Disallow rule parsing (the empty-Disallow fix):

```diff
@@ -92,7 +93,9 @@ class Probot
 
       # All Regex characters are escaped, then we unescape * and $ as they may used in robots.txt
       if data.allow? || data.disallow?
-        @current_agents.each { |agent| rules[agent][data.key] << Regexp.new(Regexp.escape(data.value).gsub('\*', ".*").gsub('\$', "$")) }
+        @current_agents.each do |agent|
+          rules[agent][data.key] << Regexp.new(Regexp.escape(data.value).gsub('\*', ".*").gsub('\$', "$")) unless data.value.nil?
+        end
 
         # When user-agent strings are found on consecutive lines, they are considered to be part of the same record. Google ignores crawl_delay.
         subsequent_agent = false
```
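The one-liner that became a block does two things: it now skips rules whose value is nil (the empty-Disallow case fixed in 0.5.0), and it turns the robots.txt pattern into a Regexp. The escape-then-unescape chain can be tried in isolation; a standalone sketch with made-up paths:

```ruby
# robots.txt patterns are literal paths except for "*" (any characters)
# and "$" (end of URL). Everything is escaped, then those two are restored.
value   = "/private/*.html$"
pattern = Regexp.new(Regexp.escape(value).gsub('\*', ".*").gsub('\$', "$"))

pattern.match?("/private/report.html")        # => true  ("*" spans "report")
pattern.match?("/private/report.html?draft")  # => false ("$" anchors the end)
```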
class Probot — per-line value helpers (new clean_url):

```diff
@@ -128,6 +131,8 @@ class Probot
 
     def clean_value = raw_value.split("#").first&.strip
 
+    def clean_url = clean_value&.then { URI(_1).to_s }
+
     def agent? = key == "user-agent"
 
     def disallow? = key == "disallow"
```
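Traced by hand on a hypothetical directive, the clean_value → clean_url chain behaves like this (values invented for illustration):

```ruby
require "uri"

raw_value = "/wp-admin/ # keep crawlers out of the dashboard"

clean_value = raw_value.split("#").first&.strip   # => "/wp-admin/"
clean_url   = clean_value&.then { URI(_1).to_s }  # => "/wp-admin/"

# For an empty directive such as "Disallow:" the value portion is blank, so
# split("#").first comes back nil and the &. chain returns nil instead of
# handing nil to URI() and raising.
```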
class Probot — value coercion (now nil-safe and rescuing ArgumentError):

```diff
@@ -140,11 +145,13 @@ class Probot
 
     def value
       return clean_value.to_f if crawl_delay?
-      return URI(clean_value).to_s if disallow? || allow?
+      return clean_url if disallow? || allow?
 
       raw_value
     rescue URI::InvalidURIError
       raw_value
+    rescue ArgumentError
+      raw_value
     end
   end
 
```
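This is the user-visible half of the changelog's "Fix bug with Disallow rule containing empty line": a missing value now yields nil instead of an exception, and the parser's new nil guard simply skips it. A quick check mirroring the new test expectations:

```ruby
# With 0.5.0, an empty Disallow/Allow followed by a blank line adds no rule.
Probot.new(%(User-agent: *\nDisallow:\n\n)).rules.dig("*", "disallow")  # => []
Probot.new(%(User-agent: *\nAllow:\n\n)).rules.dig("*", "allow")        # => []

# The added `rescue ArgumentError` is a safety net: Kernel#URI raises
# ArgumentError rather than URI::InvalidURIError for some bad inputs
# (nil among them), and in that case the raw value is returned as-is.
```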
Version constant:

```diff
@@ -1,3 +1,3 @@
 class Probot
-  VERSION = "0.4.0"
+  VERSION = "0.5.0"
 end
```
Gemspec — declare the platform:

```diff
@@ -13,6 +13,7 @@ Gem::Specification.new do |spec|
   spec.homepage = "http://github.com/dkam/probot"
   spec.license = "MIT"
   spec.required_ruby_version = ">= 3.0"
+  spec.platform = Gem::Platform::RUBY
 
   spec.metadata["homepage_uri"] = spec.homepage
   spec.metadata["source_code_uri"] = "http://github.com/dkam/probot"
```
Gemspec — add a development dependency:

```diff
@@ -29,4 +30,5 @@ Gem::Specification.new do |spec|
   spec.bindir = "exe"
   spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
   spec.require_paths = ["lib"]
+  spec.add_development_dependency "debug"
 end
```
Tests — new fixture for a Yoast-style robots.txt with an empty Disallow:

```diff
@@ -95,6 +95,19 @@ class TestProbot < Minitest::Test
         crawl_delay: 0
       }
     ]
+  },
+  {
+    txt: %("User-agent: *\nDisallow: /wp/wp-admin/\nAllow: /wp/wp-admin/admin-ajax.php\n\nUser-agent: *\nDisallow: /wp-content/uploads/wpo/wpo-plugins-tables-list.json\n\n# START YOAST BLOCK\n# ---------------------------\nUser-agent: *\nDisallow:\n\nSitemap: https://prhinternationalsales.com/sitemap_index.xml\n# ---------------------------\n# END YOAST BLOCK"),
+    sitemaps: ["https://prhinternationalsales.com/sitemap_index.xml"],
+    found_agents: ["*"],
+    tests: [
+      {
+        agent: "*",
+        allowed: ["/wp/wp-admin/admin-ajax.php"],
+        disallowed: ["/wp/wp-admin/", "/wp-content/uploads/wpo/wpo-plugins-tables-list.json"],
+        crawl_delay: 0
+      }
+    ]
   }
 ].freeze
 
```
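The fixture feeds a robots.txt whose consecutive `User-agent: *` records merge into one rule set and whose empty Yoast-block Disallow contributes nothing. A rough interactive sketch of the same idea, assuming a `sitemaps` reader to match the fixture's `sitemaps:` key (content shortened and URL invented):

```ruby
txt = "User-agent: *\nDisallow: /wp/wp-admin/\nAllow: /wp/wp-admin/admin-ajax.php\n\n" \
      "User-agent: *\nDisallow:\n\nSitemap: https://example.com/sitemap_index.xml"

r = Probot.new(txt)
r.rules.dig("*", "allow").length     # => 1  (the admin-ajax.php pattern)
r.rules.dig("*", "disallow").length  # => 1  (the empty Disallow added nothing)
r.sitemaps                           # => ["https://example.com/sitemap_index.xml"]
```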
Tests — empty Allow/Disallow followed by a blank line:

```diff
@@ -131,7 +144,9 @@ class TestProbot < Minitest::Test
 
   def test_empty_allow_disallow
     assert Probot.new(%(User-agent: *\nAllow:)).rules.dig("*", "allow").empty?
+    assert Probot.new(%(User-agent: *\nAllow:\n\n)).rules.dig("*", "allow").empty?
     assert Probot.new(%(User-agent: *\nDisallow:)).rules.dig("*", "disallow").empty?
+    assert Probot.new(%(User-agent: *\nDisallow:\n\n)).rules.dig("*", "disallow").empty?
   end
 
   def test_consecutive_user_agents
```