So many changes. Some tests
@@ -1,11 +1,10 @@
# frozen_string_literal: true

require_relative "picopackage/version"
# require_relative "picopackage/http_fetcher"
require_relative "picopackage/provider"
require_relative "picopackage/source_file"
require_relative "picopackage/scanner"
require_relative "picopackage/fetch"
require_relative "picopackage/provider"
require_relative "picopackage/package"
require_relative "picopackage/scanner"
require_relative "picopackage/cli"

module Picopackage

@@ -15,20 +15,20 @@ module Picopackage
dir = argv.first || "."
Picopackage::Scanner.scan(dir).each { |f| puts f.file_path }

when "digest"
when "init"
OptionParser.new do |opts|
opts.banner = "Usage: ppkg digest FILE"
opts.banner = "Usage: ppkg init FILE"
end.parse!(argv)

file = argv.first
Picopackage::SourceFile.from_file(file).digest!
Picopackage::Package.from_file(file).init_metadata

when "checksum"
OptionParser.new do |opts|
opts.banner = "Usage: ppkg checksum FILE"
end.parse!(argv)
file = argv.first
puts Picopackage::SourceFile.from_file(file).checksum
puts Picopackage::Package.from_file(file).checksum

when "verify"
OptionParser.new do |opts|
@@ -36,7 +36,7 @@ module Picopackage
end.parse!(argv)

path = argv.first
source = SourceFile.from_file(path)
source = Package.from_file(path)

if source.metadata["content_checksum"].nil?
puts "⚠️ No checksum found in #{path}"
@@ -44,7 +44,7 @@ module Picopackage
exit 1
end

unless source.verify
unless source.verify_payload
puts "❌ Checksum verification failed for #{path}"
puts "Expected: #{source.metadata["content_checksum"]}"
puts "Got: #{source.checksum}"
@@ -59,7 +59,7 @@ module Picopackage
end.parse!(argv)

path = argv.first
Picopackage::SourceFile.from_file(path).inspect
Picopackage::Package.from_file(path).inspect_metadata

when "fetch"
options = {force: false}
@@ -93,9 +93,9 @@ module Picopackage
end.parse!(argv)

file = argv.first
source_file = SourceFile.from_file(file)
package = Package.from_file(file)
begin
Fetch.fetch(source_file.url, File.dirname(file), force: options[:force])
Fetch.fetch(package.url, File.dirname(file), force: options[:force])
rescue LocalModificationError => e
puts "Error: #{e.message}"
rescue => e
@@ -105,7 +105,7 @@ module Picopackage

else
puts "Unknown command: #{command}"
puts "Available commands: scan, sign, inspect, update"
puts "Available commands: fetch, update, scan, sign, inspect"
exit 1
end
rescue OptionParser::InvalidOption => e
@@ -116,5 +116,25 @@ module Picopackage
puts e.backtrace if ENV["DEBUG"]
exit 1
end

def self.determine_script_source
# Get the full path of the currently executing script
current_path = File.expand_path($0)

# Check if script is in GEM_PATH
gem_paths = Gem.path.map { |p| File.expand_path(p) }

is_gem = gem_paths.any? { |path| current_path.start_with?(path) }

if is_gem
# Running from gem installation
gem_name = File.basename(File.dirname(File.dirname(current_path)))
version = File.basename(File.dirname(current_path))
{source: :gem, path: current_path, gem_name: gem_name, version: version}
else
# Running from local installation
{source: :local, path: current_path}
end
end
end
end

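For orientation, here is a minimal usage sketch of the Package calls that the `init`, `checksum`, and `verify` branches above dispatch to. It is illustrative rather than part of the commit: it assumes the gem is loadable via `require "picopackage"`, and the file path is hypothetical.

require "picopackage"

pkg = Picopackage::Package.from_file("example.rb") # hypothetical path; from_file returns nil when the file is missing
if pkg
  pkg.init_metadata # backfills filename, payload checksum and timestamp when they are nil
  puts Picopackage::Payload.checksum(pkg.payload)  # => "sha256:..."
  puts pkg.verify_payload ? "payload verified" : "payload modified"
end
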
@@ -6,98 +6,162 @@ require "debug"

module Picopackage
class Fetch
class Error < StandardError; end
class HTTPError < Error; end
class FileTooLargeError < Error; end
class NotModifiedError < Error; end # Add this
class TooManyRedirectsError < Error; end # Add this

MAX_REDIRECTS = 5 # This constant is used but not defined

def initialize(max_size: 1024 * 1024, timeout: 10)
@max_size = max_size
@timeout = timeout
end

def fetch(uri)
case uri.scheme
when "http", "https" then fetch_http(uri)
when "file" then fetch_file(uri)
else
raise Error, "Unsupported scheme: #{uri.scheme}"
end
end

def self.fetch(url, destination, force: false)
raise ArgumentError, "Destination directory does not exist: #{destination}" unless Dir.exist?(destination)

provider = Provider.for(url)
source_file = provider.source_file
package = provider.package
file_path = File.join(destination, package.filename)

file_path = File.join(destination, source_file.filename)
local_package = File.exist?(file_path) ? FileProvider.new(file_path).package : nil

if File.exist?(file_path) && force
source_file.save(destination)
elsif File.exist?(file_path)
local_source_file = SourceFile.from_file(file_path)
status = Status.compare(local_source_file, source_file)
resolver = Resolver.new(package, local_package, file_path, force: force).resolve

if force
source_file.save(destination)
elsif status.modified?
raise LocalModificationError, "#{status.message}. Use -f or --force to overwrite local version"
elsif status.outdated?
puts "Updated from #{local_source_file.version} to #{source_file.version}"
source_file.save(destination)
elsif status.up_to_date?
puts status.message
end

else
source_file.save(destination)
if source_file.imported?
source_file.digest!
puts "Picopackage created for #{source_file.filename}"
else
puts "Picopackage downloaded to #{file_path}"
end
case resolver[:state]
when :kept, :updated
puts resolver[:message]
when :conflict
raise LocalModificationError, resolver[:message]
end
provider.source_file
provider.package
end

private

def fetch_http(uri, etag = nil)
Net::HTTP.start(uri.host, uri.port, connection_options(uri)) do |http|
request = Net::HTTP::Get.new(uri.request_uri)
request["If-None-Match"] = etag if etag

response = http.request(request)
handle_response(response, uri)
end
end

def fetch_file(uri)
File.read(uri.path)
end

def connection_options(uri)
{
use_ssl: uri.scheme == "https",
read_timeout: @timeout,
open_timeout: @timeout
}
end

def handle_response(response, uri)
case response
when Net::HTTPSuccess
{
body: read_body(response),
etag: response["ETag"]
}
when Net::HTTPNotModified
raise NotModifiedError.new("Resource not modified", etag: response["ETag"])
when Net::HTTPRedirection
handle_redirect(response, uri)
else
raise HTTPError, "HTTP #{response.code}: #{response.message}"
end
end

def handle_redirect(response, uri, redirect_count = 0)
raise TooManyRedirectsError if redirect_count >= MAX_REDIRECTS
location = response["location"]
new_uri = URI(location)
# Handle both relative paths and full URLs
new_uri = uri.merge(location) if new_uri.relative?
fetch(new_uri, redirect_count: redirect_count + 1)
end

def read_body(response)
buffer = String.new(capacity: @max_size)
response.read_body do |chunk|
raise FileTooLargeError, "Response would exceed #{@max_size} bytes" if buffer.bytesize + chunk.bytesize > @max_size
buffer << chunk
end
buffer
end
end

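The instance-level fetcher above dispatches on URI scheme and, for HTTP(S), returns a hash of the form {body:, etag:}. A hedged sketch of the intended call shape follows; the URL and the size/timeout values are made up, and since this commit is still in flux it should be read as illustrative rather than a guaranteed working run.

require "picopackage"
require "uri"

fetcher = Picopackage::Fetch.new(max_size: 512 * 1024, timeout: 5)
begin
  result = fetcher.fetch(URI("https://example.com/snippet.rb")) # hypothetical URL
  puts result[:body].bytesize # bodies are capped at max_size; oversized responses raise FileTooLargeError
rescue Picopackage::Fetch::HTTPError, Picopackage::Fetch::FileTooLargeError => e
  warn "fetch failed: #{e.message}"
end
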
class Status
attr_reader :state, :local_comparison, :remote_comparison
##
# States:
# - kept: local file was converted to a picopackage and kept
# - updated: local file was updated with remote picopackage
# - conflict: local and remote files differ - manually resolve or use -f to force
class Resolver
attr_reader :remote, :local, :local_path, :force
def initialize(remote_package, local_package, local_path, force: false)
@remote = remote_package
@local = local_package
@local_path = local_path
@force = force
@same_checksum = @remote.payload_checksum == @local&.payload_checksum
end

def self.compare(local_source_file, remote_source_file)
return new(:outdated) if local_source_file.metadata.nil? || remote_source_file.metadata.nil?
STATES = %i[kept updated conflict].freeze

local_comparison = local_source_file.metadata["version"] || local_source_file.metadata["updated_at"]&.to_s
remote_comparison = remote_source_file.metadata["version"] || remote_source_file.metadata["updated_at"]&.to_s

if local_comparison == remote_comparison
if local_source_file.modified?
new(:modified, local_version: local_comparison)
def resolve
validate_state_hash(
if @force
@remote.save(local_path)
{state: :updated, message: "Force mode: overwrote local file with remote package"}
elsif @local.nil?
@remote.save(local_path)
{state: :kept, message: "Saved Package as new file"}
elsif @remote.payload_version != @local.payload_version
{state: :conflict, message: "Version conflict. Local: #{@local.payload_version}, Remote: #{@remote.payload_version}"}
elsif @remote.payload_timestamp_as_time > @local.payload_timestamp_as_time
@remote.save(local_path)
{state: :updated, message: "Updated to newer version"}
elsif !@same_checksum
handle_checksum_mismatch
elsif @local.was_bare_file
debugger
@local.save(local_path)
{state: :kept, message: "Packaged existing file as Picopackage"}
else
new(:up_to_date, local_version: local_comparison)
{state: :kept, message: "Local file is up to date"}
end
)
end

private

def validate_state_hash(hash)
raise "Invalid state" unless STATES.include?(hash[:state])
raise "Missing message" unless hash[:message].is_a?(String)
hash
end

def handle_checksum_mismatch
if @force
@remote.save(local_path) # In force mode, remote wins
{state: :updated, message: "Overwrote local file with remote package"}
else
new(:outdated,
local_version: local_comparison,
remote_version: remote_comparison,
modified: local_source_file.modified?)
end
end

def initialize(state, local_version: nil, remote_version: nil, modified: false)
@state = state
@local_version = local_version
@remote_version = remote_version
@modified = modified
end

def modified?
@modified || @state == :modified
end

def up_to_date?
@state == :up_to_date
end

def outdated?
@state == :outdated
end

def message
case state
when :up_to_date
"Picopackage is up to date"
when :outdated
if modified?
"Local Picopackage (v#{local_comparison}) has modifications but remote version (v#{remote_comparison}) is available"
else
"Local Picopackage (v#{local_comparison}) is outdated. Remote version: v#{remote_comparison}"
end
when :modified
"Local Picopackage has been modified from original version (v#{local_version})"
{state: :conflict, message: "Files differ. Use --force to convert both to packages"}
end
end
end

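The Resolver above always returns a validated {state:, message:} hash, which is what Fetch.fetch switches on. A minimal consumption sketch, assuming both files already carry picopackage metadata; the package objects and the local path are hypothetical.

remote = Picopackage::Package.from_file("tmp/remote_copy.rb") # hypothetical: in real use this comes from a Provider
local  = Picopackage::Package.from_file("lib/snippet.rb")     # hypothetical local copy

result = Picopackage::Resolver.new(remote, local, "lib/snippet.rb", force: false).resolve
case result[:state]
when :kept, :updated then puts result[:message]
when :conflict       then abort result[:message]
end
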
@@ -1,92 +1,111 @@
require "yaml"
require "digest"
require "forwardable"

module Picopackage
class SourceFile
attr_reader :content, :metadata, :code, :original_path
METADATA_PATTERN = /^\n*#\s*@PICOPACKAGE_START\n(.*?)^\s*#\s*@PICOPACKAGE_END\s*$/m

METADATA_PATTERN = /^\n*#\s*@PICOPACKAGE_START\n(.*?)^\s*#\s*@PICOPACKAGE_END\s*$/m

def self.from_file(file_path) = new(content: File.read(file_path), original_path: file_path)

def self.from_content(content, metadata: {})
instance = new(content: content)
instance.imported! if instance.metadata.empty?

updated_metadata = metadata.merge(instance.metadata)

## For new Picopackages, we should add metadata and checksum
instance.update_metadata(updated_metadata)

instance
class Metadata < Struct.new(:url, :filename, :payload_version, :payload_timestamp, :payload_checksum, :etag, keyword_init: true)
# the #from_file method will create a new instance of Metadata from a file path, rather than read a package's metadata
def self.from_file(file_path, content: nil)
new(content: File.read(file_path))
end

def initialize(content:, original_path: nil)
@original_path = original_path
def self.from_url_response(url, response)
end

def self.from_content(content)
return new unless content =~ METADATA_PATTERN

yaml_content = $1.each_line.map { |line| line.sub(/^\s*#\s?/, "").rstrip }.join("\n")

# Load and transform in one chain
@metadata = new(**YAML.safe_load(yaml_content)
.slice(*Metadata.members.map(&:to_s))
.transform_keys(&:to_sym))
rescue
new # Return empty hash on any YAML/transformation errors
end

def empty? = to_h.values.all?(&:nil?)
end

class Payload
def self.from_content(content) = content.sub(METADATA_PATTERN, "")

def self.normalize(payload) = payload.rstrip + "\n\n"

def self.normalized_from_content(content) = Payload.from_content(content).then { Payload.normalize(_1) }

def self.from_file(file_path) = normalized_from_content(File.read(file_path))

def self.checksum(payload) = "sha256:#{Digest::SHA256.hexdigest(payload)}"

def self.checksum_from_content(content) = checksum(from_content(content))
end

class Package
extend Forwardable
attr_reader :content, :payload, :metadata, :was_bare_file

def_delegators :@metadata,
:url, :url=,
:filename, :filename=,
:payload_version, :payload_version=,
:payload_timestamp, :payload_timestamp=,
:payload_checksum, :payload_checksum=

def self.from_file(file_path)
if File.exist?(file_path)
new(content: File.read(file_path))
end
end

def initialize(content:)
@content = content
@code = extract_code
@metadata = extract_metadata
@payload = Payload.normalized_from_content(@content)
@metadata = Metadata.from_content(@content)

if is_bare_file?
@was_bare_file = true
init_metadata
else
@was_bare_file = false
end
end

def imported! = @imported = true
def is_bare_file? = @metadata.empty?

def imported? = @imported ||= false

def url = @metadata["url"]

def filename = @metadata["filename"]

def version = @metadata["version"]

def packaged_at = @metadata["packaged_at"]

def checksum = "sha256:#{Digest::SHA256.hexdigest(code)}"

def inspect_metadata = puts JSON.pretty_generate(@metadata)

def save(destination = nil)
path = determine_save_path(destination)
File.write(path, content)
path
def init_metadata
@metadata.url ||= url
@metadata.filename ||= filename
@metadata.payload_checksum ||= Payload.checksum_from_content(content)
@metadata.payload_timestamp ||= payload_timestamp
end

def extract_code = content.sub(METADATA_PATTERN, "")
def save(path, filename = nil)
path = File.join(path, filename || @metadata.filename) if File.directory?(path)

def extract_metadata
return {} unless content =~ METADATA_PATTERN

yaml_content = $1.lines.map do |line|
line.sub(/^\s*#\s?/, "").rstrip
end.join("\n")

YAML.safe_load(yaml_content)
File.write(path, generate_package)
end

def update_metadata(metadata_hash)
@metadata = metadata_hash
@content = generate_content
def verify_payload
return false if metadata.payload_checksum.nil? || metadata.payload_checksum&.empty?
Payload.checksum(payload) == metadata.payload_checksum
end

def digest!
hash = checksum
return puts "File already has a checksum" if metadata["content_checksum"] == hash

new_metadata = metadata.merge("content_checksum" => hash)
update_metadata(new_metadata)
save
def payload_timestamp_as_time
@metadata&.payload_timestamp ? Time.parse(@metadata.payload_timestamp) : nil
end

def verify
return false unless metadata.key? "content_checksum"
checksum == metadata["content_checksum"]
end
def modified? = !verify_payload

def modified? = !verify
def inspect_metadata = puts JSON.pretty_generate(@metadata.to_h)

private

def generate_content
def generate_package
@metadata.url = url.to_s
metadata_block = generate_metadata
if METADATA_PATTERN.match?(content)
content.sub(METADATA_PATTERN, "\n#{metadata_block}")
@@ -97,7 +116,7 @@ module Picopackage

# This will need a comment style one day, to work with other languages
def generate_metadata
yaml_content = @metadata.to_yaml.strip
yaml_content = @metadata.to_h.transform_keys(&:to_s).to_yaml.strip
[
"# @PICOPACKAGE_START",
yaml_content.lines.map { |line| "# #{line}" }.join,
@@ -105,15 +124,5 @@ module Picopackage
""
].join("\n")
end

def determine_save_path(destination)
if destination.nil?
@original_path || filename || raise("No filename available")
elsif File.directory?(destination)
File.join(destination, filename || File.basename(@original_path))
else
destination
end
end
end
end

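To make the metadata block concrete, here is a sketch of the on-disk shape that METADATA_PATTERN matches and how Package splits it into metadata and payload. It is illustrative, not part of the commit; the field values are made up.

packaged = <<~RUBY
  # @PICOPACKAGE_START
  # url: https://example.com/hello.rb
  # filename: hello.rb
  # payload_version: "1"
  # payload_checksum: sha256:abc123
  # @PICOPACKAGE_END

  puts "hello"
RUBY

pkg = Picopackage::Package.new(content: packaged)
pkg.filename # => "hello.rb" (delegated to the Metadata struct parsed from the commented YAML)
pkg.payload  # => the code with the metadata block stripped and trailing whitespace normalized
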
@@ -1,4 +1,5 @@
require "time"
require "pathname"

module Picopackage
class Provider
@@ -25,47 +26,38 @@ module Picopackage
# The variable `metadata` will contain the metadata extracted from `package_data`

# Job of the Provider class is to fetch the body from the URL, and then extract the package_data
# and the filename from the body. The SourceFile class will then take the body and split it into payload and metadata
# and the filename from the body. The Package class will then take the body and split it into payload and metadata

class DefaultProvider
MAX_SIZE = 1024 * 1024
TIMEOUT = 10
attr_reader :url
attr_reader :url, :package

def self.handles_url?(url) = :maybe

def initialize(url)
def initialize(url, fetcher: Fetch.new(max_size: MAX_SIZE, timeout: TIMEOUT))
@url = transform_url(url)
@uri = URI(@url)
@body = nil
@content = nil
@fetcher = fetcher
@package = Package.new(content: content)
populate_metadata
end

def body = @body ||= fetch
def transform_url(url) = URI(url)

def json_body = @json_body ||= JSON.parse(body)

def transform_url(url) = url

def fetch
Net::HTTP.start(@uri.host, @uri.port, use_ssl: @uri.scheme == "https", read_timeout: TIMEOUT, open_timeout: TIMEOUT) do |http|
http.request_get(@uri.path) do |response|
raise "Unexpected response: #{response.code}" unless response.is_a?(Net::HTTPSuccess)

@body = String.new(capacity: MAX_SIZE)
response.read_body do |chunk|
if @body.bytesize + chunk.bytesize > MAX_SIZE
raise FileTooLargeError, "Response would exceed #{MAX_SIZE} bytes"
end
@body << chunk
end
@body
end
end

@body
def body
@body ||= @fetcher.fetch(@url)
rescue Fetch::Error => e
raise FetchError, e.message
end

def json_body
@json_body ||= JSON.parse(body)
rescue JSON::ParserError
raise FetchError, "Failed to parse JSON response"
end

def payload_timestamp = Time.now.httpdate

def handles_body?
true
rescue FileTooLargeError, Net::HTTPError, RuntimeError
@@ -73,14 +65,17 @@ module Picopackage
end

# Implement in subclass - this comes from the `body`.
# Splitting content into payload and metadata is the job of the SourceFile class
# Splitting content into payload and metadata is the job of the Package class
def content = body

# Implement in subclass - this should return the filename extracted from the body - if it exists, but not from the metadata
def filename = File.basename @url

def source_file
@source_file ||= SourceFile.from_content(content, metadata: {"filename" => filename, "url" => url, "packaged_at" => packaged_at}.compact)
def populate_metadata
@package.filename ||= filename
@package.url ||= @url
@package.payload_timestamp ||= payload_timestamp
@package.payload_checksum ||= Payload.checksum(content)
end
end

@@ -96,7 +91,7 @@ module Picopackage
"https://api.github.com/gists/#{gist_id}"
end

def packaged_at
def payload_timestamp
Time.parse(json_body["created_at"])
rescue ArgumentError
nil
@@ -120,7 +115,24 @@ module Picopackage
# If we successfully fetch the body, and the body contains content and a filename, then we can handle the body
end

class FileProvider < DefaultProvider
def self.handles_url?(url) = File.exist?(url)

def transform_url(url) = Pathname(url)

def content = url.read

def filename = url.basename.to_s

def payload_timestamp
url.mtime.httpdate
rescue Errno::ENOENT
nil
end
end

PROVIDERS = [
FileProvider,
GithubGistProvider,
OpenGistProvider,
DefaultProvider

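A short sketch of the provider-selection flow follows. Provider.for is what Fetch.fetch calls above; its body is not shown in this diff, so the assumption here is that it walks PROVIDERS and picks the first class whose handles_url? accepts the URL. The gist URL and destination directory are hypothetical.

provider = Picopackage::Provider.for("https://gist.github.com/someuser/abc123") # hypothetical gist URL
package  = provider.package           # Package with filename, url, payload checksum and timestamp populated
package.save("vendor/picopackages")   # hypothetical existing destination directory
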
@@ -4,8 +4,8 @@ module Picopackage
Dir.glob(File.join(directory, pattern)).select do |file|
next unless File.file?(file)
content = File.read(file)
content.match?(SourceFile::METADATA_PATTERN)
end.map { |file| SourceFile.new(file) }
content.match?(Package::METADATA_PATTERN)
end.map { |file| Package.new(file) }
end
end
end
