Add StandardRB and get some fixes
Gemfile (2 changes)
@@ -10,3 +10,5 @@ gem "rake", "~> 13.0"
 gem "minitest", "~> 5.16"
 
 gem "rubocop", "~> 1.21"
+
+gem "standard", "~> 1.44"
Gemfile.lock (29 changes)
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    picopackage (0.2.0)
+    picopackage (0.2.1)
       digest
       open-uri (~> 0.5)
       yaml (~> 0.4)
@@ -14,22 +14,27 @@ GEM
     debug (1.10.0)
       irb (~> 1.10)
       reline (>= 0.3.8)
-    digest (3.1.1)
+    digest (3.2.0)
     io-console (0.8.0)
-    irb (1.14.3)
+    irb (1.15.1)
+      pp (>= 0.6.0)
       rdoc (>= 4.0.0)
       reline (>= 0.4.2)
     json (2.9.1)
     language_server-protocol (3.17.0.3)
+    lint_roller (1.1.0)
     minitest (5.25.4)
     open-uri (0.5.0)
       stringio
       time
       uri
     parallel (1.26.3)
-    parser (3.3.6.0)
+    parser (3.3.7.0)
       ast (~> 2.4.1)
       racc
+    pp (0.6.2)
+      prettyprint
+    prettyprint (0.2.0)
     psych (5.2.3)
       date
       stringio
@@ -53,7 +58,22 @@ GEM
       unicode-display_width (>= 2.4.0, < 4.0)
     rubocop-ast (1.37.0)
       parser (>= 3.3.1.0)
+    rubocop-performance (1.23.1)
+      rubocop (>= 1.48.1, < 2.0)
+      rubocop-ast (>= 1.31.1, < 2.0)
     ruby-progressbar (1.13.0)
+    standard (1.44.0)
+      language_server-protocol (~> 3.17.0.2)
+      lint_roller (~> 1.0)
+      rubocop (~> 1.70.0)
+      standard-custom (~> 1.0.0)
+      standard-performance (~> 1.6)
+    standard-custom (1.0.2)
+      lint_roller (~> 1.0)
+      rubocop (~> 1.50)
+    standard-performance (1.6.0)
+      lint_roller (~> 1.1)
+      rubocop-performance (~> 1.23.0)
     stringio (3.1.2)
     time (0.4.1)
       date
@@ -73,6 +93,7 @@ DEPENDENCIES
   picopackage!
   rake (~> 13.0)
   rubocop (~> 1.21)
+  standard (~> 1.44)
 
 BUNDLED WITH
    2.6.2
Rakefile (7 changes)
@@ -5,8 +5,9 @@ require "minitest/test_task"
 
 Minitest::TestTask.create
 
-require "rubocop/rake_task"
+# require "rubocop/rake_task"
+# RuboCop::RakeTask.new
 
-RuboCop::RakeTask.new
+require "standard/rake"
 
-task default: %i[test rubocop]
+task default: %i[test standard]
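For orientation, the Rakefile after this change would read roughly as below. This is a sketch assembled from the hunk above; anything before the require "minitest/test_task" line is not shown in the diff and is assumed unchanged.

# Sketch only - reassembled from the diff above, not the authoritative file contents.
require "minitest/test_task"

Minitest::TestTask.create

# require "rubocop/rake_task"
# RuboCop::RakeTask.new

require "standard/rake"

task default: %i[test standard]

Since standard/rake defines the standard and standard:fix tasks, a plain bundle exec rake now runs the tests and then the style check, and bundle exec rake standard:fix applies the safe autocorrections; the same checks can also be run directly with the standardrb executable.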
@@ -2,7 +2,7 @@
 # frozen_string_literal: true
 
 require "bundler/setup"
-require "picop"
+require "picopackage"
 
 # You can add fixtures and/or initialization code here to make experimenting
 # with your gem easier. You can also use a different console, if you like.
exe/ppkg (6 changes)
@@ -1,15 +1,15 @@
 #!/usr/bin/env ruby
 
 # Add lib directory to load path
-lib_path = File.expand_path('../lib', __dir__)
+lib_path = File.expand_path("../lib", __dir__)
 $LOAD_PATH.unshift(lib_path) unless $LOAD_PATH.include?(lib_path)
 
-require 'picopackage'
+require "picopackage"
 
 begin
   Picopackage::CLI.run(ARGV)
 rescue => e
   warn "Error: #{e.message}"
-  warn e.backtrace if ENV['DEBUG']
+  warn e.backtrace if ENV["DEBUG"]
   exit 1
 end
@@ -10,7 +10,10 @@ require_relative "picopackage/cli"
 
 module Picopackage
   class Error < StandardError; end
+
   class FileTooLargeError < StandardError; end
+
   class FetchError < StandardError; end
+
   class LocalModificationError < StandardError; end
 end
@@ -5,17 +5,17 @@ module Picopackage
     def self.run(argv = ARGV)
       command = argv.shift
       case command
-      when 'scan'
+      when "scan"
         options = {}
         OptionParser.new do |opts|
           opts.banner = "Usage: ppkg scan [options] DIRECTORY"
           # opts.on('-v', '--verbose', 'Run verbosely') { |v| options[:verbose] = v }
         end.parse!(argv)
 
-        dir = argv.first || '.'
-        Picopackage::Scanner.scan(dir).each {|f| puts f.file_path }
+        dir = argv.first || "."
+        Picopackage::Scanner.scan(dir).each { |f| puts f.file_path }
 
-      when 'digest'
+      when "digest"
         OptionParser.new do |opts|
           opts.banner = "Usage: ppkg digest FILE"
         end.parse!(argv)
@@ -23,14 +23,14 @@ module Picopackage
         file = argv.first
         Picopackage::SourceFile.from_file(file).digest!
 
-      when 'checksum'
+      when "checksum"
         OptionParser.new do |opts|
           opts.banner = "Usage: ppkg checksum FILE"
         end.parse!(argv)
         file = argv.first
         puts Picopackage::SourceFile.from_file(file).checksum
 
-      when 'verify'
+      when "verify"
         OptionParser.new do |opts|
           opts.banner = "Usage: ppkg sign FILE"
         end.parse!(argv)
@@ -38,7 +38,7 @@ module Picopackage
         path = argv.first
         source = SourceFile.from_file(path)
 
-        if source.metadata['content_checksum'].nil?
+        if source.metadata["content_checksum"].nil?
           puts "⚠️ No checksum found in #{path}"
           puts "Run 'ppkg sign #{path}' to add one"
           exit 1
@@ -46,14 +46,14 @@ module Picopackage
 
         unless source.verify
           puts "❌ Checksum verification failed for #{path}"
-          puts "Expected: #{source.metadata['content_checksum']}"
+          puts "Expected: #{source.metadata["content_checksum"]}"
           puts "Got: #{source.checksum}"
           exit 1
         end
 
         puts "✅ #{path} verified successfully"
 
-      when 'inspect'
+      when "inspect"
         OptionParser.new do |opts|
           opts.banner = "Usage: ppkg inspect FILE|DIRECTORY"
         end.parse!(argv)
@@ -61,15 +61,15 @@ module Picopackage
         path = argv.first
         Picopackage::SourceFile.from_file(path).inspect
 
-      when 'fetch'
-        options = { force: false }
+      when "fetch"
+        options = {force: false}
         OptionParser.new do |opts|
           opts.banner = "Usage: ppkg fetch [options] URI [PATH]"
-          opts.on('-f', '--force', 'Force fetch') { |f| options[:force] = f }
+          opts.on("-f", "--force", "Force fetch") { |f| options[:force] = f }
         end.parse!(argv)
 
         url = argv.shift
-        path = argv.shift || '.' # use '.' if no path provided
+        path = argv.shift || "." # use '.' if no path provided
 
         if url.nil?
           puts "Error: URI is required"
@@ -85,11 +85,11 @@ module Picopackage
           exit 1
         end
 
-      when 'update'
-        options = { force: false }
+      when "update"
+        options = {force: false}
         OptionParser.new do |opts|
           opts.banner = "Usage: ppkg update [options] FILE"
-          opts.on('-f', '--force', 'Force update') { |f| options[:force] = f }
+          opts.on("-f", "--force", "Force update") { |f| options[:force] = f }
         end.parse!(argv)
 
         file = argv.first
@@ -113,7 +113,7 @@ module Picopackage
           exit 1
         rescue => e
           puts "Error: #{e.message}"
-          puts e.backtrace if ENV['DEBUG']
+          puts e.backtrace if ENV["DEBUG"]
           exit 1
         end
       end
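The quoting and layout changes above are behaviour-neutral; the CLI still dispatches on its first argument. As a rough illustration of how the entry point is driven (the paths and URL below are invented; only CLI.run and the subcommand names come from the diff):

# Illustrative only - argument values are made up.
Picopackage::CLI.run(["scan", "lib"])              # prints the file paths found by the Scanner
Picopackage::CLI.run(["digest", "lib/helper.rb"])  # writes a content_checksum into the file's metadata
Picopackage::CLI.run(["verify", "lib/helper.rb"])  # recomputes the checksum and compares it to the stored one
Picopackage::CLI.run(["fetch", "--force", "https://gist.github.com/someuser/0123abcd", "lib"])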
@@ -1,8 +1,8 @@
-require 'net/http'
-require 'fileutils'
-require 'tempfile'
-require 'json'
-require 'debug'
+require "net/http"
+require "fileutils"
+require "tempfile"
+require "json"
+require "debug"
 
 module Picopackage
   class Fetch
@@ -45,26 +45,25 @@ module Picopackage
   end
 
   class Status
-    attr_reader :state, :local_version, :remote_version
+    attr_reader :state, :local_comparison, :remote_comparison
 
     def self.compare(local_source_file, remote_source_file)
       return new(:outdated) if local_source_file.metadata.nil? || remote_source_file.metadata.nil?
 
-      local_version = local_source_file.metadata["version"]
-      remote_version = remote_source_file.metadata["version"]
+      local_comparison = local_source_file.metadata["version"] || local_source_file.metadata["updated_at"]&.to_s
+      remote_comparison = remote_source_file.metadata["version"] || remote_source_file.metadata["updated_at"]&.to_s
 
-      if local_version == remote_version
+      if local_comparison == remote_comparison
         if local_source_file.modified?
-          new(:modified, local_version:)
+          new(:modified, local_version: local_comparison)
         else
-          new(:up_to_date, local_version:)
+          new(:up_to_date, local_version: local_comparison)
         end
       else
         new(:outdated,
-          local_version:,
-          remote_version:,
-          modified: local_source_file.modified?
-        )
+          local_version: local_comparison,
+          remote_version: remote_comparison,
+          modified: local_source_file.modified?)
       end
     end
 
@@ -93,9 +92,9 @@ module Picopackage
         "Picopackage is up to date"
       when :outdated
         if modified?
-          "Local Picopackage (v#{local_version}) has modifications but remote version (v#{remote_version}) is available"
+          "Local Picopackage (v#{local_comparison}) has modifications but remote version (v#{remote_comparison}) is available"
         else
-          "Local Picopackage (v#{local_version}) is outdated. Remote version: v#{remote_version}"
+          "Local Picopackage (v#{local_comparison}) is outdated. Remote version: v#{remote_comparison}"
         end
       when :modified
         "Local Picopackage has been modified from original version (v#{local_version})"
@@ -1,36 +0,0 @@
-# Currently unused. If we get to the point where a provider needs to make a http request, we'll
-# swappout DefaultProvider#fetch and include this module.
-
-module Picopackage
-  module HttpFetcher
-    MAX_SIZE = 1024 * 1024
-    TIMEOUT = 10
-
-    # This seemed to cause loops - constanting making requests to the same URL
-
-    def fetch_url(url, max_size: MAX_SIZE, timeout: TIMEOUT)
-      raise ArgumentError, "This method shouldn't be called"
-      uri = URI(url)
-
-      Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https', read_timeout: timeout, open_timeout: timeout) do |http|
-        request = Net::HTTP::Get.new(uri.path)
-
-        http.request_get(uri.path) do |response|
-          unless response.is_a?(Net::HTTPSuccess)
-            raise FetchError, "HTTP #{response.code} #{response.message}"
-          end
-
-          data = String.new(capacity: max_size)
-          response.read_body do |chunk|
-            # Stream chunks with size checking
-            if data.bytesize + chunk.bytesize > max_size
-              raise FileTooLargeError
-            end
-            data << chunk
-          end
-        end
-        data
-      end
-    end
-  end
-end
@@ -1,3 +1,5 @@
+require "time"
+
 module Picopackage
   class Provider
     def self.for(url)
@@ -17,18 +19,18 @@ module Picopackage
   end
 
   # Base class for fetching content from a URL
-  # The variable `body` will contain the content retrieved from the URL
-  # The variable `content` will contain both and code + metadata - this would be writen to a file.
-  # The variable `code` will contain the code extracted from `content`
-  # The variable `metadata` will contain the metadata extracted from `content`
+  # The variable `body` will contain the package_data retrieved from the URL
+  # The variable `package_data` will contain both and payload + metadata - this would be writen to a file.
+  # The variable `payload` will contain the payload extracted from `package_data`
+  # The variable `metadata` will contain the metadata extracted from `package_data`
 
-  # Job of the Provider class is to fetch the body from the URL, and then extract the content and the filename from the body
-  # The SourceFile class will then take the body and split it into code and metadata
+  # Job of the Provider class is to fetch the body from the URL, and then extract the package_data
+  # and the filename from the body. The SourceFile class will then take the body and split it into payload and metadata
 
   class DefaultProvider
     MAX_SIZE = 1024 * 1024
     TIMEOUT = 10
-    attr_reader :url, :source_file
+    attr_reader :url
 
     def self.handles_url?(url) = :maybe
 
@@ -40,12 +42,13 @@ module Picopackage
     end
 
     def body = @body ||= fetch
+
     def json_body = @json_body ||= JSON.parse(body)
+
     def transform_url(url) = url
 
     def fetch
-      begin
-      Net::HTTP.start(@uri.host, @uri.port, use_ssl: @uri.scheme == 'https', read_timeout: TIMEOUT, open_timeout: TIMEOUT) do |http|
+      Net::HTTP.start(@uri.host, @uri.port, use_ssl: @uri.scheme == "https", read_timeout: TIMEOUT, open_timeout: TIMEOUT) do |http|
         http.request_get(@uri.path) do |response|
           raise "Unexpected response: #{response.code}" unless response.is_a?(Net::HTTPSuccess)
 
@@ -59,46 +62,59 @@ module Picopackage
           @body
         end
       end
-      end
       @body
     end
 
     def handles_body?
       true
-    rescue FileTooLargeError, Net::HTTPError, RuntimeError => e
+    rescue FileTooLargeError, Net::HTTPError, RuntimeError
       false
     end
 
     # Implement in subclass - this come from the `body`.
-    # Spliting content into code and metadata is the job of the SourceFile class
+    # Spliting content into payload and metadata is the job of the SourceFile class
    def content = body
 
     # Implement in subclass - this should return the filename extracted from the body - if it exists, but not from the metadata
     def filename = File.basename @url
+
     def source_file
-      @source_file ||= SourceFile.from_content(content, metadata: {'filename' => filename, 'url' => url, 'version' => '0.0.1'})
+      @source_file ||= SourceFile.from_content(content, metadata: {"filename" => filename, "url" => url, "packaged_at" => packaged_at}.compact)
     end
   end
 
   class GithubGistProvider < DefaultProvider
     def self.handles_url?(url) = url.match?(%r{gist\.github\.com})
+
     def content = json_body["files"].values.first["content"]
+
     def filename = json_body["files"].values.first["filename"]
+
     def transform_url(url)
       gist_id = url[/gist\.github\.com\/[^\/]+\/([a-f0-9]+)/, 1]
       "https://api.github.com/gists/#{gist_id}"
     end
+
+    def packaged_at
+      Time.parse(json_body["created_at"])
+    rescue ArgumentError
+      nil
+    end
   end
 
   class OpenGistProvider < DefaultProvider
     def handles_url?(url) = :maybe
+
     def transform_url(url) = "#{url}.json"
-    def content = json_body.dig("files",0, "content")
-    def filename = json_body.dig("files",0, "filename")
+    def content = json_body.dig("files", 0, "content")
+
+    def filename = json_body.dig("files", 0, "filename")
 
     def handles_body?
       content && filename
-    rescue FileTooLargeError, Net::HTTPError, RuntimeError => e
+    rescue FileTooLargeError, Net::HTTPError, RuntimeError
       false
     end
     # If we successfully fetch the body, and the body contains content and a filename, then we can handle the body
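Aside from the new packaged_at hook, GithubGistProvider's URL handling is unchanged; for clarity, this is what transform_url does with a gist URL (the user name and gist id below are invented, the regex is the one in the diff):

url = "https://gist.github.com/someuser/0123456789abcdef0123456789abcdef"
gist_id = url[/gist\.github\.com\/[^\/]+\/([a-f0-9]+)/, 1]
# => "0123456789abcdef0123456789abcdef"
"https://api.github.com/gists/#{gist_id}"
# => "https://api.github.com/gists/0123456789abcdef0123456789abcdef"
# packaged_at then reads "created_at" from the gist API response and parses it with Time.parse.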
@@ -25,21 +25,21 @@ module Picopackage
       @original_path = original_path
 
       @content = content
-      @metadata = extract_metadata
       @code = extract_code
+      @metadata = extract_metadata
     end
 
     def imported! = @imported = true
 
     def imported? = @imported ||= false
 
-    def content = @content
+    def url = @metadata["url"]
 
-    def url = @metadata['url']
+    def filename = @metadata["filename"]
 
-    def filename = @metadata['filename']
+    def version = @metadata["version"]
 
-    def version = @metadata['version'] || '0.0.1'
+    def packaged_at = @metadata["packaged_at"]
 
     def checksum = "sha256:#{Digest::SHA256.hexdigest(code)}"
 
@@ -51,13 +51,13 @@ module Picopackage
       path
     end
 
-    def extract_code = content.sub(METADATA_PATTERN, '')
+    def extract_code = content.sub(METADATA_PATTERN, "")
 
     def extract_metadata
       return {} unless content =~ METADATA_PATTERN
 
       yaml_content = $1.lines.map do |line|
-        line.sub(/^\s*#\s?/, '').rstrip
+        line.sub(/^\s*#\s?/, "").rstrip
       end.join("\n")
 
       YAML.safe_load(yaml_content)
@@ -70,16 +70,16 @@ module Picopackage
 
     def digest!
       hash = checksum
-      return puts "File already has a checksum" if metadata['content_checksum'] == hash
+      return puts "File already has a checksum" if metadata["content_checksum"] == hash
 
-      new_metadata = metadata.merge('content_checksum' => hash)
+      new_metadata = metadata.merge("content_checksum" => hash)
       update_metadata(new_metadata)
       save
     end
 
     def verify
-      return false unless metadata.key? 'content_checksum'
-      checksum == metadata['content_checksum']
+      return false unless metadata.key? "content_checksum"
+      checksum == metadata["content_checksum"]
     end
 
     def modified? = !verify
@@ -88,7 +88,7 @@ module Picopackage
 
     def generate_content
       metadata_block = generate_metadata
-      if content =~ METADATA_PATTERN
+      if METADATA_PATTERN.match?(content)
         content.sub(METADATA_PATTERN, "\n#{metadata_block}")
       else
         [content.rstrip, "\n#{metadata_block}"].join("\n")
@@ -115,6 +115,5 @@ module Picopackage
         destination
       end
     end
-
   end
 end
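As a side note on the checksum logic above: the stored content_checksum is just a prefixed SHA-256 of the payload. A minimal sketch of how the value is derived (the file name is made up; Digest::SHA256 comes from Ruby's digest gem, which the gemspec already depends on):

require "digest"

payload = File.read("lib/uniquify_array.rb")  # the code without its metadata comment block
puts "sha256:#{Digest::SHA256.hexdigest(payload)}"
# digest! stores this value under "content_checksum"; verify recomputes it and compares the two.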
@@ -14,11 +14,11 @@ Gem::Specification.new do |spec|
   spec.license = "MIT"
   spec.required_ruby_version = ">= 3.1.0"
 
-  #spec.metadata["allowed_push_host"] = "TODO: Set to your gem server 'https://example.com'"
+  # spec.metadata["allowed_push_host"] = "TODO: Set to your gem server 'https://example.com'"
 
-  #spec.metadata["homepage_uri"] = spec.homepage
-  #spec.metadata["source_code_uri"] = "TODO: Put your gem's public repo URL here."
-  #spec.metadata["changelog_uri"] = "TODO: Put your gem's CHANGELOG.md URL here."
+  # spec.metadata["homepage_uri"] = spec.homepage
+  # spec.metadata["source_code_uri"] = "TODO: Put your gem's public repo URL here."
+  # spec.metadata["changelog_uri"] = "TODO: Put your gem's CHANGELOG.md URL here."
 
   # Specify which files should be added to the gem when it is released.
   # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
@@ -37,6 +37,7 @@ Gem::Specification.new do |spec|
   spec.add_dependency "yaml", "~> 0.4"
   spec.add_dependency "digest"
   spec.add_development_dependency "debug"
+  spec.add_development_dependency "standard"
 
   # For more information and examples about making a new gem, check out our
   # guide at: https://bundler.io/guides/creating_gem.html
@@ -7,7 +7,8 @@ class TestPicopackage < Minitest::Test
     refute_nil ::Picopackage::VERSION
   end
 
-  def test_it_does_something_useful
-    assert false
+  def test_it_can_load_a_picopackage_file
+    sf = Picopackage::SourceFile.from_content(File.read("test/files/uniquify_array.rb"))
+    assert_equal sf.metadata["filename"], "uniquify_array.rb"
   end
 end