From 6796c3f9fd4267b1b787b0a2b02c6f2b19ed9f19 Mon Sep 17 00:00:00 2001
From: Brandon Hornseth
Date: Fri, 1 Nov 2024 16:33:58 -0600
Subject: [PATCH 1/8] Eliminate test warnings

Each of these changes was causing a longer version of the warning below:

> WARNING: Using the `raise_error` matcher without providing a specific error
> or message risks false positives

There are definitely better refactors for each of these, but I'm optimizing
for cleaning up the test output, not for improving the code interface, and I
want to avoid introducing breaking changes.
---
 spec/unit/spark_api/configuration/yaml_spec.rb | 4 ++--
 spec/unit/spark_api/multi_client_spec.rb | 2 +-
 spec/unit/spark_api/request_spec.rb | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/spec/unit/spark_api/configuration/yaml_spec.rb b/spec/unit/spark_api/configuration/yaml_spec.rb
index c4d95a58..60ef03d6 100644
--- a/spec/unit/spark_api/configuration/yaml_spec.rb
+++ b/spec/unit/spark_api/configuration/yaml_spec.rb
@@ -27,9 +27,9 @@
     end
     it "should raise an error for a bad configuration" do
       allow(subject).to receive(:env){ {} }
-      expect { subject.load_file("spec/config/spark_api/some_random_key.yml")}.to raise_error
+      expect { subject.load_file("spec/config/spark_api/some_random_key.yml")}.to raise_error(Errno::ENOENT)
       allow(subject).to receive(:env){ {"RAILS_ENV" => "fake_env"} }
-      expect { subject.load_file(api_file)}.to raise_error
+      expect { subject.load_file(api_file)}.to raise_error(NoMethodError)
     end
   end
   describe "oauth2" do
diff --git a/spec/unit/spark_api/multi_client_spec.rb b/spec/unit/spark_api/multi_client_spec.rb
index c632f0bc..cf96b66d 100644
--- a/spec/unit/spark_api/multi_client_spec.rb
+++ b/spec/unit/spark_api/multi_client_spec.rb
@@ -44,7 +44,7 @@ def self.test_client_c
         expect(SparkApi.client.api_key).to eq('c')
         raise "OH MY GOODNESS I BLEW UP!!!"
       end
-    end.to raise_error
+    end.to raise_error(RuntimeError)
     expect(SparkApi.client.api_key).to eq('a')
   end

diff --git a/spec/unit/spark_api/request_spec.rb b/spec/unit/spark_api/request_spec.rb
index b149ff80..ebac4b18 100644
--- a/spec/unit/spark_api/request_spec.rb
+++ b/spec/unit/spark_api/request_spec.rb
@@ -223,7 +223,7 @@ def version()
   it "should escape a path correctly" do
     expect(subject.get('/test path with spaces').length).to eq(0)
     # now try this with an already escaped path. Kaboom!
- expect { subject.get('/test%20path%20with%20spaces') }.to raise_error() + expect { subject.get('/test%20path%20with%20spaces') }.to raise_error(Faraday::Adapter::Test::Stubs::NotFound) end it "post data should support non json data" do From c728ff569e04b5c22c6edd22c3a85048b6369f53 Mon Sep 17 00:00:00 2001 From: Brandon Hornseth Date: Sat, 2 Nov 2024 08:22:13 -0600 Subject: [PATCH 2/8] Ecosystem Updates - Add support for Ruby 3.2 and 3.3 - Require Faraday 2.0+ --- .github/workflows/ci.yml | 6 +++--- .ruby-version | 2 +- CHANGELOG | 5 +++++ VERSION | 2 +- .../authentication/oauth2_impl/faraday_middleware.rb | 4 ++-- lib/spark_api/configuration/yaml.rb | 2 +- lib/spark_api/faraday_middleware.rb | 2 +- spark_api.gemspec | 10 +++------- spec/spec_helper.rb | 2 +- 9 files changed, 18 insertions(+), 17 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 550eb91a..4bcd7ced 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - ruby: [ '2.7', '3.0', '3.1' ] + ruby: [ '3.1', '3.2', '3.3' ] steps: - name: repo checkout @@ -22,7 +22,7 @@ jobs: ruby-version: ${{ matrix.ruby }} - name: Set up Bundler - run: gem install bundler -v 2.1.4 + run: gem install bundler - name: bundle install @@ -48,7 +48,7 @@ jobs: - name: Set up Ruby uses: ruby/setup-ruby@v1 with: - ruby-version: '2.7' + ruby-version: '3.3' - name: bundle install run: | diff --git a/.ruby-version b/.ruby-version index 37c2961c..b347b11e 100644 --- a/.ruby-version +++ b/.ruby-version @@ -1 +1 @@ -2.7.2 +3.2.3 diff --git a/CHANGELOG b/CHANGELOG index 149dd00f..ac51e3a3 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,3 +1,8 @@ +v1.7.0 + - Drop support for ruby 2.7 + - Add support for ruby 3.2+ + - Require minimum version of faraday 2.0 + v1.6.3 - Add support for sending end user ip address in request headers diff --git a/VERSION b/VERSION index 266146b8..bd8bf882 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.6.3 +1.7.0 diff --git a/lib/spark_api/authentication/oauth2_impl/faraday_middleware.rb b/lib/spark_api/authentication/oauth2_impl/faraday_middleware.rb index 6089aebe..857d2d81 100644 --- a/lib/spark_api/authentication/oauth2_impl/faraday_middleware.rb +++ b/lib/spark_api/authentication/oauth2_impl/faraday_middleware.rb @@ -6,7 +6,7 @@ module OAuth2Impl #==OAuth2 Faraday response middleware # HTTP Response after filter to package oauth2 responses and bubble up basic api errors. - class FaradayMiddleware < Faraday::Response::Middleware + class FaradayMiddleware < Faraday::Middleware def initialize(app) super(app) @@ -42,7 +42,7 @@ def on_complete(env) #==OAuth2 Faraday response middleware # HTTP Response after filter to package oauth2 responses and bubble up basic api errors. - class SparkbarFaradayMiddleware < Faraday::Response::Middleware + class SparkbarFaradayMiddleware < Faraday::Middleware def initialize(app) super(app) diff --git a/lib/spark_api/configuration/yaml.rb b/lib/spark_api/configuration/yaml.rb index 9e2b0ca0..bb600bb9 100644 --- a/lib/spark_api/configuration/yaml.rb +++ b/lib/spark_api/configuration/yaml.rb @@ -60,7 +60,7 @@ def self.config_keys() end def self.exists?(name) - File.exists? "#{config_path}/#{name}.yml" + File.exist? 
"#{config_path}/#{name}.yml" end def self.build(name) diff --git a/lib/spark_api/faraday_middleware.rb b/lib/spark_api/faraday_middleware.rb index 17436d2c..7fe065a0 100644 --- a/lib/spark_api/faraday_middleware.rb +++ b/lib/spark_api/faraday_middleware.rb @@ -5,7 +5,7 @@ module SparkApi #=Spark API Faraday middleware # HTTP Response after filter to package api responses and bubble up basic api errors. - class FaradayMiddleware < Faraday::Response::Middleware + class FaradayMiddleware < Faraday::Middleware include SparkApi::PaginateHelper def initialize(app) diff --git a/spark_api.gemspec b/spark_api.gemspec index 4284d2af..8ca1e008 100644 --- a/spark_api.gemspec +++ b/spark_api.gemspec @@ -31,10 +31,9 @@ Gem::Specification.new do |s| s.require_paths = ["lib"] s.add_dependency 'addressable' - s.add_dependency 'faraday', '>= 0.17.3', '< 2.0' - s.add_dependency 'multi_json', '~> 1.0' + s.add_dependency 'faraday', '>= 2.0.0' + s.add_dependency 'multi_json', '> 1.0' s.add_dependency 'json', '>= 1.7' - s.add_dependency 'builder', '>= 2.1.2', '< 4.0.0' s.add_dependency 'will_paginate', '>= 3.0.pre2', '< 4.0.0' s.add_dependency 'highline', '>= 1.0' @@ -45,10 +44,7 @@ Gem::Specification.new do |s| s.add_development_dependency 'rexml' #needed for ruby 3 s.add_development_dependency 'typhoeus' s.add_development_dependency 'ci_reporter_rspec' - # s.add_development_dependency 'rb-readline' - # s.add_development_dependency 'rb-fsevent' - # s.add_development_dependency 'simplecov' + s.add_development_dependency 'builder', '>= 2.1.2', '< 4.0.0' s.add_development_dependency 'simplecov-rcov' - # s.add_development_dependency 'guard-rspec' end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index e9075362..4f9a881c 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -23,7 +23,7 @@ require 'spark_api' -FileUtils.mkdir 'log' unless File.exists? 'log' +FileUtils.mkdir 'log' unless File.exist? 'log' module SparkApi def self.logger From c65d92ca66851b30be3cc618829c49eed0224802 Mon Sep 17 00:00:00 2001 From: Brandon Hornseth Date: Sat, 2 Nov 2024 09:39:54 -0600 Subject: [PATCH 3/8] Add Verbose option This change turns off request and response logging by default and allows users to turn that back on by setting a new `verbose` option to true. --- CHANGELOG | 2 ++ lib/spark_api/configuration.rb | 8 +++++--- lib/spark_api/connection.rb | 4 +++- lib/spark_api/faraday_middleware.rb | 4 +++- lib/spark_api/request.rb | 4 +++- spec/unit/spark_api/configuration_spec.rb | 7 ++++++- 6 files changed, 22 insertions(+), 7 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index ac51e3a3..eeee7ee6 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,8 @@ v1.7.0 - Drop support for ruby 2.7 - Add support for ruby 3.2+ - Require minimum version of faraday 2.0 + - Disabled request/response logging by default. 
Old behavior can be restored + by setting a new `verbose` configuration option to true v1.6.3 - Add support for sending end user ip address in request headers diff --git a/lib/spark_api/configuration.rb b/lib/spark_api/configuration.rb index 7c046ed7..98e3c9c5 100644 --- a/lib/spark_api/configuration.rb +++ b/lib/spark_api/configuration.rb @@ -9,9 +9,9 @@ module Configuration end # valid configuration options - VALID_OPTION_KEYS = [:api_key, :api_secret, :api_user, :endpoint, - :user_agent, :version, :ssl, :ssl_verify, :oauth2_provider, :authentication_mode, - :auth_endpoint, :callback, :compress, :timeout, :middleware, :dictionary_version, :request_id_chain, :user_ip_address].freeze + VALID_OPTION_KEYS = [:api_key, :api_secret, :api_user, :endpoint, + :user_agent, :version, :ssl, :ssl_verify, :oauth2_provider, :authentication_mode, + :auth_endpoint, :callback, :compress, :timeout, :middleware, :dictionary_version, :request_id_chain, :user_ip_address, :verbose].freeze OAUTH2_KEYS = [:authorization_uri, :access_uri, :client_id, :client_secret, # Requirements for authorization_code grant type :redirect_uri, @@ -47,6 +47,7 @@ module Configuration DEFAULT_DICTIONARY_VERSION = nil DEFAULT_REQUEST_ID_CHAIN = nil DEFAULT_USER_IP_ADDRESS = nil + DEFAULT_VERBOSE = false X_SPARK_API_USER_AGENT = "X-SparkApi-User-Agent" X_USER_IP_ADDRESS = "X-User-IP-Address" @@ -85,6 +86,7 @@ def reset_configuration self.dictionary_version = DEFAULT_DICTIONARY_VERSION self.request_id_chain = DEFAULT_REQUEST_ID_CHAIN self.user_ip_address = DEFAULT_USER_IP_ADDRESS + self.verbose = DEFAULT_VERBOSE self end end diff --git a/lib/spark_api/connection.rb b/lib/spark_api/connection.rb index fe17940f..997c5dd1 100644 --- a/lib/spark_api/connection.rb +++ b/lib/spark_api/connection.rb @@ -40,7 +40,9 @@ def connection(force_ssl = false) conn.options[:timeout] = self.timeout conn.adapter Faraday.default_adapter end - SparkApi.logger.debug { "Connection: #{conn.inspect}" } + if self.verbose + SparkApi.logger.debug { "Connection: #{conn.inspect}" } + end conn end diff --git a/lib/spark_api/faraday_middleware.rb b/lib/spark_api/faraday_middleware.rb index 7fe065a0..4370c3bf 100644 --- a/lib/spark_api/faraday_middleware.rb +++ b/lib/spark_api/faraday_middleware.rb @@ -18,7 +18,9 @@ def on_complete(env) env[:body] = decompress_body(env) body = MultiJson.decode(env[:body]) - SparkApi.logger.debug{ "Response Body: #{body.inspect}" } + if SparkApi.verbose + SparkApi.logger.debug{ "Response Body: #{body.inspect}" } + end unless body.is_a?(Hash) && body.key?("D") raise InvalidResponse, "The server response could not be understood" end diff --git a/lib/spark_api/request.rb b/lib/spark_api/request.rb index 2ece326d..f4363a5e 100644 --- a/lib/spark_api/request.rb +++ b/lib/spark_api/request.rb @@ -93,7 +93,9 @@ def request(method, path, body, options) response = authenticator.request(method, request_path, nil, request_opts) else post_data = process_request_body(body) - SparkApi.logger.debug { "#{method.to_s.upcase} Data: #{post_data}" } + if self.verbose + SparkApi.logger.debug { "#{method.to_s.upcase} Data: #{post_data}" } + end response = authenticator.request(method, request_path, post_data, request_opts) end request_time = Time.now - start_time diff --git a/spec/unit/spark_api/configuration_spec.rb b/spec/unit/spark_api/configuration_spec.rb index 4d2f1947..ac6f76e3 100644 --- a/spec/unit/spark_api/configuration_spec.rb +++ b/spec/unit/spark_api/configuration_spec.rb @@ -16,6 +16,7 @@ expect(SparkApi.request_id_chain).to be_nil 
expect(SparkApi.user_ip_address).to be_nil expect(SparkApi.middleware).to eq('spark_api') + expect(SparkApi.verbose).to be false end end @@ -28,7 +29,8 @@ :endpoint => "http://api.wade.dev.fbsdata.com", :timeout => 15, :request_id_chain => 'foobar', - :user_ip_address => 'barfoo') + :user_ip_address => 'barfoo', + :verbose => true) expect(client.api_key).to match("key_of_wade") expect(client.api_secret).to match("TopSecret") @@ -39,6 +41,7 @@ expect(client.timeout).to eq(15) expect(client.request_id_chain).to eq('foobar') expect(client.user_ip_address).to eq('barfoo') + expect(client.verbose).to be true end it "should allow unverified ssl certificates when verification is off" do @@ -101,6 +104,7 @@ config.endpoint = "test.api.sparkapi.com" config.user_agent = "my useragent" config.timeout = 15 + config.verbose = true end expect(SparkApi.api_key).to match("my_key") @@ -111,6 +115,7 @@ expect(SparkApi.user_agent).to match("my useragent") expect(SparkApi.oauth2_enabled?()).to be false expect(SparkApi.timeout).to eq(15) + expect(SparkApi.verbose).to be true end it "should correctly set up the client for oauth2" do From ea9712cfb1e3e6c4a60ca38727c348db4d15d042 Mon Sep 17 00:00:00 2001 From: Brandon Hornseth Date: Sat, 2 Nov 2024 09:46:22 -0600 Subject: [PATCH 4/8] Clean up GitHub Actions script - only build on some pushes, but all pull requests - support manual builds via workflow_dispatch - change publish to be triggered by a GitHub release action - update to latest actions/checkout script --- .github/workflows/ci.yml | 15 +++++++++++---- CHANGELOG | 2 +- VERSION | 2 +- 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4bcd7ced..96824968 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,6 +1,13 @@ name: CI -on: [push, pull_request] +on: + push: + branches: 'master' + pull_request: + workflow_dispatch: + release: + types: published + jobs: build: @@ -13,7 +20,7 @@ jobs: steps: - name: repo checkout - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Set up Ruby ${{ matrix.ruby }} # https://github.com/ruby/setup-ruby @@ -36,14 +43,14 @@ jobs: runs-on: ubuntu-latest # only run if we pushed a tag - if: startsWith(github.ref, 'refs/tags/v') + if: github.event_name == 'release' # require that the build matrix passed needs: build steps: - name: repo checkout - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Set up Ruby uses: ruby/setup-ruby@v1 diff --git a/CHANGELOG b/CHANGELOG index eeee7ee6..eb76ad20 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,4 +1,4 @@ -v1.7.0 +v2.0.0 - Drop support for ruby 2.7 - Add support for ruby 3.2+ - Require minimum version of faraday 2.0 diff --git a/VERSION b/VERSION index bd8bf882..227cea21 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.7.0 +2.0.0 From 4295b7ce393e5764a54e25ec46ac44e58f156a64 Mon Sep 17 00:00:00 2001 From: Brandon Hornseth Date: Wed, 11 Dec 2024 13:45:32 -0700 Subject: [PATCH 5/8] BREAKING: remove `compress` option This change removes the `compress` option from the client entirely. We use the default faraday adapter, which is `net/http` from stdlib. If you read [the documentation for that module][net-http-docs], you'll come across this: > `Net::HTTP` automatically adds Accept-Encoding for compression of > response bodies and automatically decompresses gzip and deflate > responses unless a Range header was sent. 
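For illustration only (this snippet is not part of the patch), the quoted default can be seen with a bare net/http call; the URL below is just a placeholder for any endpoint that serves a gzip-encoded response body:

```ruby
# Illustrative sketch, not part of this change. The URL is a placeholder for
# any endpoint that serves its response gzip-compressed.
require 'net/http'
require 'uri'

uri = URI('https://example.com/gzipped-resource') # hypothetical endpoint
response = Net::HTTP.get_response(uri)

# net/http added Accept-Encoding for us and inflated the body on the way in,
# so the Content-Encoding header is gone and no manual Zlib step is needed.
puts response['content-encoding'].inspect # typically nil after auto-decoding
puts response.body[0, 80]                 # already-decompressed content
```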
In other words, by default, ruby's net/http module will request gzipped
content and automatically uncompress it if that's what the response contains.
If instead one sets the `Accept-Encoding` header on a request that's made with
`net/http`, that default behavior is ignored and we're left to deal with
unpacking the response ourselves, as was done in the `FaradayMiddleware` class.

So whether the `compress` option is used or not, the behavior has been that
we're asking for compressed responses and the option is effectively ignored. I
went back to ruby 1.9.2 and the same behavior existed there. Given that, I'm
confident this option has always been either ignored or redundant.

[net-http-docs]: https://ruby-doc.org/stdlib-3.0.0/libdoc/net/http/rdoc/Net/HTTP.html#class-Net::HTTP-label-Compression
---
 CHANGELOG | 2 +
 lib/spark_api/configuration.rb | 4 +-
 lib/spark_api/connection.rb | 5 --
 lib/spark_api/faraday_middleware.rb | 14 ------
 lib/spark_api/reso_faraday_middleware.rb | 7 ---
 spec/unit/spark_api/configuration_spec.rb | 6 ---
 .../unit/spark_api/faraday_middleware_spec.rb | 49 -------------------
 7 files changed, 3 insertions(+), 84 deletions(-)

diff --git a/CHANGELOG b/CHANGELOG
index eb76ad20..8449cc9a 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -4,6 +4,8 @@ v2.0.0
   - Require minimum version of faraday 2.0
   - Disabled request/response logging by default. Old behavior can be restored
     by setting a new `verbose` configuration option to true
+  - BREAKING: remove `compress` from configuration. gzip compression is now
+    always enabled

 v1.6.3
   - Add support for sending end user ip address in request headers
diff --git a/lib/spark_api/configuration.rb b/lib/spark_api/configuration.rb
index 98e3c9c5..cfed7be2 100644
--- a/lib/spark_api/configuration.rb
+++ b/lib/spark_api/configuration.rb
@@ -11,7 +11,7 @@ module Configuration
     # valid configuration options
     VALID_OPTION_KEYS = [:api_key, :api_secret, :api_user, :endpoint,
       :user_agent, :version, :ssl, :ssl_verify, :oauth2_provider, :authentication_mode,
-      :auth_endpoint, :callback, :compress, :timeout, :middleware, :dictionary_version, :request_id_chain, :user_ip_address, :verbose].freeze
+      :auth_endpoint, :callback, :timeout, :middleware, :dictionary_version, :request_id_chain, :user_ip_address, :verbose].freeze
     OAUTH2_KEYS = [:authorization_uri, :access_uri, :client_id, :client_secret,
       # Requirements for authorization_code grant type
       :redirect_uri,
@@ -41,7 +41,6 @@ module Configuration
     DEFAULT_SSL = true
     DEFAULT_SSL_VERIFY = true
     DEFAULT_OAUTH2 = nil
-    DEFAULT_COMPRESS = false
     DEFAULT_TIMEOUT = 5 # seconds
     DEFAULT_MIDDLEWARE = 'spark_api'
     DEFAULT_DICTIONARY_VERSION = nil
@@ -80,7 +79,6 @@ def reset_configuration
       self.ssl = DEFAULT_SSL
       self.ssl_verify = DEFAULT_SSL_VERIFY
       self.version = DEFAULT_VERSION
-      self.compress = DEFAULT_COMPRESS
       self.timeout = DEFAULT_TIMEOUT
       self.middleware = DEFAULT_MIDDLEWARE
       self.dictionary_version = DEFAULT_DICTIONARY_VERSION
diff --git a/lib/spark_api/connection.rb b/lib/spark_api/connection.rb
index 997c5dd1..eb4dc221 100644
--- a/lib/spark_api/connection.rb
+++ b/lib/spark_api/connection.rb
@@ -10,7 +10,6 @@ module Connection
     HTTP_SCHEME = 'http:'
     HTTPS_SCHEME = 'https:'
     ACCEPT_ENCODING = 'Accept-Encoding'
-    COMPRESS_ACCEPT_ENCODING = 'gzip, deflate'
     X_REQUEST_ID_CHAIN = 'X-Request-Id-Chain'
     MIME_JSON = 'application/json'
     MIME_RESO = 'application/json, application/xml'
@@ -27,10 +26,6 @@ def connection(force_ssl = false)
         opts[:url] = @endpoint.sub REG_HTTPS, HTTP_SCHEME
       end

-      if self.compress
-
opts[:headers][ACCEPT_ENCODING] = COMPRESS_ACCEPT_ENCODING - end - if request_id_chain opts[:headers][X_REQUEST_ID_CHAIN] = request_id_chain end diff --git a/lib/spark_api/faraday_middleware.rb b/lib/spark_api/faraday_middleware.rb index 4370c3bf..3fb86364 100644 --- a/lib/spark_api/faraday_middleware.rb +++ b/lib/spark_api/faraday_middleware.rb @@ -15,8 +15,6 @@ def initialize(app) # Handles pretty much all the api response parsing and error handling. All responses that # indicate a failure will raise a SparkApi::ClientError exception def on_complete(env) - env[:body] = decompress_body(env) - body = MultiJson.decode(env[:body]) if SparkApi.verbose SparkApi.logger.debug{ "Response Body: #{body.inspect}" } @@ -82,18 +80,6 @@ def on_complete(env) env[:body] = results end - def decompress_body(env) - encoding = env[:response_headers]['content-encoding'].to_s.downcase - - if encoding == 'gzip' - env[:body] = Zlib::GzipReader.new(StringIO.new(env[:body])).read - elsif encoding == 'deflate' - env[:body] = Zlib::Inflate.inflate(env[:body]) - end - - env[:body] - end - private def http_method_override_request?(env) diff --git a/lib/spark_api/reso_faraday_middleware.rb b/lib/spark_api/reso_faraday_middleware.rb index a69df868..dafa9b84 100644 --- a/lib/spark_api/reso_faraday_middleware.rb +++ b/lib/spark_api/reso_faraday_middleware.rb @@ -1,12 +1,7 @@ module SparkApi - class ResoFaradayMiddleware < FaradayMiddleware - def on_complete(env) - - body = decompress_body(env) - begin body = MultiJson.decode(body) @@ -21,9 +16,7 @@ def on_complete(env) # some minor format verification raise e if body.strip[/\A<\?xml/].nil? end - end - end Faraday::Response.register_middleware :reso_api => ResoFaradayMiddleware diff --git a/spec/unit/spark_api/configuration_spec.rb b/spec/unit/spark_api/configuration_spec.rb index ac6f76e3..48fff99d 100644 --- a/spec/unit/spark_api/configuration_spec.rb +++ b/spec/unit/spark_api/configuration_spec.rb @@ -228,12 +228,6 @@ expect(c.connection.headers["Accept-Encoding"]).to be_nil end - it "should set gzip header if compress option is set" do - c = SparkApi::Client.new(:endpoint => "https://api.sparkapi.com", - :compress => true) - expect(c.connection.headers["Accept-Encoding"]).to eq("gzip, deflate") - end - it "should set default timeout of 5 seconds" do c = SparkApi::Client.new(:endpoint => "https://sparkapi.com") expect(c.connection.options[:timeout]).to eq(5) diff --git a/spec/unit/spark_api/faraday_middleware_spec.rb b/spec/unit/spark_api/faraday_middleware_spec.rb index bdca3b72..b46827a2 100644 --- a/spec/unit/spark_api/faraday_middleware_spec.rb +++ b/spec/unit/spark_api/faraday_middleware_spec.rb @@ -97,55 +97,6 @@ expect(e.errors).to eq("Some errors and stuff.") } end - - end - - describe "#decompress_body" do - let(:middleware) do - SparkApi::FaradayMiddleware.new(SparkApi) - end - - it "should leave the body along if content-encoding not set" do - env = { - :body => "UNCOMPRESSED", - :response_headers => {} - } - - expect(middleware.decompress_body(env)).to eq("UNCOMPRESSED") - end - - it "should unzip gzipped data" do - bod = "OUTPUT BODY" - - out = StringIO.new - gz = Zlib::GzipWriter.new(out) - gz.write bod - gz.close - - env = { - :body => out.string, - :response_headers => { - 'content-encoding' => 'gzip' - } - } - - expect(middleware.decompress_body(env)).to eq(bod) - end - - it "should inflate deflated data" do - bod = "INFLATED BODY" - deflated_bod = Zlib::Deflate.deflate(bod) - - env = { - :body => deflated_bod, - :response_headers => { - 
'content-encoding' => 'deflate' - } - } - - expect(middleware.decompress_body(env)).to eq(bod) - end end - end From 2f4814322d9455517dbafaa9dd9036ef2e08d4da Mon Sep 17 00:00:00 2001 From: Brandon Hornseth Date: Thu, 12 Dec 2024 07:38:20 -0700 Subject: [PATCH 6/8] Remove highline dependency Highline is only used within this cli provider, and only to capture and do some minor formatting on the user input. This changeset removes highline entirely and replaces it with equivalent ruby --- .../authentication/oauth2_impl/cli_provider.rb | 10 +++++----- spark_api.gemspec | 1 - 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/lib/spark_api/authentication/oauth2_impl/cli_provider.rb b/lib/spark_api/authentication/oauth2_impl/cli_provider.rb index df82295e..7c1caed1 100644 --- a/lib/spark_api/authentication/oauth2_impl/cli_provider.rb +++ b/lib/spark_api/authentication/oauth2_impl/cli_provider.rb @@ -1,5 +1,3 @@ -require "highline" - module SparkApi module Authentication module OAuth2Impl @@ -18,9 +16,11 @@ def initialize(credentials) def redirect(url) puts "Missing OAuth2 session, redirecting..." puts "Please visit #{url}, login as a user, and paste the authorization code here:" - self.code = HighLine.ask("Authorization code?") do |q| - q.whitespace = :strip_and_collapse - q.validate = /^\w+$/ + puts "Authorization code?" + raw_code = gets.strip + + unless raw_code.match?(/^\w+$/) + raise "Invalid authorization code. Please try again." end end diff --git a/spark_api.gemspec b/spark_api.gemspec index 8ca1e008..b7015646 100644 --- a/spark_api.gemspec +++ b/spark_api.gemspec @@ -35,7 +35,6 @@ Gem::Specification.new do |s| s.add_dependency 'multi_json', '> 1.0' s.add_dependency 'json', '>= 1.7' s.add_dependency 'will_paginate', '>= 3.0.pre2', '< 4.0.0' - s.add_dependency 'highline', '>= 1.0' # TEST GEMS s.add_development_dependency 'rake' From 644dd46b73ac10011ff388c33c1a185d1ad5366d Mon Sep 17 00:00:00 2001 From: Brandon Hornseth Date: Thu, 12 Dec 2024 08:59:10 -0700 Subject: [PATCH 7/8] remove broken Code Climate badge --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2113c364..5927be9a 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ Spark API ===================== -![CI](https://github.com/sparkapi/spark_api/workflows/CI/badge.svg) ![Code Climate](https://codeclimate.com/badge.png) +![CI](https://github.com/sparkapi/spark_api/workflows/CI/badge.svg) A Ruby wrapper for the Spark REST API. Loosely based on ActiveResource to provide models to interact with remote services. From 11b29607ab6e634d5950d95187efb8fa8b45b147 Mon Sep 17 00:00:00 2001 From: Cody Gustafson Date: Fri, 13 Dec 2024 09:26:01 -0600 Subject: [PATCH 8/8] Update reso_faraday_middleware.rb The new Faraday::Middleware class does not have a `body` method from the response and requires us to retrieve it from the `env` variable. --- lib/spark_api/reso_faraday_middleware.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spark_api/reso_faraday_middleware.rb b/lib/spark_api/reso_faraday_middleware.rb index dafa9b84..75fe884e 100644 --- a/lib/spark_api/reso_faraday_middleware.rb +++ b/lib/spark_api/reso_faraday_middleware.rb @@ -3,7 +3,7 @@ module SparkApi class ResoFaradayMiddleware < FaradayMiddleware def on_complete(env) begin - body = MultiJson.decode(body) + body = MultiJson.decode(env[:body]) if body["D"] super(env)