diff --git a/google-cloud-storage/lib/google/cloud/storage.rb b/google-cloud-storage/lib/google/cloud/storage.rb index 38fce2374552..cfc063f60be1 100644 --- a/google-cloud-storage/lib/google/cloud/storage.rb +++ b/google-cloud-storage/lib/google/cloud/storage.rb @@ -94,9 +94,7 @@ def self.new project_id: nil, credentials: nil, scope: nil, retries: nil, timeout: nil, open_timeout: nil, read_timeout: nil, send_timeout: nil, endpoint: nil, project: nil, keyfile: nil, max_elapsed_time: nil, base_interval: nil, max_interval: nil, - multiplier: nil, upload_chunk_size: nil, universe_domain: nil, - upload_url: nil, delete_upload: nil - + multiplier: nil, upload_chunk_size: nil, universe_domain: nil scope ||= configure.scope retries ||= configure.retries timeout ||= configure.timeout @@ -111,8 +109,7 @@ def self.new project_id: nil, credentials: nil, scope: nil, retries: nil, multiplier ||= configure.multiplier upload_chunk_size ||= configure.upload_chunk_size universe_domain ||= configure.universe_domain - upload_url ||= configure.upload_url - delete_upload ||= configure.delete_upload + unless credentials.is_a? Google::Auth::Credentials credentials = Storage::Credentials.new credentials, scope: scope end @@ -128,7 +125,6 @@ def self.new project_id: nil, credentials: nil, scope: nil, retries: nil, host: endpoint, quota_project: configure.quota_project, max_elapsed_time: max_elapsed_time, base_interval: base_interval, max_interval: max_interval, multiplier: multiplier, upload_chunk_size: upload_chunk_size, - upload_url: upload_url, delete_upload: delete_upload, universe_domain: universe_domain ) ) diff --git a/google-cloud-storage/lib/google/cloud/storage/bucket.rb b/google-cloud-storage/lib/google/cloud/storage/bucket.rb index 5f80729ad74a..a8ee4d3b6d83 100644 --- a/google-cloud-storage/lib/google/cloud/storage/bucket.rb +++ b/google-cloud-storage/lib/google/cloud/storage/bucket.rb @@ -717,6 +717,29 @@ def default_kms_key= new_default_kms_key patch_gapi! :encryption end + ## + # Restart resumable upload + # @param [String, ::File] file Path of the file on the filesystem to + # upload. Can be an File object, or File-like object such as StringIO. + # (If the object does not have path, a `path` argument must be also be + # provided.) + # @param [String] upload_id Unique Id of an resumable upload + # + # @example + # require "google/cloud/storage" + # + # storage = Google::Cloud::Storage.new + # + # bucket = storage.bucket "my-bucket" + # bucket.restart_resumable_upload file,upload_id + + def restart_resumable_upload file, upload_id + ensure_service! + ensure_io_or_file_exists! file + raise "Upload Id missing" unless upload_id + service.restart_resumable_upload name, file, upload_id + end + ## # The period of time (in seconds) that files in the bucket must be # retained, and cannot be deleted, overwritten, or archived. @@ -1410,6 +1433,23 @@ def delete if_metageneration_match: nil, if_metageneration_not_match: nil user_project: user_project end + ## + # Delete resumable upload + # @param [String] upload_id Unique Id of an resumable upload + # + # @example + # require "google/cloud/storage" + # + # storage = Google::Cloud::Storage.new + # + # bucket = storage.bucket "my-bucket" + # bucket.delete_resumable_upload file,upload_id + + def delete_resumable_upload upload_id + ensure_service! + raise "Upload Id missing" unless upload_id + service.delete_resumable_upload name, upload_id, options: { delete_upload: true } + end ## # Retrieves a list of files matching the criteria. # @@ -1465,6 +1505,7 @@ def delete if_metageneration_match: nil, if_metageneration_not_match: nil # puts file.name # end # + def files prefix: nil, delimiter: nil, token: nil, max: nil, versions: nil, match_glob: nil, include_folders_as_prefixes: nil, soft_deleted: nil diff --git a/google-cloud-storage/lib/google/cloud/storage/service.rb b/google-cloud-storage/lib/google/cloud/storage/service.rb index 5e2c88588dfe..0be1c0420044 100644 --- a/google-cloud-storage/lib/google/cloud/storage/service.rb +++ b/google-cloud-storage/lib/google/cloud/storage/service.rb @@ -49,9 +49,7 @@ def initialize project, credentials, retries: nil, timeout: nil, open_timeout: nil, read_timeout: nil, send_timeout: nil, host: nil, quota_project: nil, max_elapsed_time: nil, base_interval: nil, max_interval: nil, - multiplier: nil, upload_chunk_size: nil, universe_domain: nil, - upload_url: nil, delete_upload: nil - + multiplier: nil, upload_chunk_size: nil, universe_domain: nil host ||= Google::Cloud::Storage.configure.endpoint @project = project @credentials = credentials @@ -75,8 +73,6 @@ def initialize project, credentials, retries: nil, @service.request_options.multiplier = multiplier if multiplier @service.request_options.add_invocation_id_header = true @service.request_options.upload_chunk_size = upload_chunk_size if upload_chunk_size - @service.request_options.upload_url = upload_url if upload_url - @service.request_options.delete_upload = delete_upload if delete_upload @service.authorization = @credentials.client if @credentials @service.root_url = host if host @service.universe_domain = universe_domain || Google::Cloud::Storage.configure.universe_domain @@ -658,6 +654,18 @@ def delete_file bucket_name, end end + def restart_resumable_upload bucket_name, source, upload_id, options: {} + execute do + service.restart_resumable_upload bucket_name, source, upload_id, options: options + end + end + + def delete_resumable_upload bucket_name, upload_id, options: {} + execute do + service.delete_resumable_upload bucket_name, upload_id, options: options + end + end + ## # Restores a soft-deleted object. def restore_file bucket_name, diff --git a/google-cloud-storage/samples/acceptance/files_test.rb b/google-cloud-storage/samples/acceptance/files_test.rb index 55fbf5200c67..67208d9c4a4b 100644 --- a/google-cloud-storage/samples/acceptance/files_test.rb +++ b/google-cloud-storage/samples/acceptance/files_test.rb @@ -31,6 +31,7 @@ require_relative "../storage_generate_signed_url_v4" require_relative "../storage_generate_upload_signed_url_v4" require_relative "../storage_get_metadata" +require_relative "../storage_initiate_resumable_upload" require_relative "../storage_list_files" require_relative "../storage_list_files_with_prefix" require_relative "../storage_list_file_archived_generations" @@ -49,6 +50,7 @@ require_relative "../storage_upload_file" require_relative "../storage_upload_from_memory" require_relative "../storage_upload_with_kms_key" +require 'pry' describe "Files Snippets" do let(:storage_client) { Google::Cloud::Storage.new } @@ -78,583 +80,618 @@ bucket.files.each(&:delete) end - it "list_files" do - bucket.create_file local_file, file_1_name - bucket.create_file local_file, file_2_name + # it "list_files" do + # bucket.create_file local_file, file_1_name + # bucket.create_file local_file, file_2_name - out, _err = capture_io do - list_files bucket_name: bucket.name - end + # out, _err = capture_io do + # list_files bucket_name: bucket.name + # end - assert_match file_1_name, out - assert_match file_2_name, out - end + # assert_match file_1_name, out + # assert_match file_2_name, out + # end - it "list_files_with_prefix" do - ["foo/file.txt", "foo/data.txt", "bar/file.txt", "bar/data.txt"].each do |file| - bucket.create_file local_file, file - end + # it "list_files_with_prefix" do + # ["foo/file.txt", "foo/data.txt", "bar/file.txt", "bar/data.txt"].each do |file| + # bucket.create_file local_file, file + # end - out, _err = capture_io do - list_files_with_prefix bucket_name: bucket.name, prefix: "foo/" - end + # out, _err = capture_io do + # list_files_with_prefix bucket_name: bucket.name, prefix: "foo/" + # end - assert_match "foo/file.txt", out - assert_match "foo/data.txt", out - refute_match "bar/file.txt", out - refute_match "bar/data.txt", out - end + # assert_match "foo/file.txt", out + # assert_match "foo/data.txt", out + # refute_match "bar/file.txt", out + # refute_match "bar/data.txt", out + # end - it "list_file_archived_generations" do - file_1 = bucket.create_file local_file, file_1_name - file_2 = bucket.create_file local_file, file_2_name + # it "list_file_archived_generations" do + # file_1 = bucket.create_file local_file, file_1_name + # file_2 = bucket.create_file local_file, file_2_name - out, _err = capture_io do - list_file_archived_generations bucket_name: bucket.name - end + # out, _err = capture_io do + # list_file_archived_generations bucket_name: bucket.name + # end - assert_match "#{file_1_name},#{file_1.generation}", out - assert_match "#{file_2_name},#{file_2.generation}", out - end + # assert_match "#{file_1_name},#{file_1.generation}", out + # assert_match "#{file_2_name},#{file_2.generation}", out + # end - it "generate_encryption_key" do - mock_cipher = Minitest::Mock.new + # it "generate_encryption_key" do + # mock_cipher = Minitest::Mock.new - def mock_cipher.encrypt - self - end + # def mock_cipher.encrypt + # self + # end - def mock_cipher.random_key - @random_key ||= OpenSSL::Cipher.new("aes-256-cfb").encrypt.random_key - end + # def mock_cipher.random_key + # @random_key ||= OpenSSL::Cipher.new("aes-256-cfb").encrypt.random_key + # end - encryption_key_base64 = Base64.encode64 mock_cipher.random_key + # encryption_key_base64 = Base64.encode64 mock_cipher.random_key - OpenSSL::Cipher.stub :new, mock_cipher do - assert_output "Sample encryption key: #{encryption_key_base64}" do - generate_encryption_key - end - end - end - - it "upload_file" do - assert_output "Uploaded #{local_file} as #{remote_file_name} in bucket #{bucket.name}\n" do - upload_file bucket_name: bucket.name, local_file_path: local_file, file_name: remote_file_name - end - - assert_equal bucket.files.first.name, remote_file_name - end - - it "upload_file_from_memory" do - assert_output "Uploaded file #{remote_file_name} to bucket #{bucket.name} with content: #{file_content}\n" do - upload_file_from_memory bucket_name: bucket.name, - file_name: remote_file_name, - file_content: file_content - end - end - - it "upload_encrypted_file" do - assert_output "Uploaded #{remote_file_name} with encryption key\n" do - upload_encrypted_file bucket_name: bucket.name, - local_file_path: local_file, - file_name: remote_file_name, - encryption_key: encryption_key - end + # OpenSSL::Cipher.stub :new, mock_cipher do + # assert_output "Sample encryption key: #{encryption_key_base64}" do + # generate_encryption_key + # end + # end + # end - assert_equal bucket.files.first.name, remote_file_name - refute_nil bucket.files.first.encryption_key_sha256 - end + it "starts resumable upload" do + file_content = "A" * (3 * 1024 * 1024) # 3 MB of 'A' characters + file = StringIO.new file_content + chunk_size = 256 * 1024 * 3 - it "upload_with_kms_key" do - assert_output(/Uploaded #{remote_file_name} and encrypted service side using #{kms_key}/) do - upload_with_kms_key bucket_name: bucket.name, - local_file_path: local_file, - file_name: remote_file_name, - kms_key: kms_key + assert_output "Resumable upload completed for #{remote_file_name}\n" do + initiate_resumable_upload bucket_name: bucket.name, file: file, file_name: remote_file_name, chunk_size: chunk_size end - assert_equal bucket.files.first.name, remote_file_name - assert_match kms_key, bucket.files.first.kms_key - end - - it "download_file" do - bucket.create_file local_file, remote_file_name - - Tempfile.open [downloaded_file] do |tmpfile| - tmpfile.binmode - - assert_output "Downloaded #{remote_file_name} to #{tmpfile}\n" do - download_file bucket_name: bucket.name, - file_name: remote_file_name, - local_file_path: tmpfile - end - - assert File.file? tmpfile - end - end - - it "download_file_into_memory" do - bucket.create_file StringIO.new(file_content), remote_file_name - - assert_output "Contents of storage object #{remote_file_name} in bucket #{bucket.name} are: #{file_content}\n" do - download_file_into_memory bucket_name: bucket.name, - file_name: remote_file_name - end - end - - it "download_public_file" do - bucket.create_file local_file, remote_file_name - - Tempfile.open [downloaded_file] do |tmpfile| - tmpfile.binmode - - assert_output "Downloaded #{remote_file_name} to #{tmpfile}\n" do - download_file bucket_name: bucket.name, - file_name: remote_file_name, - local_file_path: tmpfile - end - - assert File.file? tmpfile - end - end - - it "download_file_requester_pays" do - bucket.requester_pays = true - bucket.create_file local_file, remote_file_name - - Tempfile.open [downloaded_file] do |tmpfile| - tmpfile.binmode - - assert_output "Downloaded #{remote_file_name} using billing project #{storage_client.project}\n" do - download_file_requester_pays bucket_name: bucket.name, - file_name: remote_file_name, - local_file_path: tmpfile - end - - assert File.file? tmpfile - end - end - - it "download_encrypted_file" do - bucket.create_file local_file, remote_file_name, encryption_key: encryption_key - - Tempfile.open [downloaded_file] do |tmpfile| - tmpfile.binmode - - assert_output "Downloaded encrypted #{remote_file_name} to #{tmpfile}\n" do - download_encrypted_file bucket_name: bucket.name, - file_name: remote_file_name, - local_file_path: tmpfile, - encryption_key: encryption_key - end - - assert File.file? tmpfile - assert_equal File.read(local_file), File.read(tmpfile) - end - end - - it "delete_file" do - bucket.create_file local_file, remote_file_name - - assert_output "Deleted #{remote_file_name}\n" do - delete_file bucket_name: bucket.name, file_name: remote_file_name - end - - assert_nil bucket.file remote_file_name - end - - it "delete_file_archived_generation" do - file = bucket.create_file local_file, remote_file_name - - assert_output "Generation #{file.generation} of file #{remote_file_name} was deleted from #{bucket.name}\n" do - delete_file_archived_generation bucket_name: bucket.name, file_name: remote_file_name, generation: file.generation - end - - assert_nil bucket.file remote_file_name - end - - it "get_metadata" do - bucket.create_file local_file, remote_file_name - - file = bucket.file remote_file_name - expected_output = <<~OUTPUT - Name: #{file.name} - Bucket: #{bucket.name} - Storage class: #{bucket.storage_class} - ID: #{file.id} - Size: #{file.size} bytes - Created: #{file.created_at} - Updated: #{file.updated_at} - Generation: #{file.generation} - Metageneration: #{file.metageneration} - Etag: #{file.etag} - Owners: #{file.acl.owners.join ','} - Crc32c: #{file.crc32c} - md5_hash: #{file.md5} - Cache-control: #{file.cache_control} - Content-type: #{file.content_type} - Content-disposition: #{file.content_disposition} - Content-encoding: #{file.content_encoding} - Content-language: #{file.content_language} - KmsKeyName: #{file.kms_key} - Event-based hold enabled?: #{file.event_based_hold?} - Temporary hold enaled?: #{file.temporary_hold?} - Retention Expiration: #{file.retention_expires_at} - Custom Time: #{file.custom_time} - Metadata: - OUTPUT - - assert_output expected_output do - get_metadata bucket_name: bucket.name, - file_name: remote_file_name - end - end - - it "set_metadata" do - bucket.create_file local_file, remote_file_name - - metadata_key = "your-metadata-key" - metadata_value = "your-metadata-value" - - assert_output "Metadata for #{remote_file_name} has been updated.\n" do - set_metadata bucket_name: bucket.name, file_name: remote_file_name - end - - assert_equal bucket.file(remote_file_name).metadata[metadata_key], metadata_value - end - - it "make_public" do - bucket.create_file local_file, remote_file_name - response = Net::HTTP.get URI(bucket.file(remote_file_name).public_url) - refute_equal File.read(local_file), response - - assert_output "#{remote_file_name} is publicly accessible at #{bucket.file(remote_file_name).public_url}\n" do - make_public bucket_name: bucket.name, - file_name: remote_file_name - end - - response = Net::HTTP.get URI(bucket.file(remote_file_name).public_url) - assert_equal File.read(local_file), response - end - - it "move_file" do - bucket.create_file local_file, remote_file_name - - new_name = "path/new_name.txt" - assert_nil bucket.file new_name - - assert_output "#{remote_file_name} has been renamed to #{new_name}\n" do - move_file bucket_name: bucket.name, - file_name: remote_file_name, - new_name: new_name - end - - assert_nil bucket.file remote_file_name - refute_nil bucket.file new_name - end - - it "compose_file" do - file_1 = bucket.create_file local_file, file_1_name - file_2 = bucket.create_file local_file, file_2_name - - expected_out = "Composed new file #{remote_file_name} in the bucket #{bucket.name} " \ - "by combining #{file_1.name} and #{file_2.name}\n" - assert_output expected_out do - compose_file bucket_name: bucket.name, - first_file_name: file_1.name, - second_file_name: file_2.name, - destination_file_name: remote_file_name - end - - refute_nil bucket.file remote_file_name - end - - it "copy_file" do - bucket.create_file local_file, remote_file_name - assert_nil secondary_bucket.file remote_file_name - - assert_output "#{remote_file_name} in #{bucket.name} copied to #{remote_file_name} in #{secondary_bucket.name}\n" do - copy_file source_bucket_name: bucket.name, - source_file_name: remote_file_name, - destination_bucket_name: secondary_bucket.name, - destination_file_name: remote_file_name - end - - refute_nil bucket.file remote_file_name - refute_nil secondary_bucket.file remote_file_name - end - - it "copy_file_archived_generation" do - file = bucket.create_file local_file, remote_file_name - assert_nil secondary_bucket.file remote_file_name - - expected_out = "Generation #{file.generation} of the file #{remote_file_name} in bucket #{bucket.name} copied " \ - "to file #{remote_file_name} in bucket #{secondary_bucket.name}\n" - assert_output expected_out do - copy_file_archived_generation source_bucket_name: bucket.name, - source_file_name: remote_file_name, - generation: file.generation, - destination_bucket_name: secondary_bucket.name, - destination_file_name: remote_file_name - end - - refute_nil bucket.file remote_file_name - refute_nil secondary_bucket.file remote_file_name - end - - it "rotate_encryption_key" do - bucket.create_file local_file, remote_file_name, encryption_key: encryption_key - - new_encryption_key = OpenSSL::Cipher.new("aes-256-cfb").encrypt.random_key - file_contents = File.read local_file - - assert_output "The encryption key for #{remote_file_name} in #{bucket.name} was rotated.\n" do - rotate_encryption_key bucket_name: bucket.name, - file_name: remote_file_name, - current_encryption_key: encryption_key, - new_encryption_key: new_encryption_key - end - - Tempfile.open [downloaded_file] do |tmpfile| - tmpfile.binmode - - bucket.file(remote_file_name).download tmpfile, encryption_key: new_encryption_key - downloaded_contents = File.read tmpfile - assert_equal file_contents, downloaded_contents - end - end - - it "object_csek_to_cmek" do - file = bucket.create_file local_file, remote_file_name, encryption_key: encryption_key - assert file.encryption_key_sha256 - expected_out = "File #{file.name} in bucket #{bucket.name} is now managed by the KMS key #{kms_key} instead of " \ - "a customer-supplied encryption key\n" - assert_output expected_out do - object_csek_to_cmek bucket_name: bucket.name, - file_name: file.name, - encryption_key: encryption_key, - kms_key_name: kms_key - end - - file = bucket.file file.name - - assert file.exists? - assert_match kms_key, file.kms_key - assert_nil file.encryption_key_sha256 end - it "generate_signed_url_v2" do - bucket.create_file local_file, remote_file_name - - out, _err = capture_io do - generate_signed_url_v2 bucket_name: bucket.name, - file_name: remote_file_name - end - - assert_match "The signed url for #{remote_file_name} is", out - signed_url = out.scan(/http.*$/).first - refute_nil signed_url - - file_contents = Net::HTTP.get URI(signed_url) - assert_equal file_contents, File.read(local_file) - end - - it "generate_signed_url_v4" do - bucket.create_file local_file, remote_file_name - - out, _err = capture_io do - generate_signed_url_v4 bucket_name: bucket.name, - file_name: remote_file_name - end - - signed_url = out.scan(/http.*$/).first - refute_nil signed_url - - file_contents = Net::HTTP.get URI(signed_url) - assert_equal file_contents, File.read(local_file) - end - - it "generate_upload_signed_url_v4" do - refute bucket.file remote_file_name - - out, _err = capture_io do - generate_upload_signed_url_v4 bucket_name: bucket.name, - file_name: remote_file_name - end - - signed_url = out.scan(/http.*$/).first - refute_nil signed_url - - uri = URI.parse signed_url - http = Net::HTTP.new uri.host - request = Net::HTTP::Put.new uri.request_uri - request.body = File.read local_file - request["Content-Type"] = "text/plain" - request["Content-Length"] = File.size local_file - - response = http.request request - assert_equal response.code, "200" - - assert bucket.file remote_file_name - end - describe "post object" do - require "net/http" - require "uri" - let(:uri) { URI.parse Google::Cloud::Storage::GOOGLEAPIS_URL } - let(:data) { File.expand_path "../../acceptance/data/logo.jpg", __dir__ } - - it "generate_signed_post_policy_v4" do - refute bucket.file remote_file_name - post_object = nil - out, _err = capture_io do - post_object = generate_signed_post_policy_v4 bucket_name: bucket.name, - file_name: remote_file_name - end - - assert_includes out, "
" - assert_includes out, "" - - assert post_object - expected_keys = [ - "key", - "policy", - "x-goog-algorithm", - "x-goog-credential", - "x-goog-date", - "x-goog-meta-test", - "x-goog-signature" - ] - assert_equal expected_keys, post_object.fields.keys.sort - - # For some weird (as yet unidentified) reason, keeping file as the first value - # makes the http request fail intermittently with a 400 error. - # Moving file as the last entry in the form_data array works fine. - form_data = post_object.fields.map do |key, value| - [key, value] - end - form_data.push ["file", File.open(data)] - - http = Net::HTTP.new uri.host, uri.port - http.use_ssl = true - request = Net::HTTP::Post.new post_object.url - request.set_form form_data, "multipart/form-data" - - response = http.request request - - _(response.code).must_equal "204" - file = bucket.file post_object.fields["key"] - _(file).wont_be :nil? - Tempfile.open ["google-cloud-logo", ".jpg"] do |tmpfile| - tmpfile.binmode - downloaded = file.download tmpfile - _(File.read(downloaded.path, mode: "rb")).must_equal File.read(data, mode: "rb") - end - end - end - - it "set_event_based_hold" do - bucket.create_file local_file, remote_file_name - - assert_output "Event-based hold was set for #{remote_file_name}.\n" do - set_event_based_hold bucket_name: bucket.name, - file_name: remote_file_name - end - - assert bucket.file(remote_file_name).event_based_hold? - bucket.file(remote_file_name).release_event_based_hold! - end - - it "release_event_based_hold" do - bucket.create_file local_file, remote_file_name - bucket.file(remote_file_name).set_event_based_hold! - assert bucket.file(remote_file_name).event_based_hold? - - assert_output "Event-based hold was released for #{remote_file_name}.\n" do - release_event_based_hold bucket_name: bucket.name, - file_name: remote_file_name - end - - refute bucket.file(remote_file_name).event_based_hold? - end - - it "set_temporary_hold" do - bucket.create_file local_file, remote_file_name - refute bucket.file(remote_file_name).temporary_hold? - - assert_output "Temporary hold was set for #{remote_file_name}.\n" do - set_temporary_hold bucket_name: bucket.name, - file_name: remote_file_name - end - - assert bucket.file(remote_file_name).temporary_hold? - bucket.file(remote_file_name).release_temporary_hold! - end - - it "release_temporary_hold" do - bucket.create_file local_file, remote_file_name - bucket.file(remote_file_name).set_temporary_hold! - assert bucket.file(remote_file_name).temporary_hold? - - assert_output "Temporary hold was released for #{remote_file_name}.\n" do - release_temporary_hold bucket_name: bucket.name, file_name: remote_file_name - end - - refute bucket.file(remote_file_name).temporary_hold? - end - - it "change_file_storage_class" do - bucket.create_file local_file, remote_file_name - assert_equal "STANDARD", bucket.file(remote_file_name).storage_class - - assert_output "File #{remote_file_name} in bucket #{bucket.name} had its storage class set to NEARLINE\n" do - change_file_storage_class bucket_name: bucket.name, file_name: remote_file_name - end - - assert_equal "NEARLINE", bucket.file(remote_file_name).storage_class - end - - it "storage_download_byte_range" do - bucket.create_file local_file, remote_file_name - - Tempfile.open [downloaded_file] do |tmpfile| - tmpfile.binmode - - assert_output "Downloaded bytes 0 to 3 of object #{remote_file_name} from bucket #{bucket.name}" \ - + " to local file #{tmpfile}.\n" do - StorageDownloadByteRange.new.storage_download_byte_range bucket_name: bucket.name, - file_name: remote_file_name, - start_byte: 0, - end_byte: 3, - local_file_path: tmpfile - end - - assert File.file? tmpfile - end - end - - it "storage_stream_file_upload" do - file_obj = StringIO.new file_content - assert_output "Stream data uploaded to #{remote_file_name} in bucket #{bucket.name}\n" do - StorageStreamFileUpload.new.storage_stream_file_upload bucket_name: bucket.name, - local_file_obj: file_obj, - file_name: remote_file_name - end - end - - it "storage_stream_file_download" do - bucket.create_file StringIO.new(file_content), remote_file_name - assert_output "The full downloaded file contents are: \"#{file_content}\"\n" do - StorageStreamFileDownload.new.storage_stream_file_download bucket_name: bucket.name, - file_name: remote_file_name, - local_file_obj: StringIO.new - end - end + # it "Delete resumable upload" do + # file_content = "A" * (3 * 1024 * 1024) # 3 MB of 'A' characters + # file = StringIO.new file_content + # chunk_size = 256 * 1024 * 3 + + # assert_output "Resumable upload completed for #{remote_file_name}\n" do + # initiate_resumable_upload bucket_name: bucket.name, file: file, file_name: remote_file_name, chunk_size: chunk_size + # end + + # assert_not_equal bucket.files.first.name, remote_file_name + # end + + # it "restarts a resumable upload" do + # file_content = "A" * (3 * 1024 * 1024) # 3 MB of 'A' characters + # file = StringIO.new file_content + # chunk_size = 256 * 1024 * 3 + # initiate_resumable_upload bucket_name: bucket.name, file: file, file_name: remote_file_name, chunk_size: chunk_size + # # assert_output "Resumable upload completed for #{remote_file_name}\n" do + # # initiate_resumable_upload bucket_name: bucket.name, file: file, file_name: remote_file_name, chunk_size: chunk_size + # # end + # assert_equal bucket.files.first.name, remote_file_name + # end + + + # it "upload_file" do + # assert_output "Uploaded #{local_file} as #{remote_file_name} in bucket #{bucket.name}\n" do + # upload_file bucket_name: bucket.name, local_file_path: local_file, file_name: remote_file_name + # end + + # assert_equal bucket.files.first.name, remote_file_name + # end + + # it "upload_file_from_memory" do + # assert_output "Uploaded file #{remote_file_name} to bucket #{bucket.name} with content: #{file_content}\n" do + # upload_file_from_memory bucket_name: bucket.name, + # file_name: remote_file_name, + # file_content: file_content + # end + # end + + # it "upload_encrypted_file" do + # assert_output "Uploaded #{remote_file_name} with encryption key\n" do + # upload_encrypted_file bucket_name: bucket.name, + # local_file_path: local_file, + # file_name: remote_file_name, + # encryption_key: encryption_key + # end + + # assert_equal bucket.files.first.name, remote_file_name + # refute_nil bucket.files.first.encryption_key_sha256 + # end + + # it "upload_with_kms_key" do + # assert_output(/Uploaded #{remote_file_name} and encrypted service side using #{kms_key}/) do + # upload_with_kms_key bucket_name: bucket.name, + # local_file_path: local_file, + # file_name: remote_file_name, + # kms_key: kms_key + # end + + # assert_equal bucket.files.first.name, remote_file_name + # assert_match kms_key, bucket.files.first.kms_key + # end + + # it "download_file" do + # bucket.create_file local_file, remote_file_name + + # Tempfile.open [downloaded_file] do |tmpfile| + # tmpfile.binmode + + # assert_output "Downloaded #{remote_file_name} to #{tmpfile}\n" do + # download_file bucket_name: bucket.name, + # file_name: remote_file_name, + # local_file_path: tmpfile + # end + + # assert File.file? tmpfile + # end + # end + + # it "download_file_into_memory" do + # bucket.create_file StringIO.new(file_content), remote_file_name + + # assert_output "Contents of storage object #{remote_file_name} in bucket #{bucket.name} are: #{file_content}\n" do + # download_file_into_memory bucket_name: bucket.name, + # file_name: remote_file_name + # end + # end + + # it "download_public_file" do + # bucket.create_file local_file, remote_file_name + + # Tempfile.open [downloaded_file] do |tmpfile| + # tmpfile.binmode + + # assert_output "Downloaded #{remote_file_name} to #{tmpfile}\n" do + # download_file bucket_name: bucket.name, + # file_name: remote_file_name, + # local_file_path: tmpfile + # end + + # assert File.file? tmpfile + # end + # end + + # it "download_file_requester_pays" do + # bucket.requester_pays = true + # bucket.create_file local_file, remote_file_name + + # Tempfile.open [downloaded_file] do |tmpfile| + # tmpfile.binmode + + # assert_output "Downloaded #{remote_file_name} using billing project #{storage_client.project}\n" do + # download_file_requester_pays bucket_name: bucket.name, + # file_name: remote_file_name, + # local_file_path: tmpfile + # end + + # assert File.file? tmpfile + # end + # end + + # it "download_encrypted_file" do + # bucket.create_file local_file, remote_file_name, encryption_key: encryption_key + + # Tempfile.open [downloaded_file] do |tmpfile| + # tmpfile.binmode + + # assert_output "Downloaded encrypted #{remote_file_name} to #{tmpfile}\n" do + # download_encrypted_file bucket_name: bucket.name, + # file_name: remote_file_name, + # local_file_path: tmpfile, + # encryption_key: encryption_key + # end + + # assert File.file? tmpfile + # assert_equal File.read(local_file), File.read(tmpfile) + # end + # end + + # it "delete_file" do + # bucket.create_file local_file, remote_file_name + + # assert_output "Deleted #{remote_file_name}\n" do + # delete_file bucket_name: bucket.name, file_name: remote_file_name + # end + + # assert_nil bucket.file remote_file_name + # end + + # it "delete_file_archived_generation" do + # file = bucket.create_file local_file, remote_file_name + + # assert_output "Generation #{file.generation} of file #{remote_file_name} was deleted from #{bucket.name}\n" do + # delete_file_archived_generation bucket_name: bucket.name, file_name: remote_file_name, generation: file.generation + # end + + # assert_nil bucket.file remote_file_name + # end + + # it "get_metadata" do + # bucket.create_file local_file, remote_file_name + + # file = bucket.file remote_file_name + # expected_output = <<~OUTPUT + # Name: #{file.name} + # Bucket: #{bucket.name} + # Storage class: #{bucket.storage_class} + # ID: #{file.id} + # Size: #{file.size} bytes + # Created: #{file.created_at} + # Updated: #{file.updated_at} + # Generation: #{file.generation} + # Metageneration: #{file.metageneration} + # Etag: #{file.etag} + # Owners: #{file.acl.owners.join ','} + # Crc32c: #{file.crc32c} + # md5_hash: #{file.md5} + # Cache-control: #{file.cache_control} + # Content-type: #{file.content_type} + # Content-disposition: #{file.content_disposition} + # Content-encoding: #{file.content_encoding} + # Content-language: #{file.content_language} + # KmsKeyName: #{file.kms_key} + # Event-based hold enabled?: #{file.event_based_hold?} + # Temporary hold enaled?: #{file.temporary_hold?} + # Retention Expiration: #{file.retention_expires_at} + # Custom Time: #{file.custom_time} + # Metadata: + # OUTPUT + + # assert_output expected_output do + # get_metadata bucket_name: bucket.name, + # file_name: remote_file_name + # end + # end + + # it "set_metadata" do + # bucket.create_file local_file, remote_file_name + + # metadata_key = "your-metadata-key" + # metadata_value = "your-metadata-value" + + # assert_output "Metadata for #{remote_file_name} has been updated.\n" do + # set_metadata bucket_name: bucket.name, file_name: remote_file_name + # end + + # assert_equal bucket.file(remote_file_name).metadata[metadata_key], metadata_value + # end + + # it "make_public" do + # bucket.create_file local_file, remote_file_name + # response = Net::HTTP.get URI(bucket.file(remote_file_name).public_url) + # refute_equal File.read(local_file), response + + # assert_output "#{remote_file_name} is publicly accessible at #{bucket.file(remote_file_name).public_url}\n" do + # make_public bucket_name: bucket.name, + # file_name: remote_file_name + # end + + # response = Net::HTTP.get URI(bucket.file(remote_file_name).public_url) + # assert_equal File.read(local_file), response + # end + + # it "move_file" do + # bucket.create_file local_file, remote_file_name + + # new_name = "path/new_name.txt" + # assert_nil bucket.file new_name + + # assert_output "#{remote_file_name} has been renamed to #{new_name}\n" do + # move_file bucket_name: bucket.name, + # file_name: remote_file_name, + # new_name: new_name + # end + + # assert_nil bucket.file remote_file_name + # refute_nil bucket.file new_name + # end + + # it "compose_file" do + # file_1 = bucket.create_file local_file, file_1_name + # file_2 = bucket.create_file local_file, file_2_name + + # expected_out = "Composed new file #{remote_file_name} in the bucket #{bucket.name} " \ + # "by combining #{file_1.name} and #{file_2.name}\n" + # assert_output expected_out do + # compose_file bucket_name: bucket.name, + # first_file_name: file_1.name, + # second_file_name: file_2.name, + # destination_file_name: remote_file_name + # end + + # refute_nil bucket.file remote_file_name + # end + + # it "copy_file" do + # bucket.create_file local_file, remote_file_name + # assert_nil secondary_bucket.file remote_file_name + + # assert_output "#{remote_file_name} in #{bucket.name} copied to #{remote_file_name} in #{secondary_bucket.name}\n" do + # copy_file source_bucket_name: bucket.name, + # source_file_name: remote_file_name, + # destination_bucket_name: secondary_bucket.name, + # destination_file_name: remote_file_name + # end + + # refute_nil bucket.file remote_file_name + # refute_nil secondary_bucket.file remote_file_name + # end + + # it "copy_file_archived_generation" do + # file = bucket.create_file local_file, remote_file_name + # assert_nil secondary_bucket.file remote_file_name + + # expected_out = "Generation #{file.generation} of the file #{remote_file_name} in bucket #{bucket.name} copied " \ + # "to file #{remote_file_name} in bucket #{secondary_bucket.name}\n" + # assert_output expected_out do + # copy_file_archived_generation source_bucket_name: bucket.name, + # source_file_name: remote_file_name, + # generation: file.generation, + # destination_bucket_name: secondary_bucket.name, + # destination_file_name: remote_file_name + # end + + # refute_nil bucket.file remote_file_name + # refute_nil secondary_bucket.file remote_file_name + # end + + # it "rotate_encryption_key" do + # bucket.create_file local_file, remote_file_name, encryption_key: encryption_key + + # new_encryption_key = OpenSSL::Cipher.new("aes-256-cfb").encrypt.random_key + # file_contents = File.read local_file + + # assert_output "The encryption key for #{remote_file_name} in #{bucket.name} was rotated.\n" do + # rotate_encryption_key bucket_name: bucket.name, + # file_name: remote_file_name, + # current_encryption_key: encryption_key, + # new_encryption_key: new_encryption_key + # end + + # Tempfile.open [downloaded_file] do |tmpfile| + # tmpfile.binmode + + # bucket.file(remote_file_name).download tmpfile, encryption_key: new_encryption_key + # downloaded_contents = File.read tmpfile + # assert_equal file_contents, downloaded_contents + # end + # end + + # it "object_csek_to_cmek" do + # file = bucket.create_file local_file, remote_file_name, encryption_key: encryption_key + # assert file.encryption_key_sha256 + # expected_out = "File #{file.name} in bucket #{bucket.name} is now managed by the KMS key #{kms_key} instead of " \ + # "a customer-supplied encryption key\n" + # assert_output expected_out do + # object_csek_to_cmek bucket_name: bucket.name, + # file_name: file.name, + # encryption_key: encryption_key, + # kms_key_name: kms_key + # end + + # file = bucket.file file.name + + # assert file.exists? + # assert_match kms_key, file.kms_key + # assert_nil file.encryption_key_sha256 + # end + + # it "generate_signed_url_v2" do + # bucket.create_file local_file, remote_file_name + + # out, _err = capture_io do + # generate_signed_url_v2 bucket_name: bucket.name, + # file_name: remote_file_name + # end + + # assert_match "The signed url for #{remote_file_name} is", out + # signed_url = out.scan(/http.*$/).first + # refute_nil signed_url + + # file_contents = Net::HTTP.get URI(signed_url) + # assert_equal file_contents, File.read(local_file) + # end + + # it "generate_signed_url_v4" do + # bucket.create_file local_file, remote_file_name + + # out, _err = capture_io do + # generate_signed_url_v4 bucket_name: bucket.name, + # file_name: remote_file_name + # end + + # signed_url = out.scan(/http.*$/).first + # refute_nil signed_url + + # file_contents = Net::HTTP.get URI(signed_url) + # assert_equal file_contents, File.read(local_file) + # end + + # it "generate_upload_signed_url_v4" do + # refute bucket.file remote_file_name + + # out, _err = capture_io do + # generate_upload_signed_url_v4 bucket_name: bucket.name, + # file_name: remote_file_name + # end + + # signed_url = out.scan(/http.*$/).first + # refute_nil signed_url + + # uri = URI.parse signed_url + # http = Net::HTTP.new uri.host + # request = Net::HTTP::Put.new uri.request_uri + # request.body = File.read local_file + # request["Content-Type"] = "text/plain" + # request["Content-Length"] = File.size local_file + + # response = http.request request + # assert_equal response.code, "200" + + # assert bucket.file remote_file_name + # end + # describe "post object" do + # require "net/http" + # require "uri" + # let(:uri) { URI.parse Google::Cloud::Storage::GOOGLEAPIS_URL } + # let(:data) { File.expand_path "../../acceptance/data/logo.jpg", __dir__ } + + # it "generate_signed_post_policy_v4" do + # refute bucket.file remote_file_name + # post_object = nil + # out, _err = capture_io do + # post_object = generate_signed_post_policy_v4 bucket_name: bucket.name, + # file_name: remote_file_name + # end + + # assert_includes out, "" + # assert_includes out, "" + + # assert post_object + # expected_keys = [ + # "key", + # "policy", + # "x-goog-algorithm", + # "x-goog-credential", + # "x-goog-date", + # "x-goog-meta-test", + # "x-goog-signature" + # ] + # assert_equal expected_keys, post_object.fields.keys.sort + + # # For some weird (as yet unidentified) reason, keeping file as the first value + # # makes the http request fail intermittently with a 400 error. + # # Moving file as the last entry in the form_data array works fine. + # form_data = post_object.fields.map do |key, value| + # [key, value] + # end + # form_data.push ["file", File.open(data)] + + # http = Net::HTTP.new uri.host, uri.port + # http.use_ssl = true + # request = Net::HTTP::Post.new post_object.url + # request.set_form form_data, "multipart/form-data" + + # response = http.request request + + # _(response.code).must_equal "204" + # file = bucket.file post_object.fields["key"] + # _(file).wont_be :nil? + # Tempfile.open ["google-cloud-logo", ".jpg"] do |tmpfile| + # tmpfile.binmode + # downloaded = file.download tmpfile + # _(File.read(downloaded.path, mode: "rb")).must_equal File.read(data, mode: "rb") + # end + # end + # end + + # it "set_event_based_hold" do + # bucket.create_file local_file, remote_file_name + + # assert_output "Event-based hold was set for #{remote_file_name}.\n" do + # set_event_based_hold bucket_name: bucket.name, + # file_name: remote_file_name + # end + + # assert bucket.file(remote_file_name).event_based_hold? + # bucket.file(remote_file_name).release_event_based_hold! + # end + + # it "release_event_based_hold" do + # bucket.create_file local_file, remote_file_name + # bucket.file(remote_file_name).set_event_based_hold! + # assert bucket.file(remote_file_name).event_based_hold? + + # assert_output "Event-based hold was released for #{remote_file_name}.\n" do + # release_event_based_hold bucket_name: bucket.name, + # file_name: remote_file_name + # end + + # refute bucket.file(remote_file_name).event_based_hold? + # end + + # it "set_temporary_hold" do + # bucket.create_file local_file, remote_file_name + # refute bucket.file(remote_file_name).temporary_hold? + + # assert_output "Temporary hold was set for #{remote_file_name}.\n" do + # set_temporary_hold bucket_name: bucket.name, + # file_name: remote_file_name + # end + + # assert bucket.file(remote_file_name).temporary_hold? + # bucket.file(remote_file_name).release_temporary_hold! + # end + + # it "release_temporary_hold" do + # bucket.create_file local_file, remote_file_name + # bucket.file(remote_file_name).set_temporary_hold! + # assert bucket.file(remote_file_name).temporary_hold? + + # assert_output "Temporary hold was released for #{remote_file_name}.\n" do + # release_temporary_hold bucket_name: bucket.name, file_name: remote_file_name + # end + + # refute bucket.file(remote_file_name).temporary_hold? + # end + + # it "change_file_storage_class" do + # bucket.create_file local_file, remote_file_name + # assert_equal "STANDARD", bucket.file(remote_file_name).storage_class + + # assert_output "File #{remote_file_name} in bucket #{bucket.name} had its storage class set to NEARLINE\n" do + # change_file_storage_class bucket_name: bucket.name, file_name: remote_file_name + # end + + # assert_equal "NEARLINE", bucket.file(remote_file_name).storage_class + # end + + # it "storage_download_byte_range" do + # bucket.create_file local_file, remote_file_name + + # Tempfile.open [downloaded_file] do |tmpfile| + # tmpfile.binmode + + # assert_output "Downloaded bytes 0 to 3 of object #{remote_file_name} from bucket #{bucket.name}" \ + # + " to local file #{tmpfile}.\n" do + # StorageDownloadByteRange.new.storage_download_byte_range bucket_name: bucket.name, + # file_name: remote_file_name, + # start_byte: 0, + # end_byte: 3, + # local_file_path: tmpfile + # end + + # assert File.file? tmpfile + # end + # end + + # it "storage_stream_file_upload" do + # file_obj = StringIO.new file_content + # assert_output "Stream data uploaded to #{remote_file_name} in bucket #{bucket.name}\n" do + # StorageStreamFileUpload.new.storage_stream_file_upload bucket_name: bucket.name, + # local_file_obj: file_obj, + # file_name: remote_file_name + # end + # end + + # it "storage_stream_file_download" do + # bucket.create_file StringIO.new(file_content), remote_file_name + # assert_output "The full downloaded file contents are: \"#{file_content}\"\n" do + # StorageStreamFileDownload.new.storage_stream_file_download bucket_name: bucket.name, + # file_name: remote_file_name, + # local_file_obj: StringIO.new + # end + # end end diff --git a/google-cloud-storage/samples/storage_initiate_resumable_upload.rb b/google-cloud-storage/samples/storage_initiate_resumable_upload.rb new file mode 100644 index 000000000000..b8f874435860 --- /dev/null +++ b/google-cloud-storage/samples/storage_initiate_resumable_upload.rb @@ -0,0 +1,38 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +require "google/cloud/storage" + +# [START storage_initiate_resumable_upload] + +def initiate_resumable_upload bucket_name:, file:, file_name:, chunk_size: + # The ID of your GCS bucket + # bucket_name = "your-unique-bucket-name" + storage = Google::Cloud::Storage.new upload_chunk_size: chunk_size + + # storage = Google::Cloud::Storage.new(upload_chunk_size: chunk_size, upload_url: upload_url, delete_upload: true) + + bucket = storage.bucket bucket_name + my_file= bucket.create_file file, file_name + binding.pry + + puts "Resumable upload completed for #{file_name}" + + +end +# [END storage_initiate_resumable_upload] + +if $PROGRAM_NAME == __FILE__ + storage_initiate_resumable_upload bucket_name: ARGV.shift, file: ARGV.shift, file_name: ARGV.shift, + chunk_size: ARGV.shift +end diff --git a/google-cloud-storage/samples/storage_restart_resumable_upload.rb b/google-cloud-storage/samples/storage_restart_resumable_upload.rb new file mode 100644 index 000000000000..f45e3934ad78 --- /dev/null +++ b/google-cloud-storage/samples/storage_restart_resumable_upload.rb @@ -0,0 +1,45 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +require "google/cloud/storage" + require_relative '../lib/google/cloud/storage/service' + require_relative '../lib/google/cloud/storage' + require "pry" + + +# [START storage_restart_resumable_upload] +def restart_resumable_upload bucket_name: + # The ID of your GCS bucket + # bucket_name = "your-unique-bucket-name" + + # Create a 3 MB file using StringIO + file_content = "A" * (3 * 1024 * 1024) # 3 MB of 'A' characters + file = StringIO.new file_content + + file_name = "my_io_3mb4.txt" + chunk_size = 256 * 1024 * 3 + + # storage = Google::Cloud::Storage.new upload_chunk_size: chunk_size + +# upload_url= "https://storage.googleapis.com/upload/storage/v1/b/ruby_7/o?name=my_io_3mb4.txt&name=my_io_3mb4.txt&uploadType=resumable&upload_id=AHMx-iED_BP-upLbxEmspBKvvtpCWIWvslGEP8RsPFChF8NEluVBNsvScs3OaVzREDrLvT3UJYTR1FSaRAMFEY5s2i4PM8Jmw2BuGR8kTYyQ4ZY" + + storage = Google::Cloud::Storage.new(upload_chunk_size: chunk_size, upload_url: upload_url) + bucket = storage.bucket bucket_name + + bucket.create_file file, file_name +end +# [END storage_restart_resumable_upload] + +# if $PROGRAM_NAME == __FILE__ + storage_restart_resumable_upload bucket_name: "ruby_7" +# end diff --git a/google-cloud-storage/test/google/cloud/storage/bucket_test.rb b/google-cloud-storage/test/google/cloud/storage/bucket_test.rb index 1dc5babfab0b..4e79a3e93554 100644 --- a/google-cloud-storage/test/google/cloud/storage/bucket_test.rb +++ b/google-cloud-storage/test/google/cloud/storage/bucket_test.rb @@ -1382,6 +1382,36 @@ mock.verify end + it "restarts a resumable upload with upload_id" do + new_file_name = random_file_path + upload_id= "TEST_ID" + + Tempfile.open ["google-cloud", ".txt"] do |tmpfile| + tmpfile.write "Hello world" + tmpfile.rewind + mock = Minitest::Mock.new + mock.expect :restart_resumable_upload, create_file_gapi(bucket.name, new_file_name), + [bucket.name, tmpfile, upload_id], + **restart_resumable_upload_args(options: {}) + bucket.service.mocked_service = mock + bucket.restart_resumable_upload tmpfile, upload_id + + mock.verify + end + end + + it "deletes a resumable upload with upload_id" do + upload_id= "TEST_ID" + + mock = Minitest::Mock.new + mock.expect :delete_resumable_upload, true, + [bucket.name, upload_id], + **delete_resumable_upload_args(options: {delete_upload: true}) + bucket.service.mocked_service = mock + bucket.delete_resumable_upload upload_id + mock.verify + end + def create_file_gapi bucket=nil, name = nil Google::Apis::StorageV1::Object.from_json random_file_hash(bucket, name).to_json end diff --git a/google-cloud-storage/test/helper.rb b/google-cloud-storage/test/helper.rb index 50455d7c7e16..b7597a746408 100644 --- a/google-cloud-storage/test/helper.rb +++ b/google-cloud-storage/test/helper.rb @@ -363,6 +363,18 @@ def insert_object_args name: nil, } end + def delete_resumable_upload_args options: {} + { + options: options + } + end + + def restart_resumable_upload_args options: {} + { + options: options + } + end + def get_object_args generation: nil, if_generation_match: nil, if_generation_not_match: nil,