@@ -7,18 +7,17 @@
 describe "Restore from crash", :integration => true do
   include_context "setup plugin"

-  let(:options) { main_options.merge({ "restore" => true, "canned_acl" => "public-read-write" }) }
-
   let(:number_of_files) { 5 }
   let(:dummy_content) { "foobar\n" * 100 }
-  let(:factory) { LogStash::Outputs::S3::TemporaryFileFactory.new(prefix, tags, "none", temporary_directory)}

   before do
     clean_remote_files(prefix)
   end

-
   context 'with a non-empty tempfile' do
+    let(:options) { main_options.merge({ "restore" => true, "canned_acl" => "public-read-write" }) }
+    let(:factory) { LogStash::Outputs::S3::TemporaryFileFactory.new(prefix, tags, "none", temporary_directory)}
+
     before do
       # Creating a factory always creates a file
       factory.current.write(dummy_content)
@@ -41,6 +40,9 @@
   end

   context 'with an empty tempfile' do
+    let(:options) { main_options.merge({ "restore" => true, "canned_acl" => "public-read-write" }) }
+    let(:factory) { LogStash::Outputs::S3::TemporaryFileFactory.new(prefix, tags, "none", temporary_directory)}
+
     before do
       factory.current
       factory.rotate!
@@ -63,5 +65,68 @@
       expect(bucket_resource.objects(:prefix => prefix).count).to eq(0)
     end
   end
+
+  context "#gzip encoding" do
+    let(:options) { main_options.merge({ "restore" => true, "canned_acl" => "public-read-write", "encoding" => "gzip" }) }
+    let(:factory) { LogStash::Outputs::S3::TemporaryFileFactory.new(prefix, tags, "gzip", temporary_directory)}
+    describe "with empty recovered file" do
+      before do
+        # Creating a factory always creates a file
+        factory.current.write('')
+        factory.current.fsync
+        factory.current.close
+      end
+
+      it 'should not upload and not remove temp file' do
+        subject.register
+        try(20) do
+          expect(bucket_resource.objects(:prefix => prefix).count).to eq(0)
+          expect(Dir.glob(File.join(temporary_directory, "*")).size).to eq(1)
+        end
+      end
+    end
+
+    describe "with a healthy recovered file whose size is greater than zero" do
+      before do
+        # Creating a factory always creates a file
+        factory.current.write(dummy_content)
+        factory.current.fsync
+        factory.current.close
+
+        (number_of_files - 1).times do
+          factory.rotate!
+          factory.current.write(dummy_content)
+          factory.current.fsync
+          factory.current.close
+        end
+      end
+
+      it 'should recover, upload to S3 and remove temp file' do
+        subject.register
+        try(20) do
+          expect(bucket_resource.objects(:prefix => prefix).count).to eq(number_of_files)
+          expect(Dir.glob(File.join(temporary_directory, "*")).size).to eq(0)
+          expect(bucket_resource.objects(:prefix => prefix).first.acl.grants.collect(&:permission)).to include("READ", "WRITE")
+        end
+      end
+    end
+
+    describe "with failure when recovering" do
+      before do
+        # Creating a factory always creates a file
+        factory.current.write(dummy_content)
+        factory.current.fsync
+      end
+
+      it 'should not upload to S3 and not remove temp file' do
+        subject.register
+        try(20) do
+          expect(bucket_resource.objects(:prefix => prefix).count).to eq(0)
+          expect(Dir.glob(File.join(temporary_directory, "*")).size).to eq(1)
+        end
+      end
+    end
+  end
+
 end
