github.com/anuvu/storage@v1.12.13/tests/bigdata.bats

#!/usr/bin/env bats

load helpers

@test "image-data" {
	# Bail if "sha256sum" isn't available.
	if test -z "$(which sha256sum 2> /dev/null)" ; then
		skip "need sha256sum"
	fi

	# Create a layer.
	run storage --debug=false create-layer
	[ "$status" -eq 0 ]
	[ "$output" != "" ]
	layer=$output

	# Create an image using that layer.
	run storage --debug=false create-image $layer
	[ "$status" -eq 0 ]
	[ "$output" != "" ]
	image=${output%% *}

	# Make sure the image can be located.
	storage exists -i $image

	# Make sure the image has no big data items associated with it.
	run storage --debug=false list-image-data $image
	[ "$status" -eq 0 ]
	[ "$output" = "" ]

	# Create two random files.
	createrandom $TESTDIR/big-item-1 1234
	createrandom $TESTDIR/big-item-2 5678

	# Set each of those files as a big data item named after the file.
	storage set-image-data -f $TESTDIR/big-item-1 $image big-item-1
	storage set-image-data -f $TESTDIR/big-item-2 $image big-item-2

	# Get a list of the items. Make sure they're both listed.
	run storagewithsorting --debug=false list-image-data $image
	[ "$status" -eq 0 ]
	[ "${#lines[*]}" -eq 2 ]
	[ "${lines[0]}" = "big-item-1" ]
	[ "${lines[1]}" = "big-item-2" ]

	# Check that the recorded sizes of the items match what we decided above.
	run storage get-image-data-size $image no-such-item
	[ "$status" -ne 0 ]
	run storage --debug=false get-image-data-size $image big-item-1
	[ "$status" -eq 0 ]
	[ "$output" -eq 1234 ]
	run storage --debug=false get-image-data-size $image big-item-2
	[ "$status" -eq 0 ]
	[ "$output" -eq 5678 ]

	# Save the contents of the big data items to disk and compare them with the originals.
	run storage --debug=false get-image-data $image no-such-item
	[ "$status" -ne 0 ]
	storage get-image-data -f $TESTDIR/big-item-1.2 $image big-item-1
	cmp $TESTDIR/big-item-1 $TESTDIR/big-item-1.2
	storage get-image-data -f $TESTDIR/big-item-2.2 $image big-item-2
	cmp $TESTDIR/big-item-2 $TESTDIR/big-item-2.2

	# Read the recorded digests of the items and compare them with the digests of the originals.
	run storage get-image-data-digest $image no-such-item
	[ "$status" -ne 0 ]
	run storage --debug=false get-image-data-digest $image big-item-1
	[ "$status" -eq 0 ]
	sum=$(sha256sum $TESTDIR/big-item-1)
	sum=sha256:"${sum%% *}"
	echo output:"$output":
	echo sum:"$sum":
	[ "$output" = "$sum" ]
	run storage --debug=false get-image-data-digest $image big-item-2
	[ "$status" -eq 0 ]
	sum=$(sha256sum $TESTDIR/big-item-2)
	sum=sha256:"${sum%% *}"
	echo output:"$output":
	echo sum:"$sum":
	[ "$output" = "$sum" ]
}

@test "container-data" {
	# Bail if "sha256sum" isn't available.
	if test -z "$(which sha256sum 2> /dev/null)" ; then
		skip "need sha256sum"
	fi

	# Create a layer.
	run storage --debug=false create-layer
	[ "$status" -eq 0 ]
	[ "$output" != "" ]
	layer=$output

	# Create an image using that layer.
	run storage --debug=false create-image $layer
	[ "$status" -eq 0 ]
	[ "$output" != "" ]
	image=${output%% *}

	# Create a container based on that image.
	run storage --debug=false create-container $image
	[ "$status" -eq 0 ]
	[ "$output" != "" ]
	container=${output%% *}

	# Make sure the container can be located.
	storage exists -c $container

	# Make sure the container has no big data items associated with it.
	run storage --debug=false list-container-data $container
	[ "$status" -eq 0 ]
	[ "$output" = "" ]

	# Create two random files.
	createrandom $TESTDIR/big-item-1 1234
	createrandom $TESTDIR/big-item-2 5678

	# Set each of those files as a big data item named after the file.
	storage set-container-data -f $TESTDIR/big-item-1 $container big-item-1
	storage set-container-data -f $TESTDIR/big-item-2 $container big-item-2

	# Get a list of the items. Make sure they're both listed.
	run storage --debug=false list-container-data $container
	[ "$status" -eq 0 ]
	[ "${#lines[*]}" -eq 2 ]
	[ "${lines[0]}" = "big-item-1" ]
	[ "${lines[1]}" = "big-item-2" ]

	# Check that the recorded sizes of the items match what we decided above.
	run storage get-container-data-size $container no-such-item
	[ "$status" -ne 0 ]
	run storage --debug=false get-container-data-size $container big-item-1
	echo "$output"
	[ "$status" -eq 0 ]
	[ "$output" -eq 1234 ]
	run storage --debug=false get-container-data-size $container big-item-2
	[ "$status" -eq 0 ]
	[ "$output" -eq 5678 ]

	# Save the contents of the big data items to disk and compare them with the originals.
	run storage --debug=false get-container-data $container no-such-item
	[ "$status" -ne 0 ]
	storage get-container-data -f $TESTDIR/big-item-1.2 $container big-item-1
	cmp $TESTDIR/big-item-1 $TESTDIR/big-item-1.2
	storage get-container-data -f $TESTDIR/big-item-2.2 $container big-item-2
	cmp $TESTDIR/big-item-2 $TESTDIR/big-item-2.2

	# Read the recorded digests of the items and compare them with the digests of the originals.
	run storage get-container-data-digest $container no-such-item
	[ "$status" -ne 0 ]
	run storage --debug=false get-container-data-digest $container big-item-1
	[ "$status" -eq 0 ]
	sum=$(sha256sum $TESTDIR/big-item-1)
	sum=sha256:"${sum%% *}"
	echo output:"$output":
	echo sum:"$sum":
	[ "$output" = "$sum" ]
	run storage --debug=false get-container-data-digest $container big-item-2
	[ "$status" -eq 0 ]
	sum=$(sha256sum $TESTDIR/big-item-2)
	sum=sha256:"${sum%% *}"
	echo output:"$output":
	echo sum:"$sum":
	[ "$output" = "$sum" ]
}
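# Both tests depend on helpers pulled in by "load helpers" (tests/helpers.bash),
# notably createrandom, which gives the size and digest checks above inputs of a
# known byte count. The function below is only a minimal sketch of what such a
# helper might look like, under the assumption that it writes N random bytes to
# the named file; it is not the repository's actual implementation, and the
# _sketch suffix marks it as hypothetical.
createrandom_sketch() {
	# $1: destination file, $2: size in bytes (default 256).
	dd if=/dev/urandom of="$1" bs=1 count="${2:-256}" 2> /dev/null
}
# Example: createrandom_sketch "$TESTDIR"/big-item-1 1234 produces a 1234-byte
# file, so "get-image-data-size ... big-item-1" is expected to report 1234 and
# "get-image-data-digest" to match sha256sum's output prefixed with "sha256:".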