@@ -499,7 +499,7 @@ def dynamo_request(table_name, scan_hash = {}, select_opts = {})
       expect(items.size).to eq 101
     end
 
-    it 'loads unprocessed items' do
+    it 'loads unprocessed items for a table without a composite key' do
       # BatchGetItem has the following limitations:
       # * up to 100 items at once
       # * up to 16 MB at once
@@ -533,6 +533,40 @@ def dynamo_request(table_name, scan_hash = {}, select_opts = {})
       expect(items.map { |h| h[:id] }).to match_array(ids)
     end
 
+    it 'loads unprocessed items for a table with a composite key' do
+      # BatchGetItem has the following limitations:
+      # * up to 100 items at once
+      # * up to 16 MB at once
+      # * one item size up to 400 KB (common limitation)
+      #
+      # To reach the limits we will write data as large as possible
+      # and then read it back
+      #
+      # 100 * 400 KB = ~40 MB
+      # 40 MB / 16 MB ~ 3
+      # So we expect BatchGetItem to be called 3 times
+      #
+      # '15' is an experimentally found value
+      # it includes the size of ('id' + 'age') + some undocumented overhead
+
+      ids = (1..100).map { |id| [id.to_s, id] }
+
+      ids.each do |id, age|
+        text = '#' * (400.kilobytes - 15)
+        Dynamoid.adapter.put_item(table_with_composite_key, id: id, age: age, name: text)
+      end
+
+      expect(Dynamoid.adapter.client).to receive(:batch_get_item)
+        .exactly(3)
+        .times.and_call_original
+
+      results = Dynamoid.adapter.batch_get_item(table_with_composite_key => ids)
+      items = results[table_with_composite_key]
+
+      expect(items.size).to eq(100)
+      expect(items.map { |h| [h[:id], h[:age]] }).to match_array(ids)
+    end
+
     context 'when called with block' do
       it 'returns nil' do
         Dynamoid.adapter.put_item(table, id: '1')
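
Both specs exercise handling of UnprocessedKeys: when a BatchGetItem response hits the 16 MB limit, DynamoDB returns the remaining keys and the caller has to issue follow-up requests. A minimal sketch of that retry loop against the raw aws-sdk-dynamodb client, not Dynamoid's actual adapter code; batch_get_all and the table/key names below are illustrative only:

require 'aws-sdk-dynamodb'

# Sketch of the retry pattern the specs exercise, written against the raw
# AWS SDK instead of Dynamoid's adapter. For a composite-key table each key
# contains both the hash and range attributes, e.g. { 'id' => '1', 'age' => 1 }.
def batch_get_all(client, table_name, keys)
  items = []
  request_items = { table_name => { keys: keys } }

  until request_items.empty?
    resp = client.batch_get_item(request_items: request_items)
    items.concat(resp.responses[table_name] || [])

    # unprocessed_keys has the same shape as request_items;
    # an empty hash means everything fit under the 16 MB response limit.
    request_items = resp.unprocessed_keys
  end

  items
end

client = Aws::DynamoDB::Client.new
keys   = (1..100).map { |i| { 'id' => i.to_s, 'age' => i } }
items  = batch_get_all(client, 'table_with_composite_key', keys)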