test_s3concat.py

import boto3
from s3_concat import S3Concat
from moto import mock_s3


###
# add_file
###
@mock_s3
def test_add_file():
    session = boto3.session.Session()
    s3 = session.client('s3')
    # Need to create the bucket since this is in Moto's 'virtual' AWS account
    s3.create_bucket(Bucket='my-bucket')

    s3.put_object(
        Bucket='my-bucket',
        Key='some_folder/thing1.json',
        Body=b'{"foo": "Test File Contents"}',
    )

    concat = S3Concat('my-bucket', 'all_data.json', '10MB', session=session)
    concat.add_file('some_folder/thing1.json')

    assert concat.all_files == [('some_folder/thing1.json', 29)]


@mock_s3
def test_concat_text_file():
    session = boto3.session.Session()
    s3 = session.client('s3')
    # Need to create the bucket since this is in Moto's 'virtual' AWS account
    s3.create_bucket(Bucket='my-bucket')

    s3.put_object(
        Bucket='my-bucket',
        Key='some_folder/thing1.json',
        Body=b'Thing1\n',
    )
    s3.put_object(
        Bucket='my-bucket',
        Key='some_folder/thing2.json',
        Body=b'Thing2\n',
    )

    concat = S3Concat('my-bucket', 'all_things.json', None, session=session)
    concat.add_files('some_folder')
    concat.concat()

    concat_output = s3.get_object(
        Bucket='my-bucket',
        Key='all_things.json'
    )['Body'].read().decode('utf-8')

    assert concat_output == 'Thing1\nThing2\n'


@mock_s3
def test_concat_gzip_content():
    """Create 2 gzip files, then use s3concat to create a single gzip file

    To test, un-compress and read contents of the concat'd file
    """
    import gzip
    import tempfile

    session = boto3.session.Session()
    s3 = session.client('s3')
    # Need to create the bucket since this is in Moto's 'virtual' AWS account
    s3.create_bucket(Bucket='my-bucket')

    file1 = tempfile.NamedTemporaryFile()
    with gzip.open(file1.name, 'wb') as f:
        f.write(b"file 1 contents\n")
    s3.upload_file(file1.name, 'my-bucket', 'some_folder/thing1.gz')

    file2 = tempfile.NamedTemporaryFile()
    with gzip.open(file2.name, 'wb') as f:
        f.write(b"file 2 contents\n")
    s3.upload_file(file2.name, 'my-bucket', 'some_folder/thing2.gz')

    concat = S3Concat('my-bucket', 'all_data.gz', None, session=session)
    concat.add_files('some_folder')
    concat.concat()

    all_data_file = tempfile.NamedTemporaryFile()
    s3.download_file('my-bucket', 'all_data.gz', all_data_file.name)

    with gzip.open(all_data_file.name, 'rb') as f:
        content_output = f.read()

    assert content_output == b'file 1 contents\nfile 2 contents\n'
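

###
# The sketch below is illustrative only and not part of the upstream test
# suite. It assumes, based on test_add_file above, that repeated add_file()
# calls simply accumulate (key, size) tuples in all_files in call order;
# only API calls already exercised in this file are used.
###
@mock_s3
def test_add_file_multiple_sketch():
    session = boto3.session.Session()
    s3 = session.client('s3')
    # Need to create the bucket since this is in Moto's 'virtual' AWS account
    s3.create_bucket(Bucket='my-bucket')

    # Two small objects; bodies are 7 and 8 bytes respectively
    s3.put_object(
        Bucket='my-bucket',
        Key='some_folder/thing1.json',
        Body=b'Thing1\n',
    )
    s3.put_object(
        Bucket='my-bucket',
        Key='some_folder/thing2.json',
        Body=b'Thing22\n',
    )

    concat = S3Concat('my-bucket', 'all_data.json', '10MB', session=session)
    concat.add_file('some_folder/thing1.json')
    concat.add_file('some_folder/thing2.json')

    # Assumed behavior: each add_file() appends (key, object size) to all_files
    assert concat.all_files == [
        ('some_folder/thing1.json', 7),
        ('some_folder/thing2.json', 8),
    ]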