s3.py 6.4 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200
import calendar
import time

try:
    from boto import s3
    from boto.s3.prefix import Prefix
    from boto.s3.key import Key
except ImportError:
    s3 = None

from flask import redirect

from flask_admin.babel import gettext

from . import BaseFileAdmin
  11. class S3Storage(object):
  12. """
  13. Storage object representing files on an Amazon S3 bucket.
  14. Usage::
  15. from flask_admin.contrib.fileadmin import BaseFileAdmin
  16. from flask_admin.contrib.fileadmin.s3 import S3Storage
  17. class MyS3Admin(BaseFileAdmin):
  18. # Configure your class however you like
  19. pass
  20. fileadmin_view = MyS3Admin(storage=S3Storage(...))
  21. """
  22. def __init__(self, bucket_name, region, aws_access_key_id,
  23. aws_secret_access_key):
  24. """
  25. Constructor
  26. :param bucket_name:
  27. Name of the bucket that the files are on.
  28. :param region:
  29. Region that the bucket is located
  30. :param aws_access_key_id:
  31. AWS Access Key ID
  32. :param aws_secret_access_key:
  33. AWS Secret Access Key
  34. Make sure the credentials have the correct permissions set up on
  35. Amazon or else S3 will return a 403 FORBIDDEN error.
  36. """
  37. if not s3:
  38. raise ValueError('Could not import boto. You can install boto by '
  39. 'using pip install boto')
  40. connection = s3.connect_to_region(
  41. region,
  42. aws_access_key_id=aws_access_key_id,
  43. aws_secret_access_key=aws_secret_access_key,
  44. )
  45. self.bucket = connection.get_bucket(bucket_name)
  46. self.separator = '/'
  47. def get_files(self, path, directory):
  48. def _strip_path(name, path):
  49. if name.startswith(path):
  50. return name.replace(path, '', 1)
  51. return name
  52. def _remove_trailing_slash(name):
  53. return name[:-1]
  54. def _iso_to_epoch(timestamp):
  55. dt = time.strptime(timestamp.split(".")[0], "%Y-%m-%dT%H:%M:%S")
  56. return int(time.mktime(dt))
  57. files = []
  58. directories = []
  59. if path and not path.endswith(self.separator):
  60. path += self.separator
  61. for key in self.bucket.list(path, self.separator):
  62. if key.name == path:
  63. continue
  64. if isinstance(key, Prefix):
  65. name = _remove_trailing_slash(_strip_path(key.name, path))
  66. key_name = _remove_trailing_slash(key.name)
  67. directories.append((name, key_name, True, 0, 0))
  68. else:
  69. last_modified = _iso_to_epoch(key.last_modified)
  70. name = _strip_path(key.name, path)
  71. files.append((name, key.name, False, key.size, last_modified))
  72. return directories + files
  73. def _get_bucket_list_prefix(self, path):
  74. parts = path.split(self.separator)
  75. if len(parts) == 1:
  76. search = ''
  77. else:
  78. search = self.separator.join(parts[:-1]) + self.separator
  79. return search
  80. def _get_path_keys(self, path):
  81. search = self._get_bucket_list_prefix(path)
  82. return {key.name for key in self.bucket.list(search, self.separator)}
  83. def is_dir(self, path):
  84. keys = self._get_path_keys(path)
  85. return path + self.separator in keys
  86. def path_exists(self, path):
  87. if path == '':
  88. return True
  89. keys = self._get_path_keys(path)
  90. return path in keys or (path + self.separator) in keys
  91. def get_base_path(self):
  92. return ''
  93. def get_breadcrumbs(self, path):
  94. accumulator = []
  95. breadcrumbs = []
  96. for n in path.split(self.separator):
  97. accumulator.append(n)
  98. breadcrumbs.append((n, self.separator.join(accumulator)))
  99. return breadcrumbs
  100. def send_file(self, file_path):
  101. key = self.bucket.get_key(file_path)
  102. if key is None:
  103. raise ValueError()
  104. return redirect(key.generate_url(3600))
  105. def save_file(self, path, file_data):
  106. key = Key(self.bucket, path)
  107. key.set_contents_from_file(file_data.stream)
  108. def delete_tree(self, directory):
  109. self._check_empty_directory(directory)
  110. self.bucket.delete_key(directory + self.separator)
  111. def delete_file(self, file_path):
  112. self.bucket.delete_key(file_path)
  113. def make_dir(self, path, directory):
  114. dir_path = self.separator.join([path, (directory + self.separator)])
  115. key = Key(self.bucket, dir_path)
  116. key.set_contents_from_string('')
  117. def _check_empty_directory(self, path):
  118. if not self._is_directory_empty(path):
  119. raise ValueError(gettext('Cannot operate on non empty '
  120. 'directories'))
  121. return True
  122. def rename_path(self, src, dst):
  123. if self.is_dir(src):
  124. self._check_empty_directory(src)
  125. src += self.separator
  126. dst += self.separator
  127. self.bucket.copy_key(dst, self.bucket.name, src)
  128. self.delete_file(src)
  129. def _is_directory_empty(self, path):
  130. keys = self._get_path_keys(path + self.separator)
  131. return len(keys) == 1
  132. class S3FileAdmin(BaseFileAdmin):
  133. """
  134. Simple Amazon Simple Storage Service file-management interface.
  135. :param bucket_name:
  136. Name of the bucket that the files are on.
  137. :param region:
  138. Region that the bucket is located
  139. :param aws_access_key_id:
  140. AWS Access Key ID
  141. :param aws_secret_access_key:
  142. AWS Secret Access Key
  143. Sample usage::
  144. from flask_admin import Admin
  145. from flask_admin.contrib.fileadmin.s3 import S3FileAdmin
  146. admin = Admin()
  147. admin.add_view(S3FileAdmin('files_bucket', 'us-east-1', 'key_id', 'secret_key')
  148. """
  149. def __init__(self, bucket_name, region, aws_access_key_id,
  150. aws_secret_access_key, *args, **kwargs):
  151. storage = S3Storage(bucket_name, region, aws_access_key_id,
  152. aws_secret_access_key)
  153. super(S3FileAdmin, self).__init__(*args, storage=storage, **kwargs)