# _s3fs.py -- Amazon S3 filesystem for PyFilesystem2.
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

import contextlib
import io
import itertools
import mimetypes
import os
import tempfile
import threading
from datetime import datetime
from ssl import SSLError

import boto3
from botocore.exceptions import ClientError, EndpointConnectionError
import six
from six import text_type

from fs import ResourceType
from fs import errors
from fs.base import FS
from fs.info import Info
from fs.mode import Mode
from fs.path import basename, dirname, forcedir, join, normpath, relpath
from fs.subfs import SubFS
from fs.time import datetime_to_epoch

__all__ = ["S3FS"]
  26. def _make_repr(class_name, *args, **kwargs):
  27. """
  28. Generate a repr string.
  29. Positional arguments should be the positional arguments used to
  30. construct the class. Keyword arguments should consist of tuples of
  31. the attribute value and default. If the value is the default, then
  32. it won't be rendered in the output.
  33. Here's an example::
  34. def __repr__(self):
  35. return make_repr('MyClass', 'foo', name=(self.name, None))
  36. The output of this would be something line ``MyClass('foo',
  37. name='Will')``.
  38. """
  39. arguments = [repr(arg) for arg in args]
  40. arguments.extend(
  41. "{}={!r}".format(name, value)
  42. for name, (value, default) in sorted(kwargs.items())
  43. if value != default
  44. )
  45. return "{}({})".format(class_name, ", ".join(arguments))
  46. class S3File(io.IOBase):
  47. """Proxy for a S3 file."""
  48. @classmethod
  49. def factory(cls, filename, mode, on_close):
  50. """Create a S3File backed with a temporary file."""
  51. _temp_file = tempfile.TemporaryFile()
  52. proxy = cls(_temp_file, filename, mode, on_close=on_close)
  53. return proxy
  54. def __repr__(self):
  55. return _make_repr(
  56. self.__class__.__name__, self.__filename, text_type(self.__mode)
  57. )
  58. def __init__(self, f, filename, mode, on_close=None):
  59. self._f = f
  60. self.__filename = filename
  61. self.__mode = mode
  62. self._on_close = on_close
  63. def __enter__(self):
  64. return self
  65. def __exit__(self, exc_type, exc_value, traceback):
  66. self.close()
  67. @property
  68. def raw(self):
  69. return self._f
  70. def close(self):
  71. if self._on_close is not None:
  72. self._on_close(self)
  73. @property
  74. def closed(self):
  75. return self._f.closed
  76. def fileno(self):
  77. return self._f.fileno()
  78. def flush(self):
  79. return self._f.flush()
  80. def isatty(self):
  81. return self._f.asatty()
  82. def readable(self):
  83. return self.__mode.reading
  84. def readline(self, limit=-1):
  85. return self._f.readline(limit)
  86. def readlines(self, hint=-1):
  87. if hint == -1:
  88. return self._f.readlines(hint)
  89. else:
  90. size = 0
  91. lines = []
  92. for line in iter(self._f.readline, b""):
  93. lines.append(line)
  94. size += len(line)
  95. if size > hint:
  96. break
  97. return lines
  98. def seek(self, offset, whence=os.SEEK_SET):
  99. if whence not in (os.SEEK_CUR, os.SEEK_END, os.SEEK_SET):
  100. raise ValueError("invalid value for 'whence'")
  101. self._f.seek(offset, whence)
  102. return self._f.tell()
  103. def seekable(self):
  104. return True
  105. def tell(self):
  106. return self._f.tell()
  107. def writable(self):
  108. return self.__mode.writing
  109. def writelines(self, lines):
  110. return self._f.writelines(lines)
  111. def read(self, n=-1):
  112. if not self.__mode.reading:
  113. raise IOError("not open for reading")
  114. return self._f.read(n)
  115. def readall(self):
  116. return self._f.readall()
  117. def readinto(self, b):
  118. return self._f.readinto()
  119. def write(self, b):
  120. if not self.__mode.writing:
  121. raise IOError("not open for reading")
  122. self._f.write(b)
  123. return len(b)
  124. def truncate(self, size=None):
  125. if size is None:
  126. size = self._f.tell()
  127. self._f.truncate(size)
  128. return size
  129. @contextlib.contextmanager
  130. def s3errors(path):
  131. """Translate S3 errors to FSErrors."""
  132. try:
  133. yield
  134. except ClientError as error:
  135. _error = error.response.get("Error", {})
  136. error_code = _error.get("Code", None)
  137. response_meta = error.response.get("ResponseMetadata", {})
  138. http_status = response_meta.get("HTTPStatusCode", 200)
  139. error_msg = _error.get("Message", None)
  140. if error_code == "NoSuchBucket":
  141. raise errors.ResourceError(path, exc=error, msg=error_msg)
  142. if http_status == 404:
  143. raise errors.ResourceNotFound(path)
  144. elif http_status == 403:
  145. raise errors.PermissionDenied(path=path, msg=error_msg)
  146. else:
  147. raise errors.OperationFailed(path=path, exc=error)
  148. except SSLError as error:
  149. raise errors.OperationFailed(path, exc=error)
  150. except EndpointConnectionError as error:
  151. raise errors.RemoteConnectionError(path, exc=error, msg="{}".format(error))
  152. @six.python_2_unicode_compatible
  153. class S3FS(FS):
  154. """
  155. Construct an Amazon S3 filesystem for
  156. `PyFilesystem <https://pyfilesystem.org>`_
  157. :param str bucket_name: The S3 bucket name.
  158. :param str dir_path: The root directory within the S3 Bucket.
  159. Defaults to ``"/"``
  160. :param str aws_access_key_id: The access key, or ``None`` to read
  161. the key from standard configuration files.
  162. :param str aws_secret_access_key: The secret key, or ``None`` to
  163. read the key from standard configuration files.
  164. :param str endpoint_url: Alternative endpoint url (``None`` to use
  165. default).
  166. :param str aws_session_token:
  167. :param str region: Optional S3 region.
  168. :param str delimiter: The delimiter to separate folders, defaults to
  169. a forward slash.
  170. :param bool strict: When ``True`` (default) S3FS will follow the
  171. PyFilesystem specification exactly. Set to ``False`` to disable
  172. validation of destination paths which may speed up uploads /
  173. downloads.
  174. :param str cache_control: Sets the 'Cache-Control' header for uploads.
  175. :param str acl: Sets the Access Control List header for uploads.
  176. :param dict upload_args: A dictionary for additional upload arguments.
  177. See https://boto3.readthedocs.io/en/latest/reference/services/s3.html#S3.Object.put
  178. for details.
  179. :param dict download_args: Dictionary of extra arguments passed to
  180. the S3 client.
  181. """
  182. _meta = {
  183. "case_insensitive": False,
  184. "invalid_path_chars": "\0",
  185. "network": True,
  186. "read_only": False,
  187. "thread_safe": True,
  188. "unicode_paths": True,
  189. "virtual": False,
  190. }
  191. _object_attributes = [
  192. "accept_ranges",
  193. "cache_control",
  194. "content_disposition",
  195. "content_encoding",
  196. "content_language",
  197. "content_length",
  198. "content_type",
  199. "delete_marker",
  200. "e_tag",
  201. "expiration",
  202. "expires",
  203. "last_modified",
  204. "metadata",
  205. "missing_meta",
  206. "parts_count",
  207. "replication_status",
  208. "request_charged",
  209. "restore",
  210. "server_side_encryption",
  211. "sse_customer_algorithm",
  212. "sse_customer_key_md5",
  213. "ssekms_key_id",
  214. "storage_class",
  215. "version_id",
  216. "website_redirect_location",
  217. ]
  218. def __init__(
  219. self,
  220. bucket_name,
  221. dir_path="/",
  222. aws_access_key_id=None,
  223. aws_secret_access_key=None,
  224. aws_session_token=None,
  225. endpoint_url=None,
  226. region=None,
  227. delimiter="/",
  228. strict=True,
  229. cache_control=None,
  230. acl=None,
  231. upload_args=None,
  232. download_args=None,
  233. ):
  234. _creds = (aws_access_key_id, aws_secret_access_key)
  235. if any(_creds) and not all(_creds):
  236. raise ValueError(
  237. "aws_access_key_id and aws_secret_access_key "
  238. "must be set together if specified"
  239. )
  240. self._bucket_name = bucket_name
  241. self.dir_path = dir_path
  242. self._prefix = relpath(normpath(dir_path)).rstrip("/")
  243. self.aws_access_key_id = aws_access_key_id
  244. self.aws_secret_access_key = aws_secret_access_key
  245. self.aws_session_token = aws_session_token
  246. self.endpoint_url = endpoint_url
  247. self.region = region
  248. self.delimiter = delimiter
  249. self.strict = strict
  250. self._tlocal = threading.local()
  251. if cache_control or acl:
  252. upload_args = upload_args or {}
  253. if cache_control:
  254. upload_args["CacheControl"] = cache_control
  255. if acl:
  256. upload_args["ACL"] = acl
  257. self.upload_args = upload_args
  258. self.download_args = download_args
  259. super(S3FS, self).__init__()
  260. def __repr__(self):
  261. return _make_repr(
  262. self.__class__.__name__,
  263. self._bucket_name,
  264. dir_path=(self.dir_path, "/"),
  265. region=(self.region, None),
  266. delimiter=(self.delimiter, "/"),
  267. )
  268. def __str__(self):
  269. return "<s3fs '{}'>".format(join(self._bucket_name, relpath(self.dir_path)))
  270. def _path_to_key(self, path):
  271. """Converts an fs path to a s3 key."""
  272. _path = relpath(normpath(path))
  273. _key = (
  274. "{}/{}".format(self._prefix, _path).lstrip("/").replace("/", self.delimiter)
  275. )
  276. return _key
  277. def _path_to_dir_key(self, path):
  278. """Converts an fs path to a s3 key."""
  279. _path = relpath(normpath(path))
  280. _key = (
  281. forcedir("{}/{}".format(self._prefix, _path))
  282. .lstrip("/")
  283. .replace("/", self.delimiter)
  284. )
  285. return _key
  286. def _key_to_path(self, key):
  287. return key.replace(self.delimiter, "/")
  288. def _get_object(self, path, key):
  289. _key = key.rstrip(self.delimiter)
  290. try:
  291. with s3errors(path):
  292. obj = self.s3.Object(self._bucket_name, _key)
  293. obj.load()
  294. except errors.ResourceNotFound:
  295. with s3errors(path):
  296. obj = self.s3.Object(self._bucket_name, _key + self.delimiter)
  297. obj.load()
  298. return obj
  299. else:
  300. return obj
  301. def _get_upload_args(self, key):
  302. upload_args = self.upload_args.copy() if self.upload_args else {}
  303. if "ContentType" not in upload_args:
  304. mime_type, _encoding = mimetypes.guess_type(key)
  305. if six.PY2 and mime_type is not None:
  306. mime_type = mime_type.decode("utf-8", "replace")
  307. upload_args["ContentType"] = mime_type or "binary/octet-stream"
  308. return upload_args
  309. @property
  310. def s3(self):
  311. if not hasattr(self._tlocal, "s3"):
  312. self._tlocal.s3 = boto3.resource(
  313. "s3",
  314. region_name=self.region,
  315. aws_access_key_id=self.aws_access_key_id,
  316. aws_secret_access_key=self.aws_secret_access_key,
  317. aws_session_token=self.aws_session_token,
  318. endpoint_url=self.endpoint_url,
  319. )
  320. return self._tlocal.s3
  321. @property
  322. def client(self):
  323. if not hasattr(self._tlocal, "client"):
  324. self._tlocal.client = boto3.client(
  325. "s3",
  326. region_name=self.region,
  327. aws_access_key_id=self.aws_access_key_id,
  328. aws_secret_access_key=self.aws_secret_access_key,
  329. aws_session_token=self.aws_session_token,
  330. endpoint_url=self.endpoint_url,
  331. )
  332. return self._tlocal.client
  333. def _info_from_object(self, obj, namespaces):
  334. """Make an info dict from an s3 Object."""
  335. key = obj.key
  336. path = self._key_to_path(key)
  337. name = basename(path.rstrip("/"))
  338. is_dir = key.endswith(self.delimiter)
  339. info = {"basic": {"name": name, "is_dir": is_dir}}
  340. if "details" in namespaces:
  341. _type = int(ResourceType.directory if is_dir else ResourceType.file)
  342. info["details"] = {
  343. "accessed": None,
  344. "modified": datetime_to_epoch(obj.last_modified),
  345. "size": obj.content_length,
  346. "type": _type,
  347. }
  348. if "s3" in namespaces:
  349. s3info = info["s3"] = {}
  350. for name in self._object_attributes:
  351. value = getattr(obj, name, None)
  352. if isinstance(value, datetime):
  353. value = datetime_to_epoch(value)
  354. s3info[name] = value
  355. if "urls" in namespaces:
  356. url = self.client.generate_presigned_url(
  357. ClientMethod="get_object",
  358. Params={"Bucket": self._bucket_name, "Key": key},
  359. )
  360. info["urls"] = {"download": url}
  361. return info
  362. def isdir(self, path):
  363. _path = self.validatepath(path)
  364. try:
  365. return self._getinfo(_path).is_dir
  366. except errors.ResourceNotFound:
  367. return False
  368. def getinfo(self, path, namespaces=None):
  369. self.check()
  370. namespaces = namespaces or ()
  371. _path = self.validatepath(path)
  372. _key = self._path_to_key(_path)
  373. try:
  374. dir_path = dirname(_path)
  375. if dir_path != "/":
  376. _dir_key = self._path_to_dir_key(dir_path)
  377. with s3errors(path):
  378. obj = self.s3.Object(self._bucket_name, _dir_key)
  379. obj.load()
  380. except errors.ResourceNotFound:
  381. raise errors.ResourceNotFound(path)
  382. if _path == "/":
  383. return Info(
  384. {
  385. "basic": {"name": "", "is_dir": True},
  386. "details": {"type": int(ResourceType.directory)},
  387. }
  388. )
  389. obj = self._get_object(path, _key)
  390. info = self._info_from_object(obj, namespaces)
  391. return Info(info)
  392. def _getinfo(self, path, namespaces=None):
  393. """Gets info without checking for parent dir."""
  394. namespaces = namespaces or ()
  395. _path = self.validatepath(path)
  396. _key = self._path_to_key(_path)
  397. if _path == "/":
  398. return Info(
  399. {
  400. "basic": {"name": "", "is_dir": True},
  401. "details": {"type": int(ResourceType.directory)},
  402. }
  403. )
  404. obj = self._get_object(path, _key)
  405. info = self._info_from_object(obj, namespaces)
  406. return Info(info)
  407. def listdir(self, path):
  408. _path = self.validatepath(path)
  409. _s3_key = self._path_to_dir_key(_path)
  410. prefix_len = len(_s3_key)
  411. paginator = self.client.get_paginator("list_objects")
  412. with s3errors(path):
  413. _paginate = paginator.paginate(
  414. Bucket=self._bucket_name, Prefix=_s3_key, Delimiter=self.delimiter
  415. )
  416. _directory = []
  417. for result in _paginate:
  418. common_prefixes = result.get("CommonPrefixes", ())
  419. for prefix in common_prefixes:
  420. _prefix = prefix.get("Prefix")
  421. _name = _prefix[prefix_len:]
  422. if _name:
  423. _directory.append(_name.rstrip(self.delimiter))
  424. for obj in result.get("Contents", ()):
  425. name = obj["Key"][prefix_len:]
  426. if name:
  427. _directory.append(name)
  428. if not _directory:
  429. if not self.getinfo(_path).is_dir:
  430. raise errors.DirectoryExpected(path)
  431. return _directory
  432. def makedir(self, path, permissions=None, recreate=False):
  433. self.check()
  434. _path = self.validatepath(path)
  435. _key = self._path_to_dir_key(_path)
  436. if not self.isdir(dirname(_path)):
  437. raise errors.ResourceNotFound(path)
  438. try:
  439. self._getinfo(path)
  440. except errors.ResourceNotFound:
  441. pass
  442. else:
  443. if recreate:
  444. return self.opendir(_path)
  445. else:
  446. raise errors.DirectoryExists(path)
  447. with s3errors(path):
  448. _obj = self.s3.Object(self._bucket_name, _key)
  449. _obj.put(**self._get_upload_args(_key))
  450. return SubFS(self, path)
  451. def openbin(self, path, mode="r", buffering=-1, **options):
  452. _mode = Mode(mode)
  453. _mode.validate_bin()
  454. self.check()
  455. _path = self.validatepath(path)
  456. _key = self._path_to_key(_path)
  457. if _mode.create:
  458. def on_close_create(s3file):
  459. """Called when the S3 file closes, to upload data."""
  460. try:
  461. s3file.raw.seek(0)
  462. with s3errors(path):
  463. self.client.upload_fileobj(
  464. s3file.raw,
  465. self._bucket_name,
  466. _key,
  467. ExtraArgs=self._get_upload_args(_key),
  468. )
  469. finally:
  470. s3file.raw.close()
  471. try:
  472. dir_path = dirname(_path)
  473. if dir_path != "/":
  474. _dir_key = self._path_to_dir_key(dir_path)
  475. self._get_object(dir_path, _dir_key)
  476. except errors.ResourceNotFound:
  477. raise errors.ResourceNotFound(path)
  478. try:
  479. info = self._getinfo(path)
  480. except errors.ResourceNotFound:
  481. pass
  482. else:
  483. if _mode.exclusive:
  484. raise errors.FileExists(path)
  485. if info.is_dir:
  486. raise errors.FileExpected(path)
  487. s3file = S3File.factory(path, _mode, on_close=on_close_create)
  488. if _mode.appending:
  489. try:
  490. with s3errors(path):
  491. self.client.download_fileobj(
  492. self._bucket_name,
  493. _key,
  494. s3file.raw,
  495. ExtraArgs=self.download_args,
  496. )
  497. except errors.ResourceNotFound:
  498. pass
  499. else:
  500. s3file.seek(0, os.SEEK_END)
  501. return s3file
  502. if self.strict:
  503. info = self.getinfo(path)
  504. if info.is_dir:
  505. raise errors.FileExpected(path)
  506. def on_close(s3file):
  507. """Called when the S3 file closes, to upload the data."""
  508. try:
  509. if _mode.writing:
  510. s3file.raw.seek(0, os.SEEK_SET)
  511. with s3errors(path):
  512. self.client.upload_fileobj(
  513. s3file.raw,
  514. self._bucket_name,
  515. _key,
  516. ExtraArgs=self._get_upload_args(_key),
  517. )
  518. finally:
  519. s3file.raw.close()
  520. s3file = S3File.factory(path, _mode, on_close=on_close)
  521. with s3errors(path):
  522. self.client.download_fileobj(
  523. self._bucket_name, _key, s3file.raw, ExtraArgs=self.download_args
  524. )
  525. s3file.seek(0, os.SEEK_SET)
  526. return s3file
  527. def remove(self, path):
  528. self.check()
  529. _path = self.validatepath(path)
  530. _key = self._path_to_key(_path)
  531. if self.strict:
  532. info = self.getinfo(path)
  533. if info.is_dir:
  534. raise errors.FileExpected(path)
  535. self.client.delete_object(Bucket=self._bucket_name, Key=_key)
  536. def isempty(self, path):
  537. self.check()
  538. _path = self.validatepath(path)
  539. _key = self._path_to_dir_key(_path)
  540. response = self.client.list_objects(
  541. Bucket=self._bucket_name, Prefix=_key, MaxKeys=2
  542. )
  543. contents = response.get("Contents", ())
  544. for obj in contents:
  545. if obj["Key"] != _key:
  546. return False
  547. return True
  548. def removedir(self, path):
  549. self.check()
  550. _path = self.validatepath(path)
  551. if _path == "/":
  552. raise errors.RemoveRootError()
  553. info = self.getinfo(_path)
  554. if not info.is_dir:
  555. raise errors.DirectoryExpected(path)
  556. if not self.isempty(path):
  557. raise errors.DirectoryNotEmpty(path)
  558. _key = self._path_to_dir_key(_path)
  559. self.client.delete_object(Bucket=self._bucket_name, Key=_key)
  560. def setinfo(self, path, info):
  561. self.getinfo(path)
  562. def readbytes(self, path):
  563. self.check()
  564. if self.strict:
  565. info = self.getinfo(path)
  566. if not info.is_file:
  567. raise errors.FileExpected(path)
  568. _path = self.validatepath(path)
  569. _key = self._path_to_key(_path)
  570. bytes_file = io.BytesIO()
  571. with s3errors(path):
  572. self.client.download_fileobj(
  573. self._bucket_name, _key, bytes_file, ExtraArgs=self.download_args
  574. )
  575. return bytes_file.getvalue()
  576. def download(self, path, file, chunk_size=None, **options):
  577. self.check()
  578. if self.strict:
  579. info = self.getinfo(path)
  580. if not info.is_file:
  581. raise errors.FileExpected(path)
  582. _path = self.validatepath(path)
  583. _key = self._path_to_key(_path)
  584. with s3errors(path):
  585. self.client.download_fileobj(
  586. self._bucket_name, _key, file, ExtraArgs=self.download_args
  587. )
  588. def exists(self, path):
  589. self.check()
  590. _path = self.validatepath(path)
  591. if _path == "/":
  592. return True
  593. _key = self._path_to_dir_key(_path)
  594. try:
  595. self._get_object(path, _key)
  596. except errors.ResourceNotFound:
  597. return False
  598. else:
  599. return True
  600. def scandir(self, path, namespaces=None, page=None):
  601. _path = self.validatepath(path)
  602. namespaces = namespaces or ()
  603. _s3_key = self._path_to_dir_key(_path)
  604. prefix_len = len(_s3_key)
  605. info = self.getinfo(path)
  606. if not info.is_dir:
  607. raise errors.DirectoryExpected(path)
  608. paginator = self.client.get_paginator("list_objects")
  609. _paginate = paginator.paginate(
  610. Bucket=self._bucket_name, Prefix=_s3_key, Delimiter=self.delimiter
  611. )
  612. def gen_info():
  613. for result in _paginate:
  614. common_prefixes = result.get("CommonPrefixes", ())
  615. for prefix in common_prefixes:
  616. _prefix = prefix.get("Prefix")
  617. _name = _prefix[prefix_len:]
  618. if _name:
  619. info = {
  620. "basic": {
  621. "name": _name.rstrip(self.delimiter),
  622. "is_dir": True,
  623. }
  624. }
  625. yield Info(info)
  626. for _obj in result.get("Contents", ()):
  627. name = _obj["Key"][prefix_len:]
  628. if name:
  629. with s3errors(path):
  630. obj = self.s3.Object(self._bucket_name, _obj["Key"])
  631. info = self._info_from_object(obj, namespaces)
  632. yield Info(info)
  633. iter_info = iter(gen_info())
  634. if page is not None:
  635. start, end = page
  636. iter_info = itertools.islice(iter_info, start, end)
  637. for info in iter_info:
  638. yield info
  639. def writebytes(self, path, contents):
  640. if not isinstance(contents, bytes):
  641. raise TypeError("contents must be bytes")
  642. _path = self.validatepath(path)
  643. _key = self._path_to_key(_path)
  644. if self.strict:
  645. if not self.isdir(dirname(path)):
  646. raise errors.ResourceNotFound(path)
  647. try:
  648. info = self._getinfo(path)
  649. if info.is_dir:
  650. raise errors.FileExpected(path)
  651. except errors.ResourceNotFound:
  652. pass
  653. bytes_file = io.BytesIO(contents)
  654. with s3errors(path):
  655. self.client.upload_fileobj(
  656. bytes_file,
  657. self._bucket_name,
  658. _key,
  659. ExtraArgs=self._get_upload_args(_key),
  660. )
  661. def upload(self, path, file, chunk_size=None, **options):
  662. _path = self.validatepath(path)
  663. _key = self._path_to_key(_path)
  664. if self.strict:
  665. if not self.isdir(dirname(path)):
  666. raise errors.ResourceNotFound(path)
  667. try:
  668. info = self._getinfo(path)
  669. if info.is_dir:
  670. raise errors.FileExpected(path)
  671. except errors.ResourceNotFound:
  672. pass
  673. with s3errors(path):
  674. self.client.upload_fileobj(
  675. file, self._bucket_name, _key, ExtraArgs=self._get_upload_args(_key)
  676. )
  677. def copy(self, src_path, dst_path, overwrite=False):
  678. if not overwrite and self.exists(dst_path):
  679. raise errors.DestinationExists(dst_path)
  680. _src_path = self.validatepath(src_path)
  681. _dst_path = self.validatepath(dst_path)
  682. if self.strict:
  683. if not self.isdir(dirname(_dst_path)):
  684. raise errors.ResourceNotFound(dst_path)
  685. _src_key = self._path_to_key(_src_path)
  686. _dst_key = self._path_to_key(_dst_path)
  687. try:
  688. with s3errors(src_path):
  689. self.client.copy_object(
  690. Bucket=self._bucket_name,
  691. Key=_dst_key,
  692. CopySource={"Bucket": self._bucket_name, "Key": _src_key},
  693. )
  694. except errors.ResourceNotFound:
  695. if self.exists(src_path):
  696. raise errors.FileExpected(src_path)
  697. raise
  698. def move(self, src_path, dst_path, overwrite=False):
  699. self.copy(src_path, dst_path, overwrite=overwrite)
  700. self.remove(src_path)
  701. def geturl(self, path, purpose="download"):
  702. _path = self.validatepath(path)
  703. _key = self._path_to_key(_path)
  704. if _path == "/":
  705. raise errors.NoURL(path, purpose)
  706. if purpose == "download":
  707. url = self.client.generate_presigned_url(
  708. ClientMethod="get_object",
  709. Params={"Bucket": self._bucket_name, "Key": _key},
  710. )
  711. return url
  712. else:
  713. raise errors.NoURL(path, purpose)