[gnome-continuous-yocto/gnomeostree-3.28-rocko: 4894/8267] bitbake: fetch2: add initial Amazon AWS S3 fetcher



commit 8f1ed1758748e4c00eaa6b4b3528e79f36d62c19
Author: Andre McCurdy <armccurdy gmail com>
Date:   Wed Mar 1 15:56:46 2017 -0800

    bitbake: fetch2: add initial Amazon AWS S3 fetcher
    
    Class for fetching files from Amazon S3 using the AWS Command Line
    Interface. The aws tool must be correctly installed and configured
    prior to use.
    
    The class supports both download() and checkstatus(), which allows
    S3 mirrors to be used in SSTATE_MIRRORS.
    
    (Bitbake rev: 6fe07ed25457dd7952b60f4b2153d56b15d5eea6)
    
    Signed-off-by: Andre McCurdy <armccurdy gmail com>
    Signed-off-by: Richard Purdie <richard purdie linuxfoundation org>
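
For illustration, a minimal usage sketch, not taken from the commit itself:
with this fetcher in place, a recipe can point SRC_URI at an S3 object, and
local.conf can list an S3 sstate mirror. The bucket names, object paths and
checksum below are hypothetical placeholders.

    SRC_URI = "s3://example-bucket/downloads/foo-1.0.tar.gz"
    # Placeholder checksum (this is the sha256 of the empty string).
    SRC_URI[sha256sum] = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"

    # local.conf: consult a hypothetical S3 sstate mirror; PATH is
    # substituted by BitBake with the sstate object's relative path.
    SSTATE_MIRRORS ?= "file://.* s3://example-sstate-bucket/PATH"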

 bitbake/lib/bb/fetch2/__init__.py |    6 ++-
 bitbake/lib/bb/fetch2/s3.py       |   96 +++++++++++++++++++++++++++++++++++++
 2 files changed, 100 insertions(+), 2 deletions(-)
---
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 672f109..5209f4d 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -1200,13 +1200,13 @@ class FetchData(object):
             self.sha256_name = "sha256sum"
         if self.md5_name in self.parm:
             self.md5_expected = self.parm[self.md5_name]
-        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
+        elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
             self.md5_expected = None
         else:
             self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
         if self.sha256_name in self.parm:
             self.sha256_expected = self.parm[self.sha256_name]
-        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
+        elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
             self.sha256_expected = None
         else:
             self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
@@ -1791,6 +1791,7 @@ from . import svn
 from . import wget
 from . import ssh
 from . import sftp
+from . import s3
 from . import perforce
 from . import bzr
 from . import hg
@@ -1808,6 +1809,7 @@ methods.append(gitannex.GitANNEX())
 methods.append(cvs.Cvs())
 methods.append(ssh.SSH())
 methods.append(sftp.SFTP())
+methods.append(s3.S3())
 methods.append(perforce.Perforce())
 methods.append(bzr.Bzr())
 methods.append(hg.Hg())
diff --git a/bitbake/lib/bb/fetch2/s3.py b/bitbake/lib/bb/fetch2/s3.py
new file mode 100644
index 0000000..27993aa
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/s3.py
@@ -0,0 +1,96 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementation for Amazon AWS S3.
+
+Class for fetching files from Amazon S3 using the AWS Command Line Interface.
+The aws tool must be correctly installed and configured prior to use.
+
+"""
+
+# Copyright (C) 2017, Andre McCurdy <armccurdy gmail com>
+#
+# Based in part on bb.fetch2.wget:
+#    Copyright (C) 2003, 2004  Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import bb
+import urllib.request, urllib.parse, urllib.error
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import runfetchcmd
+
+class S3(FetchMethod):
+    """Class to fetch urls via 'aws s3'"""
+
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with s3.
+        """
+        return ud.type in ['s3']
+
+    def recommends_checksum(self, urldata):
+        return True
+
+    def urldata_init(self, ud, d):
+        if 'downloadfilename' in ud.parm:
+            ud.basename = ud.parm['downloadfilename']
+        else:
+            ud.basename = os.path.basename(ud.path)
+
+        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
+
+    def download(self, ud, d):
+        """
+        Fetch urls
+        Assumes localpath was called first
+        """
+
+        cmd = 'aws s3 cp s3://%s%s %s' % (ud.host, ud.path, ud.localpath)
+        bb.fetch2.check_network_access(d, cmd, ud.url)
+        runfetchcmd(cmd, d)
+
+        # Additional sanity checks copied from the wget class (although there
+        # are no known issues which mean these are required, treat the aws cli
+        # tool with a little healthy suspicion).
+
+        if not os.path.exists(ud.localpath):
+            raise FetchError("The aws cp command returned success for s3://%s%s but %s doesn't exist?!" % 
(ud.host, ud.path, ud.localpath))
+
+        if os.path.getsize(ud.localpath) == 0:
+            os.remove(ud.localpath)
+            raise FetchError("The aws cp command for s3://%s%s resulted in a zero size file?! Deleting and 
failing since this isn't right." % (ud.host, ud.path))
+
+        return True
+
+    def checkstatus(self, fetch, ud, d):
+        """
+        Check the status of a URL
+        """
+
+        cmd = 'aws s3 ls s3://%s%s' % (ud.host, ud.path)
+        bb.fetch2.check_network_access(d, cmd, ud.url)
+        output = runfetchcmd(cmd, d)
+
+        # "aws s3 ls s3://mybucket/foo" will exit with success even if the file
+        # is not found, so check output of the command to confirm success.
+
+        if not output:
+            raise FetchError("The aws ls command for s3://%s%s gave empty output" % (ud.host, ud.path))
+
+        return True
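
For completeness, a hedged sketch of how the new method is reached through the
standard bb.fetch2 entry points, e.g. from Python code running inside a
BitBake task where the datastore "d" is available. The URI is a hypothetical
placeholder and this helper is not part of the commit.

    import bb.fetch2

    def fetch_from_s3(d):
        # Hypothetical object; assumes a working, configured aws CLI.
        uri = "s3://example-bucket/downloads/foo-1.0.tar.gz"
        fetcher = bb.fetch2.Fetch([uri], d)
        fetcher.download()             # dispatches to S3.download(), i.e. "aws s3 cp"
        return fetcher.localpath(uri)  # path of the downloaded file under DL_DIR

On failure, download() raises bb.fetch2.FetchError, so callers normally rely
on the exception rather than inspecting a return value.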

