github.com/1aal/kubeblocks@v0.0.0-20231107070852-e1c03e598921/pkg/cli/cmd/plugin/download/download.go

// Copyright 2019 The Kubernetes Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package download

import (
	"archive/tar"
	"archive/zip"
	"bytes"
	"compress/gzip"
	"io"
	"net/http"
	"os"
	"path/filepath"
	"strings"

	"github.com/pkg/errors"
	"k8s.io/klog/v2"
)

// download gets a file from the internet into memory and writes its content
// to a Verifier.
func download(url string, verifier Verifier, fetcher Fetcher) (io.ReaderAt, int64, error) {
	body, err := fetcher.Get(url)
	if err != nil {
		return nil, 0, errors.Wrapf(err, "failed to obtain plugin archive")
	}
	defer body.Close()

	klog.V(3).Infof("Reading archive file into memory")
	data, err := io.ReadAll(io.TeeReader(body, verifier))
	if err != nil {
		return nil, 0, errors.Wrap(err, "could not read archive")
	}
	klog.V(2).Infof("Read %d bytes from archive into memory", len(data))

	return bytes.NewReader(data), int64(len(data)), verifier.Verify()
}
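
// A hypothetical usage sketch for download (not part of the original file): it
// pairs the package's sha256 Verifier with a file-based Fetcher, so the archive
// is hashed while it is read into memory and any checksum mismatch surfaces via
// the returned error. The path, URL, and checksum below are placeholders.
func exampleDownloadUsage() (io.ReaderAt, int64, error) {
	verifier := NewSha256Verifier("0000000000000000000000000000000000000000000000000000000000000000")
	fetcher := NewFileFetcher("/tmp/kubectl-foo.tar.gz") // placeholder local archive
	return download("https://example.com/kubectl-foo.tar.gz", verifier, fetcher)
}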

// extractZIP extracts a zip file into the target directory.
func extractZIP(targetDir string, read io.ReaderAt, size int64) error {
	klog.V(4).Infof("Extracting zip archive to %q", targetDir)
	zipReader, err := zip.NewReader(read, size)
	if err != nil {
		return err
	}

	for _, f := range zipReader.File {
		if err := suspiciousPath(f.Name); err != nil {
			return err
		}

		path := filepath.Join(targetDir, filepath.FromSlash(f.Name))
		if f.FileInfo().IsDir() {
			if err := os.MkdirAll(path, f.Mode()); err != nil {
				return errors.Wrap(err, "can't create directory tree")
			}
			continue
		}

		dir := filepath.Dir(path)
		klog.V(4).Infof("zip: ensuring parent dirs exist for regular file, dir=%s", dir)
		if err := os.MkdirAll(dir, 0o755); err != nil {
			return errors.Wrap(err, "failed to create directory for zip entry")
		}
		src, err := f.Open()
		if err != nil {
			return errors.Wrap(err, "could not open file inside zip archive")
		}

		dst, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, f.Mode())
		if err != nil {
			src.Close()
			return errors.Wrap(err, "can't create file in zip destination dir")
		}
		closeAll := func() {
			src.Close()
			dst.Close()
		}

		if _, err := io.Copy(dst, src); err != nil {
			closeAll()
			return errors.Wrap(err, "can't copy content to zip destination file")
		}
		closeAll()
	}

	return nil
}
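
// A hypothetical usage sketch for extractZIP (not part of the original file):
// it builds a single-entry zip archive in memory and unpacks it into a
// temporary directory, exercising the same io.ReaderAt/size contract that
// extractArchive relies on.
func exampleExtractZIPUsage() error {
	var buf bytes.Buffer
	zw := zip.NewWriter(&buf)
	w, err := zw.Create("plugin/README.txt")
	if err != nil {
		return err
	}
	if _, err := w.Write([]byte("hello")); err != nil {
		return err
	}
	if err := zw.Close(); err != nil {
		return err
	}

	dir, err := os.MkdirTemp("", "zip-example")
	if err != nil {
		return err
	}
	defer os.RemoveAll(dir)

	return extractZIP(dir, bytes.NewReader(buf.Bytes()), int64(buf.Len()))
}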

// extractTARGZ extracts a gzipped tar file into the target directory.
func extractTARGZ(targetDir string, at io.ReaderAt, size int64) error {
	klog.V(4).Infof("tar: extracting to %q", targetDir)
	in := io.NewSectionReader(at, 0, size)

	gzr, err := gzip.NewReader(in)
	if err != nil {
		return errors.Wrap(err, "failed to create gzip reader")
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)
	for {
		hdr, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return errors.Wrap(err, "tar extraction error")
		}
		klog.V(4).Infof("tar: processing %q (type=%d, mode=%s)", hdr.Name, hdr.Typeflag, os.FileMode(hdr.Mode))
		// see https://golang.org/cl/78355 for handling pax_global_header
		if hdr.Name == "pax_global_header" {
			klog.V(4).Infof("tar: skipping pax_global_header file")
			continue
		}

		if err := suspiciousPath(hdr.Name); err != nil {
			return err
		}

		path := filepath.Join(targetDir, filepath.FromSlash(hdr.Name))
		switch hdr.Typeflag {
		case tar.TypeDir:
			if err := os.MkdirAll(path, os.FileMode(hdr.Mode)); err != nil {
				return errors.Wrap(err, "failed to create directory from tar")
			}
		case tar.TypeReg:
			dir := filepath.Dir(path)
			klog.V(4).Infof("tar: ensuring parent dirs exist for regular file, dir=%s", dir)
			if err := os.MkdirAll(dir, 0o755); err != nil {
				return errors.Wrap(err, "failed to create directory for tar")
			}
			f, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY, os.FileMode(hdr.Mode))
			if err != nil {
				return errors.Wrapf(err, "failed to create file %q", path)
			}

			if _, err := io.Copy(f, tr); err != nil {
				f.Close()
				return errors.Wrapf(err, "failed to copy %q from tar into file", hdr.Name)
			}
			f.Close()
		default:
			return errors.Errorf("unable to handle file type %d for %q in tar", hdr.Typeflag, hdr.Name)
		}
		klog.V(4).Infof("tar: processed %q", hdr.Name)
	}
	klog.V(4).Infof("tar extraction to %s complete", targetDir)
	return nil
}
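
// A hypothetical usage sketch for extractTARGZ (not part of the original file):
// it writes a one-file gzipped tarball into a buffer and extracts it into a
// temporary directory.
func exampleExtractTARGZUsage() error {
	var buf bytes.Buffer
	gzw := gzip.NewWriter(&buf)
	tw := tar.NewWriter(gzw)
	content := []byte("hello")
	if err := tw.WriteHeader(&tar.Header{
		Name:     "plugin/kubectl-foo",
		Typeflag: tar.TypeReg,
		Mode:     0o755,
		Size:     int64(len(content)),
	}); err != nil {
		return err
	}
	if _, err := tw.Write(content); err != nil {
		return err
	}
	if err := tw.Close(); err != nil {
		return err
	}
	if err := gzw.Close(); err != nil {
		return err
	}

	dir, err := os.MkdirTemp("", "targz-example")
	if err != nil {
		return err
	}
	defer os.RemoveAll(dir)

	return extractTARGZ(dir, bytes.NewReader(buf.Bytes()), int64(buf.Len()))
}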

// suspiciousPath rejects archive entries that could escape the extraction
// directory: anything containing ".." or starting with a path separator.
func suspiciousPath(path string) error {
	if strings.Contains(path, "..") {
		return errors.Errorf("refusing to unpack archive with suspicious entry %q", path)
	}

	if strings.HasPrefix(path, `/`) || strings.HasPrefix(path, `\`) {
		return errors.Errorf("refusing to unpack archive with absolute entry %q", path)
	}

	return nil
}
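
// A hypothetical illustration of suspiciousPath (not part of the original
// file): traversal and absolute entries are rejected, plain relative entries
// pass.
func exampleSuspiciousPathUsage() {
	_ = suspiciousPath("../outside/evil")    // rejected: contains ".."
	_ = suspiciousPath("/etc/passwd")        // rejected: absolute path
	_ = suspiciousPath("plugin/kubectl-foo") // accepted: returns nil
}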

// detectMIMEType sniffs the content type of the archive from its first 512
// bytes.
func detectMIMEType(at io.ReaderAt) (string, error) {
	buf := make([]byte, 512)
	n, err := at.ReadAt(buf, 0)
	if err != nil && err != io.EOF {
		return "", errors.Wrap(err, "failed to read first 512 bytes")
	}
	if n < 512 {
		klog.V(5).Infof("Only read %d of 512 bytes to determine the file type", n)
	}

	// Cut off MIME parameters that follow a ';', e.g.
	// "text/plain; charset=utf-8" becomes "text/plain".
	return strings.Split(http.DetectContentType(buf[:n]), ";")[0], nil
}
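
// A hypothetical illustration of detectMIMEType (not part of the original
// file): the gzip magic bytes 0x1f 0x8b 0x08 are enough for
// http.DetectContentType to report "application/x-gzip", which selects the
// tar.gz extractor below.
func exampleDetectMIMETypeUsage() (string, error) {
	header := []byte{0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00}
	return detectMIMEType(bytes.NewReader(header))
}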

// extractor extracts an archive read from an io.ReaderAt into targetDir.
type extractor func(targetDir string, read io.ReaderAt, size int64) error

// defaultExtractors maps detected MIME types to the matching extractor.
var defaultExtractors = map[string]extractor{
	"application/zip":    extractZIP,
	"application/x-gzip": extractTARGZ,
}

// extractArchive detects the archive type of the given reader and dispatches
// to the matching extractor.
func extractArchive(dst string, at io.ReaderAt, size int64) error {
	t, err := detectMIMEType(at)
	if err != nil {
		return errors.Wrap(err, "failed to determine content type")
	}
	klog.V(4).Infof("detected %q file type", t)
	exf, ok := defaultExtractors[t]
	if !ok {
		return errors.Errorf("mime type %q for archive file is not a supported archive format", t)
	}
	return errors.Wrap(exf(dst, at, size), "failed to extract file")
}

// Downloader is responsible for fetching, verifying and extracting a binary.
type Downloader struct {
	verifier Verifier
	fetcher  Fetcher
}

// NewDownloader builds a new Downloader.
func NewDownloader(v Verifier, f Fetcher) Downloader {
	return Downloader{
		verifier: v,
		fetcher:  f,
	}
}

// Get pulls the uri and verifies it. On success, the download gets extracted
// into dst.
func (d Downloader) Get(uri, dst string) error {
	body, size, err := download(uri, d.verifier, d.fetcher)
	if err != nil {
		return err
	}
	return extractArchive(dst, body, size)
}

// DownloadAndExtract downloads the specified archive uri (or uses the provided
// overrideFile, if non-empty), validates its checksum against the provided
// sha256sum, and extracts its contents into extractDir, which must already be
// created.
func DownloadAndExtract(extractDir, uri, sha256sum, overrideFile string) error {
	var fetcher Fetcher = HTTPFetcher{}
	if overrideFile != "" {
		fetcher = NewFileFetcher(overrideFile)
	}

	verifier := NewSha256Verifier(sha256sum)
	err := NewDownloader(verifier, fetcher).Get(uri, extractDir)
	return errors.Wrap(err, "failed to unpack the plugin archive")
}
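
// A hypothetical end-to-end usage sketch (not part of the original file): the
// URL and checksum below are placeholders. With an empty overrideFile the
// archive is fetched over HTTP; a non-empty value would be read from local
// disk instead.
func exampleDownloadAndExtractUsage() error {
	dst, err := os.MkdirTemp("", "plugin-example")
	if err != nil {
		return err
	}
	return DownloadAndExtract(
		dst,
		"https://example.com/kubectl-foo.tar.gz",                           // placeholder URL
		"0000000000000000000000000000000000000000000000000000000000000000", // placeholder sha256
		"", // empty overrideFile: fetch over HTTP
	)
}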