github.com/StackPointCloud/packer@v0.10.2-0.20180716202532-b28098e0f79b/post-processor/compress/post-processor.go

package compress

import (
	"archive/tar"
	"archive/zip"
	"compress/gzip"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"regexp"
	"runtime"

	"github.com/biogo/hts/bgzf"
	"github.com/hashicorp/packer/common"
	"github.com/hashicorp/packer/helper/config"
	"github.com/hashicorp/packer/packer"
	"github.com/hashicorp/packer/template/interpolate"
	"github.com/klauspost/pgzip"
	"github.com/pierrec/lz4"
)

var (
	// ErrInvalidCompressionLevel is returned when the compression level passed
	// to gzip is not in the expected range. See compress/flate for details.
	ErrInvalidCompressionLevel = fmt.Errorf(
		"Invalid compression level. Expected an integer from -1 to 9.")

	// ErrWrongInputCount is returned when no archive format is used and more
	// than one input file is supplied.
	ErrWrongInputCount = fmt.Errorf(
		"Can only have 1 input file when not using tar/zip")

	filenamePattern = regexp.MustCompile(`(?:\.([a-z0-9]+))`)
)

// Config is the configuration for the compress post-processor.
type Config struct {
	common.PackerConfig `mapstructure:",squash"`

	// Fields from config file
	OutputPath        string `mapstructure:"output"`
	Format            string `mapstructure:"format"`
	CompressionLevel  int    `mapstructure:"compression_level"`
	KeepInputArtifact bool   `mapstructure:"keep_input_artifact"`

	// Derived fields
	Archive   string
	Algorithm string

	ctx interpolate.Context
}
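
// Illustrative sketch (not part of the original file): a raw configuration map
// using the mapstructure keys above; Configure below decodes such a map into
// Config. The key names match the struct tags, but the values are hypothetical
// examples, not defaults.
var exampleRawConfig = map[string]interface{}{
	"output":              "builds/{{.BuildName}}.tar.gz",
	"format":              "tar.gz",
	"compression_level":   6,
	"keep_input_artifact": true,
}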

// PostProcessor compresses and/or archives a builder's artifact.
type PostProcessor struct {
	config Config
}

// Configure decodes and validates the post-processor configuration, applying
// defaults for the output path and compression level and deriving the archive
// format and compression algorithm from the output filename.
func (p *PostProcessor) Configure(raws ...interface{}) error {
	err := config.Decode(&p.config, &config.DecodeOpts{
		Interpolate:        true,
		InterpolateContext: &p.config.ctx,
		InterpolateFilter: &interpolate.RenderFilter{
			Exclude: []string{"output"},
		},
	}, raws...)
	if err != nil {
		return err
	}

	errs := new(packer.MultiError)

	// If GOMAXPROCS is not set explicitly, use all available CPUs.
	if os.Getenv("GOMAXPROCS") == "" {
		runtime.GOMAXPROCS(runtime.NumCPU())
	}

	if p.config.OutputPath == "" {
		p.config.OutputPath = "packer_{{.BuildName}}_{{.BuilderType}}"
	}

	if p.config.CompressionLevel > pgzip.BestCompression {
		p.config.CompressionLevel = pgzip.BestCompression
	}
	// Technically 0 means "don't compress" but I don't know how to
	// differentiate between "user entered zero" and "user entered nothing".
	// Also, why bother creating a compressed file with zero compression?
	if p.config.CompressionLevel == -1 || p.config.CompressionLevel == 0 {
		p.config.CompressionLevel = pgzip.DefaultCompression
	}

	if err = interpolate.Validate(p.config.OutputPath, &p.config.ctx); err != nil {
		errs = packer.MultiErrorAppend(
			errs, fmt.Errorf("Error parsing target template: %s", err))
	}

	p.config.detectFromFilename()

	if len(errs.Errors) > 0 {
		return errs
	}

	return nil
}
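
// Illustrative sketch (not part of the original file): configuring the
// post-processor from a raw map. Configure clamps compression_level values
// above pgzip.BestCompression (9) and treats 0 or -1 as
// pgzip.DefaultCompression. The function name and values are hypothetical.
func exampleConfigure() error {
	p := &PostProcessor{}
	return p.Configure(map[string]interface{}{
		"output":            "builds/{{.BuildName}}.tar.gz",
		"compression_level": 12, // will be clamped to pgzip.BestCompression
	})
}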

// PostProcess writes the artifact's files to the configured output path,
// archiving and/or compressing them according to the derived settings.
func (p *PostProcessor) PostProcess(ui packer.Ui, artifact packer.Artifact) (packer.Artifact, bool, error) {

	// These are extra variables that will be made available for interpolation.
	p.config.ctx.Data = map[string]string{
		"BuildName":   p.config.PackerBuildName,
		"BuilderType": p.config.PackerBuilderType,
	}

	target, err := interpolate.Render(p.config.OutputPath, &p.config.ctx)
	if err != nil {
		return nil, false, fmt.Errorf("Error interpolating output value: %s", err)
	}
	fmt.Println(target)

	keep := p.config.KeepInputArtifact
	newArtifact := &Artifact{Path: target}

	if err = os.MkdirAll(filepath.Dir(target), os.FileMode(0755)); err != nil {
		return nil, false, fmt.Errorf(
			"Unable to create dir for archive %s: %s", target, err)
	}
	outputFile, err := os.Create(target)
	if err != nil {
		return nil, false, fmt.Errorf(
			"Unable to create archive %s: %s", target, err)
	}
	defer outputFile.Close()

	// Set up the output writer. If we're using compression, output is a
	// compression writer; otherwise it's just the file.
	var output io.WriteCloser
	switch p.config.Algorithm {
	case "bgzf":
		ui.Say(fmt.Sprintf("Using bgzf compression with %d cores for %s",
			runtime.GOMAXPROCS(-1), target))
		output, err = makeBGZFWriter(outputFile, p.config.CompressionLevel)
		if err != nil {
			return nil, false, err
		}
		defer output.Close()
	case "lz4":
		ui.Say(fmt.Sprintf("Using lz4 compression with %d cores for %s",
			runtime.GOMAXPROCS(-1), target))
		output, err = makeLZ4Writer(outputFile, p.config.CompressionLevel)
		if err != nil {
			return nil, false, err
		}
		defer output.Close()
	case "pgzip":
		ui.Say(fmt.Sprintf("Using pgzip compression with %d cores for %s",
			runtime.GOMAXPROCS(-1), target))
		output, err = makePgzipWriter(outputFile, p.config.CompressionLevel)
		if err != nil {
			return nil, false, err
		}
		defer output.Close()
	default:
		output = outputFile
	}

	compression := p.config.Algorithm
	if compression == "" {
		compression = "no compression"
	}

	// Build an archive, if we're supposed to do that.
	switch p.config.Archive {
	case "tar":
		ui.Say(fmt.Sprintf("Tarring %s with %s", target, compression))
		err = createTarArchive(artifact.Files(), output)
		if err != nil {
			return nil, keep, fmt.Errorf("Error creating tar: %s", err)
		}
	case "zip":
		ui.Say(fmt.Sprintf("Zipping %s", target))
		err = createZipArchive(artifact.Files(), output)
		if err != nil {
			return nil, keep, fmt.Errorf("Error creating zip: %s", err)
		}
	default:
		// Filename indicates no tarball (just compress) so we'll do an io.Copy
		// into our compressor.
		if len(artifact.Files()) != 1 {
			return nil, keep, fmt.Errorf(
				"Can only have 1 input file when not using tar/zip. Found %d "+
					"files: %v", len(artifact.Files()), artifact.Files())
		}
		archiveFile := artifact.Files()[0]
		ui.Say(fmt.Sprintf("Archiving %s with %s", archiveFile, compression))

		source, err := os.Open(archiveFile)
		if err != nil {
			return nil, keep, fmt.Errorf(
				"Failed to open source file %s for reading: %s",
				archiveFile, err)
		}
		defer source.Close()

		if _, err = io.Copy(output, source); err != nil {
			return nil, keep, fmt.Errorf("Failed to compress %s: %s",
				archiveFile, err)
		}
	}

	ui.Say(fmt.Sprintf("Archive %s completed", target))

	return newArtifact, keep, nil
}
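
// Illustrative sketch (not part of the original file): driving the
// post-processor end to end. The ui and artifact values would normally be
// supplied by Packer; the function name and output path are hypothetical.
func examplePostProcess(ui packer.Ui, artifact packer.Artifact) error {
	p := &PostProcessor{}
	if err := p.Configure(map[string]interface{}{"output": "builds/image.tar.gz"}); err != nil {
		return err
	}
	newArtifact, keep, err := p.PostProcess(ui, artifact)
	if err != nil {
		return err
	}
	fmt.Printf("created %s (keep input artifact: %t)\n", newArtifact, keep)
	return nil
}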

// detectFromFilename derives the Archive and Algorithm fields from the output
// path, or from the explicit "format" option when one is given.
func (config *Config) detectFromFilename() {
	var result [][]string

	extensions := map[string]string{
		"tar":  "tar",
		"zip":  "zip",
		"gz":   "pgzip",
		"lz4":  "lz4",
		"bgzf": "bgzf",
	}

	if config.Format == "" {
		result = filenamePattern.FindAllStringSubmatch(config.OutputPath, -1)
	} else {
		result = filenamePattern.FindAllStringSubmatch(fmt.Sprintf("%s.%s", config.OutputPath, config.Format), -1)
	}

	// No dots. Bail out with defaults.
	if len(result) == 0 {
		config.Algorithm = "pgzip"
		config.Archive = "tar"
		return
	}

	// Parse the last two extension groups, if they're there
	lastItem := result[len(result)-1][1]
	var nextToLastItem string
	if len(result) == 1 {
		nextToLastItem = ""
	} else {
		nextToLastItem = result[len(result)-2][1]
	}

	// Should we make an archive? E.g. tar or zip?
	if nextToLastItem == "tar" {
		config.Archive = "tar"
	}
	if lastItem == "zip" || lastItem == "tar" {
		config.Archive = lastItem
		// Tar or zip is our final artifact. Bail out.
		return
	}

	// Should we compress the artifact?
	algorithm, ok := extensions[lastItem]
	if ok {
		config.Algorithm = algorithm
		// We found our compression algorithm. Bail out.
		return
	}

	// We didn't match a known compression format. Default to tar + pgzip.
	config.Algorithm = "pgzip"
	config.Archive = "tar"
}
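
// Illustrative sketch (not part of the original file): how various output
// filenames drive the derived Archive and Algorithm fields. The filenames are
// hypothetical examples.
func exampleDetectFromFilename() {
	for _, name := range []string{"image.tar.gz", "image.zip", "image.lz4", "image"} {
		c := &Config{OutputPath: name}
		c.detectFromFilename()
		fmt.Printf("%s -> archive=%q algorithm=%q\n", name, c.Archive, c.Algorithm)
	}
}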

// makeBGZFWriter wraps output in a parallel bgzf writer at the given level.
func makeBGZFWriter(output io.WriteCloser, compressionLevel int) (io.WriteCloser, error) {
	bgzfWriter, err := bgzf.NewWriterLevel(output, compressionLevel, runtime.GOMAXPROCS(-1))
	if err != nil {
		return nil, ErrInvalidCompressionLevel
	}
	return bgzfWriter, nil
}

// makeLZ4Writer wraps output in an lz4 writer.
func makeLZ4Writer(output io.WriteCloser, compressionLevel int) (io.WriteCloser, error) {
	lzwriter := lz4.NewWriter(output)
	// gzip.DefaultCompression is -1, so any level above the default enables
	// lz4's high-compression mode.
	if compressionLevel > gzip.DefaultCompression {
		lzwriter.Header.HighCompression = true
	}
	return lzwriter, nil
}

// makePgzipWriter wraps output in a parallel gzip writer at the given level.
func makePgzipWriter(output io.WriteCloser, compressionLevel int) (io.WriteCloser, error) {
	gzipWriter, err := pgzip.NewWriterLevel(output, compressionLevel)
	if err != nil {
		return nil, ErrInvalidCompressionLevel
	}
	gzipWriter.SetConcurrency(500000, runtime.GOMAXPROCS(-1))
	return gzipWriter, nil
}
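
// Illustrative sketch (not part of the original file): compressing a single
// file through one of the writer constructors above, mirroring the default
// (non-archive) branch of PostProcess. The paths, the choice of lz4, and the
// function name are hypothetical.
func exampleCompressFile(src, dst string) error {
	in, err := os.Open(src)
	if err != nil {
		return err
	}
	defer in.Close()

	out, err := os.Create(dst)
	if err != nil {
		return err
	}
	defer out.Close()

	// pgzip.DefaultCompression (-1) is what Configure stores when no explicit
	// level is configured.
	w, err := makeLZ4Writer(out, pgzip.DefaultCompression)
	if err != nil {
		return err
	}
	defer w.Close()

	_, err = io.Copy(w, in)
	return err
}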

// createTarArchive writes each input file into a tar stream on output.
func createTarArchive(files []string, output io.WriteCloser) error {
	archive := tar.NewWriter(output)
	defer archive.Close()

	for _, path := range files {
		file, err := os.Open(path)
		if err != nil {
			return fmt.Errorf("Unable to read file %s: %s", path, err)
		}
		defer file.Close()

		fi, err := file.Stat()
		if err != nil {
			return fmt.Errorf("Unable to get fileinfo for %s: %s", path, err)
		}

		header, err := tar.FileInfoHeader(fi, path)
		if err != nil {
			return fmt.Errorf("Failed to create tar header for %s: %s", path, err)
		}

		// workaround for archive format on go >=1.10
		setHeaderFormat(header)

		if err := archive.WriteHeader(header); err != nil {
			return fmt.Errorf("Failed to write tar header for %s: %s", path, err)
		}

		if _, err := io.Copy(archive, file); err != nil {
			return fmt.Errorf("Failed to copy %s data to archive: %s", path, err)
		}
	}
	return nil
}
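
// Illustrative sketch (not part of the original file): producing a .tar.gz by
// chaining a pgzip writer with createTarArchive, as PostProcess does for the
// "tar" archive case. The function name and target path are hypothetical.
func exampleTarGz(files []string, target string) error {
	out, err := os.Create(target)
	if err != nil {
		return err
	}
	defer out.Close()

	gz, err := makePgzipWriter(out, pgzip.DefaultCompression)
	if err != nil {
		return err
	}
	defer gz.Close()

	return createTarArchive(files, gz)
}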

// createZipArchive writes each input file into a zip stream on output.
func createZipArchive(files []string, output io.WriteCloser) error {
	archive := zip.NewWriter(output)
	defer archive.Close()

	for _, path := range files {
		path = filepath.ToSlash(path)

		source, err := os.Open(path)
		if err != nil {
			return fmt.Errorf("Unable to read file %s: %s", path, err)
		}
		defer source.Close()

		target, err := archive.Create(path)
		if err != nil {
			return fmt.Errorf("Failed to add zip header for %s: %s", path, err)
		}

		_, err = io.Copy(target, source)
		if err != nil {
			return fmt.Errorf("Failed to copy %s data to archive: %s", path, err)
		}
	}
	return nil
}
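
// Illustrative sketch (not part of the original file): writing a .zip with
// createZipArchive. The zip writer applies its own (deflate) compression, so
// no compressing writer is layered on top, matching the "zip" case in
// PostProcess. The function name and target path are hypothetical.
func exampleZip(files []string, target string) error {
	out, err := os.Create(target)
	if err != nil {
		return err
	}
	defer out.Close()

	return createZipArchive(files, out)
}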