github.com/vtorhonen/terraform@v0.9.0-beta2.0.20170307220345-5d894e4ffda7/builtin/providers/archive/data_source_archive_file.go (about) 1 package archive 2 3 import ( 4 "bytes" 5 "crypto/md5" 6 "crypto/sha1" 7 "crypto/sha256" 8 "encoding/base64" 9 "encoding/hex" 10 "fmt" 11 "io/ioutil" 12 "os" 13 "path" 14 15 "github.com/hashicorp/terraform/helper/hashcode" 16 "github.com/hashicorp/terraform/helper/schema" 17 ) 18 19 func dataSourceFile() *schema.Resource { 20 return &schema.Resource{ 21 Read: dataSourceFileRead, 22 23 Schema: map[string]*schema.Schema{ 24 "type": &schema.Schema{ 25 Type: schema.TypeString, 26 Required: true, 27 ForceNew: true, 28 }, 29 "source": &schema.Schema{ 30 Type: schema.TypeSet, 31 Optional: true, 32 Computed: true, 33 Elem: &schema.Resource{ 34 Schema: map[string]*schema.Schema{ 35 "content": &schema.Schema{ 36 Type: schema.TypeString, 37 Required: true, 38 ForceNew: true, 39 }, 40 "filename": &schema.Schema{ 41 Type: schema.TypeString, 42 Required: true, 43 ForceNew: true, 44 }, 45 }, 46 }, 47 ConflictsWith: []string{"source_file", "source_dir", "source_content", "source_content_filename"}, 48 Set: func(v interface{}) int { 49 var buf bytes.Buffer 50 m := v.(map[string]interface{}) 51 buf.WriteString(fmt.Sprintf("%s-", m["filename"].(string))) 52 buf.WriteString(fmt.Sprintf("%s-", m["content"].(string))) 53 return hashcode.String(buf.String()) 54 }, 55 }, 56 "source_content": &schema.Schema{ 57 Type: schema.TypeString, 58 Optional: true, 59 ForceNew: true, 60 ConflictsWith: []string{"source_file", "source_dir"}, 61 }, 62 "source_content_filename": &schema.Schema{ 63 Type: schema.TypeString, 64 Optional: true, 65 ForceNew: true, 66 ConflictsWith: []string{"source_file", "source_dir"}, 67 }, 68 "source_file": &schema.Schema{ 69 Type: schema.TypeString, 70 Optional: true, 71 ForceNew: true, 72 ConflictsWith: []string{"source_content", "source_content_filename", "source_dir"}, 73 }, 74 "source_dir": &schema.Schema{ 75 Type: schema.TypeString, 76 Optional: true, 77 ForceNew: true, 78 ConflictsWith: []string{"source_content", "source_content_filename", "source_file"}, 79 }, 80 "output_path": &schema.Schema{ 81 Type: schema.TypeString, 82 Required: true, 83 }, 84 "output_size": &schema.Schema{ 85 Type: schema.TypeInt, 86 Computed: true, 87 ForceNew: true, 88 }, 89 "output_sha": &schema.Schema{ 90 Type: schema.TypeString, 91 Computed: true, 92 ForceNew: true, 93 Description: "SHA1 checksum of output file", 94 }, 95 "output_base64sha256": &schema.Schema{ 96 Type: schema.TypeString, 97 Computed: true, 98 ForceNew: true, 99 Description: "Base64 Encoded SHA256 checksum of output file", 100 }, 101 "output_md5": &schema.Schema{ 102 Type: schema.TypeString, 103 Computed: true, 104 ForceNew: true, 105 Description: "MD5 of output file", 106 }, 107 }, 108 } 109 } 110 111 func dataSourceFileRead(d *schema.ResourceData, meta interface{}) error { 112 outputPath := d.Get("output_path").(string) 113 114 outputDirectory := path.Dir(outputPath) 115 if outputDirectory != "" { 116 if _, err := os.Stat(outputDirectory); err != nil { 117 if err := os.MkdirAll(outputDirectory, 0755); err != nil { 118 return err 119 } 120 } 121 } 122 123 if err := archive(d); err != nil { 124 return err 125 } 126 127 // Generate archived file stats 128 fi, err := os.Stat(outputPath) 129 if err != nil { 130 return err 131 } 132 133 sha1, base64sha256, md5, err := genFileShas(outputPath) 134 if err != nil { 135 136 return fmt.Errorf("could not generate file checksum sha256: %s", err) 137 } 138 d.Set("output_sha", sha1) 139 d.Set("output_base64sha256", base64sha256) 140 d.Set("output_md5", md5) 141 142 d.Set("output_size", fi.Size()) 143 d.SetId(d.Get("output_sha").(string)) 144 145 return nil 146 } 147 148 func archive(d *schema.ResourceData) error { 149 archiveType := d.Get("type").(string) 150 outputPath := d.Get("output_path").(string) 151 152 archiver := getArchiver(archiveType, outputPath) 153 if archiver == nil { 154 return fmt.Errorf("archive type not supported: %s", archiveType) 155 } 156 157 if dir, ok := d.GetOk("source_dir"); ok { 158 if err := archiver.ArchiveDir(dir.(string)); err != nil { 159 return fmt.Errorf("error archiving directory: %s", err) 160 } 161 } else if file, ok := d.GetOk("source_file"); ok { 162 if err := archiver.ArchiveFile(file.(string)); err != nil { 163 return fmt.Errorf("error archiving file: %s", err) 164 } 165 } else if filename, ok := d.GetOk("source_content_filename"); ok { 166 content := d.Get("source_content").(string) 167 if err := archiver.ArchiveContent([]byte(content), filename.(string)); err != nil { 168 return fmt.Errorf("error archiving content: %s", err) 169 } 170 } else if v, ok := d.GetOk("source"); ok { 171 vL := v.(*schema.Set).List() 172 content := make(map[string][]byte) 173 for _, v := range vL { 174 src := v.(map[string]interface{}) 175 content[src["filename"].(string)] = []byte(src["content"].(string)) 176 } 177 if err := archiver.ArchiveMultiple(content); err != nil { 178 return fmt.Errorf("error archiving content: %s", err) 179 } 180 } else { 181 return fmt.Errorf("one of 'source_dir', 'source_file', 'source_content_filename' must be specified") 182 } 183 return nil 184 } 185 186 func genFileShas(filename string) (string, string, string, error) { 187 data, err := ioutil.ReadFile(filename) 188 if err != nil { 189 return "", "", "", fmt.Errorf("could not compute file '%s' checksum: %s", filename, err) 190 } 191 h := sha1.New() 192 h.Write([]byte(data)) 193 sha1 := hex.EncodeToString(h.Sum(nil)) 194 195 h256 := sha256.New() 196 h256.Write([]byte(data)) 197 shaSum := h256.Sum(nil) 198 sha256base64 := base64.StdEncoding.EncodeToString(shaSum[:]) 199 200 md5 := md5.New() 201 md5.Write([]byte(data)) 202 md5Sum := hex.EncodeToString(md5.Sum(nil)) 203 204 return sha1, sha256base64, md5Sum, nil 205 }