github.com/Jeffail/benthos/v3@v3.65.0/lib/output/azure_blob_storage.go

package output

import (
	"github.com/Jeffail/benthos/v3/internal/docs"
	"github.com/Jeffail/benthos/v3/lib/log"
	"github.com/Jeffail/benthos/v3/lib/metrics"
	"github.com/Jeffail/benthos/v3/lib/output/writer"
	"github.com/Jeffail/benthos/v3/lib/types"
)

//------------------------------------------------------------------------------

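// init registers the azure_blob_storage output spec along with its deprecated
// blob_storage alias.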
func init() {
	Constructors[TypeAzureBlobStorage] = TypeSpec{
		constructor: fromSimpleConstructor(NewAzureBlobStorage),
		Status:      docs.StatusBeta,
		Version:     "3.36.0",
		Summary: `
Sends message parts as objects to an Azure Blob Storage Account container. Each
object is uploaded with the filename specified with the ` + "`path`" + `
field.`,
		Description: `
Only one authentication method is required, either ` + "`storage_connection_string`" + ` or ` + "`storage_account` and `storage_access_key`" + `. If both are set then ` + "`storage_connection_string`" + ` is given priority.

In order to have a different path for each object you should use function
interpolations described [here](/docs/configuration/interpolation#bloblang-queries), which are
calculated per message of a batch.`,
		Async: true,
		FieldSpecs: docs.FieldSpecs{
			docs.FieldCommon(
				"storage_account",
				"The storage account to upload messages to. This field is ignored if `storage_connection_string` is set.",
			),
			docs.FieldCommon(
				"storage_access_key",
				"The storage account access key. This field is ignored if `storage_connection_string` is set.",
			),
			docs.FieldCommon(
				"storage_sas_token",
				"The storage account SAS token. This field is ignored if `storage_connection_string` or `storage_access_key` are set.",
			).AtVersion("3.38.0"),
			docs.FieldCommon(
				"storage_connection_string",
				"A storage account connection string. This field is required if `storage_account` and `storage_access_key` / `storage_sas_token` are not set.",
			),
			docs.FieldAdvanced("public_access_level", `The container's public access level. The default value is `+"`PRIVATE`"+`.`).HasOptions(
				"PRIVATE", "BLOB", "CONTAINER",
			),
			docs.FieldCommon(
				"container", "The container to upload messages to.",
				`messages-${!timestamp("2006")}`,
			).IsInterpolated(),
			docs.FieldCommon(
				"path", "The path of each message to upload.",
				`${!count("files")}-${!timestamp_unix_nano()}.json`,
				`${!meta("kafka_key")}.json`,
				`${!json("doc.namespace")}/${!json("doc.id")}.json`,
			).IsInterpolated(),
			docs.FieldAdvanced("blob_type", `Block and Append blobs are comprised of blocks, and each blob can support up to 50,000 blocks. The default value is `+"`BLOCK`"+`.`).HasOptions(
				"BLOCK", "APPEND",
			).IsInterpolated(),
			docs.FieldCommon("max_in_flight", "The maximum number of messages to have in flight at a given time. Increase this to improve throughput."),
		},
		Categories: []Category{
			CategoryServices,
			CategoryAzure,
		},
	}
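
	// An illustrative YAML config built from the fields documented above. The
	// storage account, access key variable and max_in_flight values below are
	// placeholders chosen for the sake of example, not defaults taken from this
	// spec:
	//
	//   output:
	//     azure_blob_storage:
	//       storage_account: examplestorage
	//       storage_access_key: "${AZURE_STORAGE_KEY}"
	//       container: messages-${!timestamp("2006")}
	//       path: ${!count("files")}-${!timestamp_unix_nano()}.json
	//       blob_type: BLOCK
	//       max_in_flight: 64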

	Constructors[TypeBlobStorage] = TypeSpec{
		constructor: fromSimpleConstructor(newDeprecatedBlobStorage),
		Status:      docs.StatusDeprecated,
		Summary:     "This component has been renamed to [`azure_blob_storage`](/docs/components/outputs/azure_blob_storage).",
		Async:       true,
		FieldSpecs: docs.FieldSpecs{
			docs.FieldCommon(
				"storage_account",
				"The storage account to upload messages to. This field is ignored if `storage_connection_string` is set.",
			),
			docs.FieldCommon(
				"storage_access_key",
				"The storage account access key. This field is ignored if `storage_connection_string` is set.",
			),
			docs.FieldCommon(
				"storage_sas_token",
				"The storage account SAS token. This field is ignored if `storage_connection_string` or `storage_access_key` are set.",
			),
			docs.FieldCommon(
				"storage_connection_string",
				"A storage account connection string. This field is required if `storage_account` and `storage_access_key` / `storage_sas_token` are not set.",
			),
			docs.FieldAdvanced("public_access_level", `The container's public access level. The default value is `+"`PRIVATE`"+`.`).HasOptions(
				"PRIVATE", "BLOB", "CONTAINER",
			),
			docs.FieldCommon(
				"container", "The container to upload messages to.",
				`messages-${!timestamp("2006")}`,
			).IsInterpolated(),
			docs.FieldCommon(
				"path", "The path of each message to upload.",
				`${!count("files")}-${!timestamp_unix_nano()}.json`,
				`${!meta("kafka_key")}.json`,
				`${!json("doc.namespace")}/${!json("doc.id")}.json`,
			).IsInterpolated(),
			docs.FieldAdvanced("blob_type", `Block and Append blobs are comprised of blocks, and each blob can support up to 50,000 blocks. The default value is `+"`BLOCK`"+`.`).HasOptions(
				"BLOCK", "APPEND",
			).IsInterpolated(),
			docs.FieldCommon("max_in_flight", "The maximum number of messages to have in flight at a given time. Increase this to improve throughput."),
		},
		Categories: []Category{
			CategoryServices,
			CategoryAzure,
		},
	}
}

//------------------------------------------------------------------------------

// NewAzureBlobStorage creates a new AzureBlobStorage output type.
func NewAzureBlobStorage(conf Config, mgr types.Manager, log log.Modular, stats metrics.Type) (Type, error) {
	blobStorage, err := writer.NewAzureBlobStorage(conf.AzureBlobStorage, log, stats)
	if err != nil {
		return nil, err
	}
	a, err := NewAsyncWriter(
		TypeAzureBlobStorage, conf.AzureBlobStorage.MaxInFlight, blobStorage, log, stats,
	)
	if err != nil {
		return nil, err
	}
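	// OnlySinglePayloads expands message batches into individual payloads so
	// that each message of a batch is uploaded as its own blob.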
	return OnlySinglePayloads(a), nil
}

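// newDeprecatedBlobStorage creates the old blob_storage output, which is kept
// as a deprecated alias of azure_blob_storage.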
func newDeprecatedBlobStorage(conf Config, mgr types.Manager, log log.Modular, stats metrics.Type) (Type, error) {
	blobStorage, err := writer.NewAzureBlobStorage(conf.BlobStorage, log, stats)
	if err != nil {
		return nil, err
	}
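	// With a max_in_flight of one the fully synchronous writer is used,
	// otherwise writes are dispatched through the async writer.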
	if conf.BlobStorage.MaxInFlight == 1 {
		return NewWriter(
			TypeBlobStorage, blobStorage, log, stats,
		)
	}
	return NewAsyncWriter(
		TypeBlobStorage, conf.BlobStorage.MaxInFlight, blobStorage, log, stats,
	)
}

//------------------------------------------------------------------------------