github.com/haraldrudell/parl@v0.4.176/nb-chan.go (about)

     1  /*
     2  © 2022–present Harald Rudell <harald.rudell@gmail.com> (https://haraldrudell.github.io/haraldrudell/)
     3  ISC License
     4  */
     5  
     6  package parl
     7  
     8  import (
     9  	"sync"
    10  	"sync/atomic"
    11  
    12  	"github.com/haraldrudell/parl/perrors"
    13  )
    14  
const (
	// defaultNBChanSize is the default allocation size, in T elements,
	// for the input and output queues when no size is configured
	// via [NBChan.SetAllocationSize]
	defaultNBChanSize = 10
)
    19  
    20  // NBChan is a non-blocking send, unbound-queue channel.
    21  //   - NBChan behaves like a channel and a thread-safe slice
    22  //   - — efficiency of sending and receiving multiple items at once
    23  //   - — ability to wait for items to become available
    24  //   - NBChan is initialization-free, thread-safe, idempotent and observable with panic-free and/or deferrable methods
    25  //   - values are sent non-blocking, panic free and error free using:
    26  //   - — [NBChan.Send] for single item
    27  //   - — [NBChan.SendMany] for any number of items
    28  //   - values are received from NBChan via:
    29  //   - — a Go receive channel returned by [NBChan.Ch] or
    30  //   - — fetched all, one or many at once using [NBChan.Get]
    31  //   - — for Get, values can be awaited using [NBChan.DataWaitCh]
    32  //   - [NBChanThreadType] provided to [NewNBChan] configures for performance:
    33  //   - — [NBChanNone]: highest throughput at lowest cpu load
    34  //   - — — cost is no channel sending values, ie. Ch is not available
    35  //   - — — only way to receive items is [NBChan.Get]
    36  //   - — — Get returns any number of items at once
    37  //   - — — wait is by [NBChan.DataWaitCh]
    38  //   - — — benefit is no thread
    39  //   - — [NBChanAlways] is higher throughput than regular thread
    40  //   - — — cost is thread is always running
    41  //   - — — Ch Get and DataWaitCh are all available
    42  //   - — with regular thread or [NBChanAlways]:
    43  //   - — — [NBChan.Ch] offers wait with channel receive
    44  //   - — — [NBChan.DataWaitCh] offers wait for data available
    45  //   - low-allocation throughput can be obtained by using NBChan to handle
    46  //     slices of items []Value. NBChan can then operate at near zero allocations.
    47  //   - NBChan has deferrable, panic-free, observable, idempotent close.
    48  //     The underlying channel is closed when:
    49  //   - — [NBChan.Close] is invoked and the channel is read to empty, ie. deferred close
    50  //   - — [NBChan.CloseNow] is invoked
    51  //   - NBChan is observable:
    52  //   - — [NBChan.DidClose] indicates whether Close or CloseNow has been invoked
    53  //   - — [NBChan.IsClosed] indicates whether the underlying channel has closed
    54  //   - — [NBChan.WaitForClose] is deferrable and panic-free and waits until the underlying channel has been closed.
    55  //   - — [NBChan.WaitForCloseCh] returns a channel that closes when the underlying channel closes
    56  //   - NBChan is designed for error-free operation and only has panics and close errors. All errors can be collected via:
    57  //   - — [NBChan.CloseNow] [NBChan.WaitForClose] or [NBChan.GetError]
    58  //   - NBChan has contention-separation between Send/SendMany and Get
    59  //   - NBChan used as an error channel avoids the sending thread blocking
    60  //     from a delayed or missing reader.
    61  //   - see also:
    62  //   - — [AwaitableSlice] unbound awaitable queue
    63  //   - — preferred for any situation, has parl.AwaitableCh receive and
    64  //     lacks close mechanic
    65  //   - — [NBRareChan] low-usage unbound channel
    66  //   - — use for channel receive of data when rarely used, ie.
    67  //     an error channel
    68  //
    69  // Usage:
    70  //
    71  //	var errCh parl.NBChan[error]
    72  //	go thread(&errCh)
    73  //	err, ok := <-errCh.Ch()
    74  //	errCh.WaitForClose()
    75  //	errCh.GetError()
    76  //	…
    77  //	func thread(errCh *parl.NBChan[error]) {
    78  //	defer errCh.Close() // non-blocking close effective on send complete
    79  //	var err error
    80  //	defer parl.Recover(parl."", &err, errCh.AddErrorProc)
    81  //	errCh.Ch() <- err // non-blocking
    82  //	if err = someFunc(); err != nil {
    83  //	err = perrors.Errorf("someFunc: %w", err)
    84  //	return
    85  //
    86  // Deprecated: NBChan is replaced by [github.com/haraldrudell/parl.AwaitableSlice] for performance and
    87  // efficiency reasons. [github.com/haraldrudell/parl.ErrSlice] is an error container implementation
type NBChan[T any] struct {
	// [NBChan.Ch] returns this channel allowing consumers to await data items one at a time
	//	- NBChan must be configured to have thread
	closableChan ClosableChan[T]
	// size to use for [NBChan.newQueue]
	allocationSize atomic.Uint64
	// number of items held by NBChan, updated at any time
	//	- [NBChan.sendThread] may hold one item
	//	- only incremented by Send SendMany while holding
	//		inputLock appending to input queue or handing value to thread.
	//		Increment may be delegated to always-thread
	//	- decreased by Get when removing from output buffer while
	//		holding outputLock
	//	- decremented by send-thread when value sent on channel
	//	- decremented by send-thread when detecting CloseNow
	//	- set to zero by CloseNow while holding outputLock
	//	- an on-demand thread or deferred-close always-thread may
	//		exit on observing unsent count zero
	unsentCount atomic.Uint64
	// number of pending [NBChan.Get] invocations
	//	- blocks [NBChan.sendThread] from fetching more values
	//	- awaitable via getsWait
	gets atomic.Uint64
	// holds thread waiting while gets > 0
	//	- [NBChan.CloseNow] uses getsWait to await Get conclusion
	//	- executeChClose uses getsWait to await Get conclusion
	//	- thread uses getsWait to reduce outputLock contention by
	//		not retrieving values while Get invocations in progress or holding at lock
	getsWait PeriodWaiter
	// number of pending [NBChan.Send] [NBChan.SendMany] invocations
	//	- awaitable via sendsWait
	sends atomic.Uint64
	// prevents thread from exiting while Send SendMany active
	//	- [NBChan.CloseNow] uses sendsWait to await Send SendMany conclusion
	//	- executeChClose uses sendsWait to await Send SendMany conclusion
	//	- thread uses sendsWait to await the conclusion of possible Send SendMany before
	//		checking for another item
	sendsWait PeriodWaiter
	// capacity of [NBChan.inputQueue]
	//	- written behind inputLock
	inputCapacity atomic.Uint64
	// capacity of [NBChan.outputQueue]
	//	- written behind outputLock
	outputCapacity atomic.Uint64
	// indicates threadless NBChan
	//	- from [NBChanNone] or [NBChan.SetNoThread]
	//	- [NBChan.Ch] is unavailable
	//	- [NBChan.DataWaitCh] is used for wait
	isNoThread atomic.Bool
	// a channel that closes when data is available
	dataWaitCh atomic.Pointer[chan struct{}]
	// makes data channel wait operations executing serially
	availableLock sync.Mutex
	// written behind availableLock
	isDataAvailable atomic.Bool
	// indicates on-demand thread operation, ie. thread not always running
	//	- from [NBChanOnDemand] or [NBChan.SetOnDemandThread]
	isOnDemandThread atomic.Bool
	// wait mechanic with value for always-thread alert-wait
	//	- used in two-chan send with threadCh2
	alertChan LacyChan[*T]
	// second channel for two-chan send with threadCh
	alertChan2       LacyChan[struct{}]
	alertChan2Active atomic.Pointer[chan struct{}]
	// tcAlertActive ensures one alert action per alert wait
	//	- sendThreadWaitForAlert: sets to true while awaiting
	//	- tcAlertThread: picks winner for alerting
	//	- two-chan send with threadCh threadCh2
	tcAlertActive atomic.Bool
	// true if a thread was ever launched
	//	- [NBChan.ThreadStatus] uses tcDidLaunchThread to distinguish between
	//		NBChanNoLaunch and NBChanRunning
	tcDidLaunchThread atomic.Bool
	// tcThreadLock atomizes tcRunningThread with other actions:
	//	- tcThreadLock enforces order so that no thread is created after CloseNow
	//	- tcStartThreadWinner: atomize isCloseNow detection with setting tcRunningThread to true
	//	- selectCloseNowWinner: atomize tcRunningThread read with isCloseNow and isCloseInvoked set to true
	//	- selectCloseWinner: atomize tcRunningThread read with setting isCloseInvoked to true
	//	- sendThreadExitCheck: make setting tcRunningThread to false mutually exclusive with other operations
	//	- sendThreadIsCloseNow: atomize isCloseNow detection with setting tcRunningThread to false
	tcThreadLock sync.Mutex
	// tcRunningThread indicates that [NBChan.sendThread] is about to be created or running
	//	- set to true when background decides to launch the thread
	//	- set to false when:
	//	- — send-thread detects CloseNow
	//	- — an on-demand thread or a deferred-close always-thread encountering:
	//	- — unsent-count zero with no ongoing Send SendMany
	//	- selects winner to invoke [NBChan.tcStartThread]
	//	- written behind tcThreadLock
	tcRunningThread atomic.Bool
	// tcThreadExitAwaitable makes the thread awaitable
	//	- tcStartThreadWinner: arms or re-arms behind tcThreadLock
	//	- triggered by thread on exit
	//	- [NBChan.CloseNow] awaits tcThreadExitAwaitable to ensure thread exit
	//	- used in two-chan receive with tcState for awaiting static thread-state
	tcThreadExitAwaitable CyclicAwaitable
	// tcSendBlock true indicates that send-thread has value
	// available for two-chan receive from underlying channel and collectChan
	tcSendBlock atomic.Bool
	// collectorLock ensures any alert-thread will not be alerted while
	// Get is in progress
	//	- preGet: on transition 0 to 1 Get, acquires collectorLock to establish Get in progress
	//	- tcAlertThread: atomizes observing gets zero with alert operation
	collectorLock sync.Mutex
	// collectChan is used in two-chan receive with underlying channel
	// by tcCollectThreadValue
	collectChan LacyChan[struct{}]
	// collectChanActive is the channel being used by two-chan receive
	// from underlying channel
	collectChanActive atomic.Pointer[chan struct{}]
	// inputLock makes inputQueue thread-safe
	//	- used by: [NBChan.CloseNow] swapQueues [NBChan.NBChanState]
	//		[NBChan.Scavenge] [NBChan.Send] [NBChan.SendMany]
	//		[NBChan.SetAllocationSize]
	//	- isCloseComplete acquires inputLock to ensure that Send SendMany
	//		have ceased and that no further invocations will commence
	inputLock sync.Mutex
	// inputQueue holds items from Send SendMany
	//	- access behind inputLock
	//	- one additional item may be with sendThread
	inputQueue []T
	// isCloseInvoked selects the Close winner thread and provides Close invocation wait
	//	- isCloseInvoked.IsWinner selects the winner of Close or CloseNow
	//	- for Close, close may be deferred while NBChan is not empty
	//	- written behind tcThreadLock to ensure no further thread launches
	isCloseInvoked OnceCh
	// isCloseNow selects the CloseNow winner thread
	//	- written inside threadLock
	isCloseNow OnceCh
	// mechanic to wait for underlying channel close complete
	//	- the underlying channel is closed by:
	//	- — non-deferred Close
	//	- — send-thread in deferred Close
	//	- CloseNow
	waitForClose Awaitable
	// tcProgressLock atomizes tcProgressRequired updates with their justifying observation
	tcProgressLock sync.Mutex
	// tcProgressRequired indicates that thread progress must be secured
	//	- tcAddProgress: set to true by Send SendMany when adding from unsent count zero
	//	- sendThreadZero: set to true by send-thread when taking action on unsent count zero
	//	- tcLaunchProgress: set to false on obtaining thread launch permission
	//	- tcAlertProgress: set to false on successful alert
	//	- tcThreadStateProgress: set to false on observing any thread state but NBChanAlert
	//	- write behind tcProgressLock
	tcProgressRequired atomic.Bool
	// tcProgressRaised notes intermediate events:
	//	- send-thread taking action on unsent count zero
	//	- Send SendMany increasing unsent count from zero
	tcProgressRaised atomic.Bool
	// tcAwaitProgressLock makes tcAwaitProgress a critical section
	//	- only one thread at a time may await the next static thread state
	tcAwaitProgressLock sync.Mutex
	// tcGetProgressLock atomizes read of tcProgressRequired and pending Get
	//	- this ensures that when progress is required while Get in progress,
	//		action is guaranteed by Get on the last invocation ending
	tcGetProgressLock sync.Mutex
	// tcState is a channel that sends state names when thread is in static hold
	//	- returned by [NBChan.StateCh]
	tcState atomic.Pointer[chan NBChanTState]
	// outputLock makes outputQueue thread-safe
	//	- must not be acquired while holding inputLock
	//	- used by: [NBChan.Get] [NBChan.CloseNow] [NBChan.NBChanState]
	//		[NBChan.Scavenge] [NBChan.SetAllocationSize]
	//	- used by thread to obtain next value when Get not in progress
	outputLock sync.Mutex
	// behind outputLock: outputQueue initial state, sliced to zero length
	outputQueue0 []T
	// behind outputLock: outputQueue sliced off from low to high indexes
	outputQueue []T
	// thread panics and channel close errors
	perrors.ParlError
}
   261  
   262  // NewNBChan returns a non-blocking trillion-size buffer channel.
   263  //   - NewNBChan allows initialization based on an existing channel.
   264  //   - NBChan does not need initialization and can be used like:
   265  //
   266  // Usage:
   267  //
   268  //	var nbChan NBChan[error]
   269  //	go thread(&nbChan)
   270  func NewNBChan[T any](threadType ...NBChanThreadType) (nbChan *NBChan[T]) {
   271  	n := NBChan[T]{}
   272  	if len(threadType) > 0 {
   273  		switch threadType[0] {
   274  		case NBChanOnDemand:
   275  			n.isOnDemandThread.Store(true)
   276  		case NBChanNone:
   277  			n.isNoThread.Store(true)
   278  		}
   279  	}
   280  	return &n
   281  }
   282  
   283  // SetAllocationSize sets the initial element size of the two queues. Thread-safe
   284  //   - NBChan allocates two queues of size which may be enlarged by item counts
   285  //   - supports functional chaining
   286  //   - 0 or less does nothing
   287  func (n *NBChan[T]) SetAllocationSize(size int) (nb *NBChan[T]) {
   288  	nb = n
   289  	if size <= 0 {
   290  		return // noop return
   291  	}
   292  	n.allocationSize.Store(uint64(size))
   293  	n.ensureInput(size)
   294  	n.ensureOutput(size)
   295  	return
   296  }
   297  
   298  // SetOnDemandThread configures [NBChanAlways] operation
   299  func (n *NBChan[T]) SetOnDemandThread() (nb *NBChan[T]) {
   300  	nb = n
   301  	n.isOnDemandThread.Store(true)
   302  	n.isNoThread.Store(false)
   303  	return
   304  }
   305  
   306  // SetNoThread configures [NBChanNone] operation
   307  func (n *NBChan[T]) SetNoThread() {
   308  	n.isNoThread.Store(true)
   309  	n.isOnDemandThread.Store(false)
   310  }
   311  
const (
	// AwaitThread as argument to [NBChan.ThreadStatus] causes it to
	// block until a static thread state or thread exit is observed
	AwaitThread = true
)
   316  
// ThreadStatus indicates the current status of a possible thread
//   - await [AwaitThread]: block until a blocked thread state or
//     thread exit can be reported
//   - with no argument: a non-blocking snapshot that may also report
//     NBChanRunning or NBChanNoLaunch
func (n *NBChan[T]) ThreadStatus(await ...bool) (threadStatus NBChanTState) {
	if len(await) > 0 && await[0] {
		// await thread status
		//	- a thread that was never launched cannot produce a state:
		//		report NBChanNoLaunch without blocking
		if !n.tcDidLaunchThread.Load() {
			threadStatus = NBChanNoLaunch
			return
		}
		// block until either channel becomes ready
		select {
		// status received from a thread blocked in a static state
		//	- NBChanSendBlock NBChanAlert NBChanGets NBChanSends
		case threadStatus = <-n.stateCh():
			// the thread exited: tcThreadExitAwaitable was triggered
		case <-n.tcThreadExitAwaitable.Ch():
			threadStatus = NBChanExit
		}
		return
	}

	// obtain current thread status without blocking
	//	- default case handles a thread that is running but not in a static state
	select {
	// status received from a thread blocked in a static state
	//	- NBChanSendBlock NBChanAlert NBChanGets NBChanSends
	case threadStatus = <-n.stateCh():
		// the thread exited: tcThreadExitAwaitable was triggered
	case <-n.tcThreadExitAwaitable.Ch():
		threadStatus = NBChanExit
	default:
		// thread is somewhere else: executing or never launched
		if n.tcDidLaunchThread.Load() {
			threadStatus = NBChanRunning
		} else {
			threadStatus = NBChanNoLaunch
		}
	}
	return
}
   355  
   356  // Ch obtains the receive-only channel
   357  //   - values can be retrieved using this channel or [NBChan.Get]
   358  //   - not available for [NBChanNone] NBChan
   359  func (n *NBChan[T]) Ch() (ch <-chan T) { return n.closableChan.Ch() }
   360  
   361  // Count returns number of unsent values
   362  func (n *NBChan[T]) Count() (unsentCount int) { return int(n.unsentCount.Load()) }
   363  
   364  // Capacity returns size of allocated queues
   365  func (n *NBChan[T]) Capacity() (capacity int) {
   366  	return int(n.inputCapacity.Load() + n.outputCapacity.Load())
   367  }
   368  
   369  // DataWaitCh indicates if the NBChan object has data available
   370  //   - the initial state of the returned channel is open, ie. receive will block
   371  //   - upon data available the channel closes, ie. receive will not block
   372  //   - to again wait, DataWaitCh should be invoked again to get a current channel
   373  //   - upon CloseNow or Close and empty, the returned channel is closed
   374  //     and receive will not block
   375  //   - alternative is [NBChan.Ch] when NBChan configured with threading
   376  //   - DataWaitCh offers more efficient operation that with a thread
   377  func (n *NBChan[T]) DataWaitCh() (ch AwaitableCh) { return n.updateDataAvailable() }
   378  
   379  // DidClose indicates if Close or CloseNow was invoked
   380  //   - the channel may remain open until the last item has been read
   381  //   - [NBChan.CloseNow] immediately closes the channel discarding onread items
   382  //   - [NBChan.IsClosed] checks if the channel is closed
   383  func (n *NBChan[T]) DidClose() (didClose bool) { return n.isCloseInvoked.IsInvoked() }
   384  
   385  // IsClosed indicates whether the channel has actually closed.
   386  func (n *NBChan[T]) IsClosed() (isClosed bool) { return n.closableChan.IsClosed() }