github.com/clly/consul@v1.4.5/connect/proxy/proxy.go

package proxy

import (
	"crypto/x509"
	"log"

	"github.com/hashicorp/consul/api"
	"github.com/hashicorp/consul/connect"
	"github.com/hashicorp/consul/lib"
)

// Proxy implements the built-in connect proxy.
type Proxy struct {
	client     *api.Client
	cfgWatcher ConfigWatcher
	stopChan   chan struct{}
	logger     *log.Logger
	service    *connect.Service
}

// New returns a proxy with the given configuration source.
//
// The ConfigWatcher can be used to update the configuration of the proxy.
// Whenever a new configuration is detected, the proxy will reconfigure itself.
func New(client *api.Client, cw ConfigWatcher, logger *log.Logger) (*Proxy, error) {
	return &Proxy{
		client:     client,
		cfgWatcher: cw,
		stopChan:   make(chan struct{}),
		logger:     logger,
	}, nil
}
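
// The constructor only wires dependencies together; nothing runs until Serve
// is called. The sketch below is illustrative only (not part of the original
// file): runProxy is a hypothetical helper that assumes the caller already
// has an api.Client, a ConfigWatcher implementation, a *log.Logger and its
// own shutdown channel.
func runProxy(client *api.Client, cw ConfigWatcher, logger *log.Logger, stopCh <-chan struct{}) error {
	p, err := New(client, cw, logger)
	if err != nil {
		return err
	}

	// Serve blocks until a fatal error occurs or the proxy is closed, so run
	// it in its own goroutine and collect the result on a channel.
	errCh := make(chan error, 1)
	go func() { errCh <- p.Serve() }()

	select {
	case err := <-errCh:
		return err
	case <-stopCh:
		// Close must be called exactly once; it stops Serve and terminates
		// all active connections.
		p.Close()
		return <-errCh
	}
}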

// Serve the proxy instance until a fatal error occurs or the proxy is closed.
func (p *Proxy) Serve() error {
	var cfg *Config

	// failCh is used to stop Serve and return an error from another goroutine
	// we spawn.
	failCh := make(chan error, 1)

	// Watch for config changes (initial setup happens on the first "change").
	for {
		select {
		case err := <-failCh:
			// Don't log here; we log with better context at the point where the
			// error is written to the channel.
			return err

		case newCfg := <-p.cfgWatcher.Watch():
			p.logger.Printf("[DEBUG] got new config")

			if cfg == nil {
				// Initial setup

				// Set up telemetry if configured
				_, err := lib.InitTelemetry(newCfg.Telemetry)
				if err != nil {
					p.logger.Printf("[ERR] proxy telemetry config error: %s", err)
				}

				// Set up the Service instance now that we know the target ID etc.
				service, err := newCfg.Service(p.client, p.logger)
				if err != nil {
					return err
				}
				p.service = service

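				// Wait asynchronously for the service to become ready (i.e. to have
				// received its initial leaf certificate and root CAs) before logging
				// the TLS identity and starting the public listener.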
				go func() {
					<-service.ReadyWait()
					p.logger.Printf("[INFO] Proxy loaded config and ready to serve")
					tcfg := service.ServerTLSConfig()
					cert, _ := tcfg.GetCertificate(nil)
					leaf, _ := x509.ParseCertificate(cert.Certificate[0])
					p.logger.Printf("[INFO] TLS Identity: %s", leaf.URIs[0])
					roots, err := connect.CommonNamesFromCertPool(tcfg.RootCAs)
					if err != nil {
						p.logger.Printf("[ERR] Failed to parse root subjects: %s", err)
					} else {
						p.logger.Printf("[INFO] TLS Roots   : %v", roots)
					}

					// Only start a listener if we have a port set. This allows
					// the configuration to disable our public listener.
					if newCfg.PublicListener.BindPort != 0 {
						newCfg.PublicListener.applyDefaults()
						l := NewPublicListener(p.service, newCfg.PublicListener, p.logger)
						err = p.startListener("public listener", l)
						if err != nil {
							// This should probably be fatal.
							p.logger.Printf("[ERR] failed to start public listener: %s", err)
							failCh <- err
						}
					}
				}()
			}

			// TODO(banks) update/remove upstreams properly based on a diff with
			// current. Can store a map of uc.String() to Listener here and then
			// use it to only start one of each and stop/modify if changes occur.
			for _, uc := range newCfg.Upstreams {
				uc.applyDefaults()

				if uc.LocalBindPort < 1 {
					p.logger.Printf("[ERR] upstream %s has no local_bind_port. "+
						"Can't start upstream.", uc.String())
					continue
				}

				l := NewUpstreamListener(p.service, p.client, uc, p.logger)
				err := p.startListener(uc.String(), l)
				if err != nil {
					p.logger.Printf("[ERR] failed to start upstream %s: %s", uc.String(),
						err)
				}
			}
			cfg = newCfg

		case <-p.stopChan:
			return nil
		}
	}
}
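
// Serve treats every value received from the ConfigWatcher's channel as a
// complete (re)configuration. As an illustration only (not part of the
// original file), a minimal watcher that pushes a single fixed config could
// look like the sketch below; it assumes the package's ConfigWatcher
// interface is simply Watch() <-chan *Config.
type staticConfigWatcher struct {
	ch chan *Config
}

// newStaticConfigWatcher buffers one config so Serve's first receive succeeds
// immediately.
func newStaticConfigWatcher(cfg *Config) *staticConfigWatcher {
	w := &staticConfigWatcher{ch: make(chan *Config, 1)}
	w.ch <- cfg
	return w
}

// Watch returns the channel Serve selects on for configuration updates.
func (w *staticConfigWatcher) Watch() <-chan *Config {
	return w.ch
}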

// startListener is run from the internal state machine loop
func (p *Proxy) startListener(name string, l *Listener) error {
	p.logger.Printf("[INFO] %s starting on %s", name, l.BindAddr())
	go func() {
		err := l.Serve()
		if err != nil {
			p.logger.Printf("[ERR] %s stopped with error: %s", name, err)
			return
		}
		p.logger.Printf("[INFO] %s stopped", name)
	}()

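	// Wait for the proxy to be stopped, then close the listener; closing it
	// causes the Serve goroutine above to return.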
	go func() {
		<-p.stopChan
		l.Close()
	}()

	return nil
}

// Close stops the proxy and terminates all active connections. It must be
// called only once.
func (p *Proxy) Close() {
	close(p.stopChan)
	if p.service != nil {
		p.service.Close()
	}
}