Repository: https://github.com/caddyserver/caddy.git (mirror)

commit 078770a5a6
parent 518edd3cd4

telemetry: Record TLS ClientHellos by hash of key of structured data

Also improve handling of disabled metrics, and record TLS ClientHello in association with User-Agent

7 changed files with 211 additions and 77 deletions, across:
  caddy/caddymain
  caddyhttp/httpserver
  caddytls
  telemetry
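
What the title means in practice: each distinct TLS ClientHello is recorded once, keyed by a short hash of its structured fields, and further sightings only bump a counter. The toy program below is not code from this commit; it is a self-contained sketch of that pattern, with the real pieces being caddytls.ClientHelloInfo.Key, fastHash, telemetry.SetNested, and telemetry.AppendUnique in the hunks that follow.

package main

import (
	"fmt"
	"hash/fnv"
)

// hello stands in for the structured ClientHello data; the field set is
// trimmed down for illustration.
type hello struct {
	Version      uint16
	CipherSuites []uint16
}

// key derives a stable identifier the same general way the commit does:
// format the fields deterministically, then hash with FNV-32a
// (fast and non-cryptographic, like caddytls's fastHash).
func (h hello) key() string {
	f := fnv.New32a()
	fmt.Fprintf(f, "%x-%x", h.Version, h.CipherSuites)
	return fmt.Sprintf("%x", f.Sum32())
}

func main() {
	store := map[string]hello{} // stands in for telemetry.SetNested("tls_client_hello", key, h)
	count := map[string]int{}   // stands in for telemetry.AppendUnique("tls_client_hello_count", key)

	for _, h := range []hello{
		{Version: 0x0303, CipherSuites: []uint16{0xc02f, 0xc030}},
		{Version: 0x0303, CipherSuites: []uint16{0xc02f, 0xc030}}, // same hello seen again
	} {
		k := h.key()
		store[k] = h // stored once, keyed by hash
		count[k]++   // sightings are just counted
	}
	fmt.Println(len(store), "unique hello(s); counts:", count)
}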
caddy/caddymain

@@ -91,7 +91,10 @@ func Run() {

 	// initialize telemetry client
 	if enableTelemetry {
-		initTelemetry()
+		err := initTelemetry()
+		if err != nil {
+			mustLogFatalf("[ERROR] Initializing telemetry: %v", err)
+		}
 	} else if disabledMetrics != "" {
 		mustLogFatalf("[ERROR] Cannot disable specific metrics because telemetry is disabled")
 	}

@@ -293,7 +296,7 @@ func setCPU(cpu string) error {
 }

 // initTelemetry initializes the telemetry engine.
-func initTelemetry() {
+func initTelemetry() error {
 	uuidFilename := filepath.Join(caddy.AssetsPath(), "uuid")

 	newUUID := func() uuid.UUID {

@@ -329,7 +332,34 @@ func initTelemetry() {
 		}
 	}

-	telemetry.Init(id, strings.Split(disabledMetrics, ","))
+	// parse and check the list of disabled metrics
+	var disabledMetricsSlice []string
+	if len(disabledMetrics) > 0 {
+		if len(disabledMetrics) > 1024 {
+			// mitigate disk space exhaustion at the collection endpoint
+			return fmt.Errorf("too many metrics to disable")
+		}
+		disabledMetricsSlice = strings.Split(disabledMetrics, ",")
+		for i, metric := range disabledMetricsSlice {
+			if metric == "instance_id" || metric == "timestamp" || metric == "disabled_metrics" {
+				return fmt.Errorf("instance_id, timestamp, and disabled_metrics cannot be disabled")
+			}
+			if metric == "" {
+				disabledMetricsSlice = append(disabledMetricsSlice[:i], disabledMetricsSlice[i+1:]...)
+			}
+		}
+	}
+
+	// initialize telemetry
+	telemetry.Init(id, disabledMetricsSlice)
+
+	// if any metrics were disabled, report it
+	if len(disabledMetricsSlice) > 0 {
+		telemetry.Set("disabled_metrics", disabledMetricsSlice)
+		log.Printf("[NOTICE] The following telemetry metrics are disabled: %s", disabledMetrics)
+	}
+
+	return nil
 }

 const appName = "Caddy"
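
The initTelemetry hunk above adds validation for the user-supplied list of metrics to disable: the raw string is capped at 1024 bytes, the bookkeeping keys instance_id, timestamp, and disabled_metrics cannot be disabled, and empty entries are dropped. Below is a standalone sketch of that validation; the helper name parseDisabledMetrics is hypothetical, and unlike the commit (which splices empty entries out of the slice in place) this version filters into a new slice.

package main

import (
	"fmt"
	"strings"
)

// parseDisabledMetrics is a hypothetical standalone version of the
// validation that the commit inlines into initTelemetry: cap the raw
// input, reject reserved keys, and drop empty entries.
func parseDisabledMetrics(raw string) ([]string, error) {
	if raw == "" {
		return nil, nil
	}
	if len(raw) > 1024 {
		// mitigate disk space exhaustion at the collection endpoint
		return nil, fmt.Errorf("too many metrics to disable")
	}
	var out []string
	for _, metric := range strings.Split(raw, ",") {
		switch metric {
		case "instance_id", "timestamp", "disabled_metrics":
			return nil, fmt.Errorf("instance_id, timestamp, and disabled_metrics cannot be disabled")
		case "":
			continue // skip empty entries
		}
		out = append(out, metric)
	}
	return out, nil
}

func main() {
	fmt.Println(parseDisabledMetrics("tls_client_hello,,http_user_agent")) // [tls_client_hello http_user_agent] <nil>
	fmt.Println(parseDisabledMetrics("timestamp"))                         // [] error: cannot be disabled
}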
caddyhttp/httpserver

@@ -25,6 +25,7 @@ import (
 	"strings"
 	"sync"

+	"github.com/mholt/caddy/caddytls"
 	"github.com/mholt/caddy/telemetry"
 )

@@ -65,6 +66,9 @@ func (h *tlsHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {

 	ua := r.Header.Get("User-Agent")

+	// report this request's UA in connection with this ClientHello
+	go telemetry.AppendUnique("tls_client_hello_ua:"+info.Key(), ua)
+
 	var checked, mitm bool
 	if r.Header.Get("X-BlueCoat-Via") != "" || // Blue Coat (masks User-Agent header to generic values)
 		r.Header.Get("X-FCCKV2") != "" || // Fortinet

@@ -207,6 +211,11 @@ func (c *clientHelloConn) Read(b []byte) (n int, err error) {
 		c.listener.helloInfos[c.Conn.RemoteAddr().String()] = rawParsed
 		c.listener.helloInfosMu.Unlock()

+		// report this ClientHello to telemetry
+		chKey := rawParsed.Key()
+		go telemetry.SetNested("tls_client_hello", chKey, rawParsed)
+		go telemetry.AppendUnique("tls_client_hello_count", chKey)
+
 		c.readHello = true
 		return
 	}
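
Together with the tlsHandler hunk above, one connection now feeds three related telemetry entries that share the same hash key: the parsed hello itself (SetNested under tls_client_hello), a sighting count (AppendUnique on tls_client_hello_count), and the User-Agents observed with that hello (AppendUnique on tls_client_hello_ua:<key>). The toy program below only mimics the documented AppendUnique semantics (unique values whose appearances are counted) to show how the last two entries behave; the key and User-Agent strings are made up.

package main

import "fmt"

// countedSet mimics the behavior the telemetry package documents for
// AppendUnique: values are kept as a set, but each appearance is counted.
type countedSet map[string]int

func (s countedSet) appendUnique(value string) { s[value]++ }

func main() {
	// All entries for one ClientHello share the same hash key; this value
	// is made up and stands in for rawHelloInfo.Key().
	chKey := "1a2b3c4d"

	helloCount := countedSet{} // "tls_client_hello_count"
	uaForHello := countedSet{} // "tls_client_hello_ua:" + chKey

	// two requests over connections that presented the same ClientHello
	helloCount.appendUnique(chKey)
	helloCount.appendUnique(chKey)
	uaForHello.appendUnique("ExampleBrowser/1.0") // hypothetical UA strings
	uaForHello.appendUnique("ExampleBot/2.3")

	fmt.Println("tls_client_hello_count:", helloCount)
	fmt.Println("tls_client_hello_ua:"+chKey+":", uaForHello)
}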
@@ -227,6 +236,7 @@ func parseRawClientHello(data []byte) (info rawHelloInfo) {
 	if len(data) < 42 {
 		return
 	}
+	info.Version = uint16(data[4])<<8 | uint16(data[5])
 	sessionIDLen := int(data[38])
 	if sessionIDLen > 32 || len(data) < 39+sessionIDLen {
 		return

@@ -243,9 +253,9 @@ func parseRawClientHello(data []byte) (info rawHelloInfo) {
 	}
 	numCipherSuites := cipherSuiteLen / 2
 	// read in the cipher suites
-	info.cipherSuites = make([]uint16, numCipherSuites)
+	info.CipherSuites = make([]uint16, numCipherSuites)
 	for i := 0; i < numCipherSuites; i++ {
-		info.cipherSuites[i] = uint16(data[2+2*i])<<8 | uint16(data[3+2*i])
+		info.CipherSuites[i] = uint16(data[2+2*i])<<8 | uint16(data[3+2*i])
 	}
 	data = data[2+cipherSuiteLen:]
 	if len(data) < 1 {

@@ -256,7 +266,7 @@ func parseRawClientHello(data []byte) (info rawHelloInfo) {
 	if len(data) < 1+compressionMethodsLen {
 		return
 	}
-	info.compressionMethods = data[1 : 1+compressionMethodsLen]
+	info.CompressionMethods = data[1 : 1+compressionMethodsLen]

 	data = data[1+compressionMethodsLen:]

@@ -284,7 +294,7 @@ func parseRawClientHello(data []byte) (info rawHelloInfo) {
 		}

 		// record that the client advertised support for this extension
-		info.extensions = append(info.extensions, extension)
+		info.Extensions = append(info.Extensions, extension)

 		switch extension {
 		case extensionSupportedCurves:

@@ -297,10 +307,10 @@ func parseRawClientHello(data []byte) (info rawHelloInfo) {
 				return
 			}
 			numCurves := l / 2
-			info.curves = make([]tls.CurveID, numCurves)
+			info.Curves = make([]tls.CurveID, numCurves)
 			d := data[2:]
 			for i := 0; i < numCurves; i++ {
-				info.curves[i] = tls.CurveID(d[0])<<8 | tls.CurveID(d[1])
+				info.Curves[i] = tls.CurveID(d[0])<<8 | tls.CurveID(d[1])
 				d = d[2:]
 			}
 		case extensionSupportedPoints:

@@ -312,8 +322,8 @@ func parseRawClientHello(data []byte) (info rawHelloInfo) {
 			if length != l+1 {
 				return
 			}
-			info.points = make([]uint8, l)
-			copy(info.points, data[1:])
+			info.Points = make([]uint8, l)
+			copy(info.Points, data[1:])
 		}

 		data = data[length:]

@@ -364,18 +374,12 @@ func (l *tlsHelloListener) Accept() (net.Conn, error) {
 // by Durumeric, Halderman, et. al. in
 // "The Security Impact of HTTPS Interception":
 // https://jhalderm.com/pub/papers/interception-ndss17.pdf
-type rawHelloInfo struct {
-	cipherSuites       []uint16
-	extensions         []uint16
-	compressionMethods []byte
-	curves             []tls.CurveID
-	points             []uint8
-}
+type rawHelloInfo struct{ caddytls.ClientHelloInfo }

 // advertisesHeartbeatSupport returns true if info indicates
 // that the client supports the Heartbeat extension.
 func (info rawHelloInfo) advertisesHeartbeatSupport() bool {
-	for _, ext := range info.extensions {
+	for _, ext := range info.Extensions {
 		if ext == extensionHeartbeat {
 			return true
 		}

@@ -398,31 +402,31 @@ func (info rawHelloInfo) looksLikeFirefox() bool {
 	// Note: Firefox 55+ doesn't appear to advertise 0xFF03 (65283, short headers). It used to be between 5 and 13.
 	// Note: Firefox on Fedora (or RedHat) doesn't include ECC suites because of patent liability.
 	requiredExtensionsOrder := []uint16{23, 65281, 10, 11, 35, 16, 5, 13}
-	if !assertPresenceAndOrdering(requiredExtensionsOrder, info.extensions, true) {
+	if !assertPresenceAndOrdering(requiredExtensionsOrder, info.Extensions, true) {
 		return false
 	}

 	// We check for both presence of curves and their ordering.
 	requiredCurves := []tls.CurveID{29, 23, 24, 25}
-	if len(info.curves) < len(requiredCurves) {
+	if len(info.Curves) < len(requiredCurves) {
 		return false
 	}
 	for i := range requiredCurves {
-		if info.curves[i] != requiredCurves[i] {
+		if info.Curves[i] != requiredCurves[i] {
 			return false
 		}
 	}
-	if len(info.curves) > len(requiredCurves) {
+	if len(info.Curves) > len(requiredCurves) {
 		// newer Firefox (55 Nightly?) may have additional curves at end of list
 		allowedCurves := []tls.CurveID{256, 257}
 		for i := range allowedCurves {
-			if info.curves[len(requiredCurves)+i] != allowedCurves[i] {
+			if info.Curves[len(requiredCurves)+i] != allowedCurves[i] {
 				return false
 			}
 		}
 	}

-	if hasGreaseCiphers(info.cipherSuites) {
+	if hasGreaseCiphers(info.CipherSuites) {
 		return false
 	}

@@ -449,7 +453,7 @@ func (info rawHelloInfo) looksLikeFirefox() bool {
 		tls.TLS_RSA_WITH_AES_256_CBC_SHA,  // 0x35
 		tls.TLS_RSA_WITH_3DES_EDE_CBC_SHA, // 0xa
 	}
-	return assertPresenceAndOrdering(expectedCipherSuiteOrder, info.cipherSuites, false)
+	return assertPresenceAndOrdering(expectedCipherSuiteOrder, info.CipherSuites, false)
 }

 // looksLikeChrome returns true if info looks like a handshake

@@ -490,20 +494,20 @@ func (info rawHelloInfo) looksLikeChrome() bool {
 		TLS_DHE_RSA_WITH_AES_128_CBC_SHA: {}, // 0x33
 		TLS_DHE_RSA_WITH_AES_256_CBC_SHA: {}, // 0x39
 	}
-	for _, ext := range info.cipherSuites {
+	for _, ext := range info.CipherSuites {
 		if _, ok := chromeCipherExclusions[ext]; ok {
 			return false
 		}
 	}

 	// Chrome does not include curve 25 (CurveP521) (as of Chrome 56, Feb. 2017).
-	for _, curve := range info.curves {
+	for _, curve := range info.Curves {
 		if curve == 25 {
 			return false
 		}
 	}

-	if !hasGreaseCiphers(info.cipherSuites) {
+	if !hasGreaseCiphers(info.CipherSuites) {
 		return false
 	}

@@ -521,19 +525,19 @@ func (info rawHelloInfo) looksLikeEdge() bool {
 	// More specifically, the OCSP status request extension appears
 	// *directly* before the other two extensions, which occur in that
 	// order. (I contacted the authors for clarification and verified it.)
-	for i, ext := range info.extensions {
+	for i, ext := range info.Extensions {
 		if ext == extensionOCSPStatusRequest {
-			if len(info.extensions) <= i+2 {
+			if len(info.Extensions) <= i+2 {
 				return false
 			}
-			if info.extensions[i+1] != extensionSupportedCurves ||
-				info.extensions[i+2] != extensionSupportedPoints {
+			if info.Extensions[i+1] != extensionSupportedCurves ||
+				info.Extensions[i+2] != extensionSupportedPoints {
 				return false
 			}
 		}
 	}

-	for _, cs := range info.cipherSuites {
+	for _, cs := range info.CipherSuites {
 		// As of Feb. 2017, Edge does not have 0xff, but Avast adds it
 		if cs == scsvRenegotiation {
 			return false

@@ -544,7 +548,7 @@ func (info rawHelloInfo) looksLikeEdge() bool {
 		}
 	}

-	if hasGreaseCiphers(info.cipherSuites) {
+	if hasGreaseCiphers(info.CipherSuites) {
 		return false
 	}

@@ -570,23 +574,23 @@ func (info rawHelloInfo) looksLikeSafari() bool {

 	// We check for the presence and order of the extensions.
 	requiredExtensionsOrder := []uint16{10, 11, 13, 13172, 16, 5, 18, 23}
-	if !assertPresenceAndOrdering(requiredExtensionsOrder, info.extensions, true) {
+	if !assertPresenceAndOrdering(requiredExtensionsOrder, info.Extensions, true) {
 		// Safari on iOS 11 (beta) uses different set/ordering of extensions
 		requiredExtensionsOrderiOS11 := []uint16{65281, 0, 23, 13, 5, 13172, 18, 16, 11, 10}
-		if !assertPresenceAndOrdering(requiredExtensionsOrderiOS11, info.extensions, true) {
+		if !assertPresenceAndOrdering(requiredExtensionsOrderiOS11, info.Extensions, true) {
 			return false
 		}
 	} else {
 		// For these versions of Safari, expect TLS_EMPTY_RENEGOTIATION_INFO_SCSV first.
-		if len(info.cipherSuites) < 1 {
+		if len(info.CipherSuites) < 1 {
 			return false
 		}
-		if info.cipherSuites[0] != scsvRenegotiation {
+		if info.CipherSuites[0] != scsvRenegotiation {
 			return false
 		}
 	}

-	if hasGreaseCiphers(info.cipherSuites) {
+	if hasGreaseCiphers(info.CipherSuites) {
 		return false
 	}

@@ -611,19 +615,19 @@ func (info rawHelloInfo) looksLikeSafari() bool {
 		tls.TLS_RSA_WITH_AES_256_CBC_SHA, // 0x35
 		tls.TLS_RSA_WITH_AES_128_CBC_SHA, // 0x2f
 	}
-	return assertPresenceAndOrdering(expectedCipherSuiteOrder, info.cipherSuites, true)
+	return assertPresenceAndOrdering(expectedCipherSuiteOrder, info.CipherSuites, true)
 }

 // looksLikeTor returns true if the info looks like a ClientHello from Tor browser
 // (based on Firefox).
 func (info rawHelloInfo) looksLikeTor() bool {
 	requiredExtensionsOrder := []uint16{10, 11, 16, 5, 13}
-	if !assertPresenceAndOrdering(requiredExtensionsOrder, info.extensions, true) {
+	if !assertPresenceAndOrdering(requiredExtensionsOrder, info.Extensions, true) {
 		return false
 	}

 	// check for session tickets support; Tor doesn't support them to prevent tracking
-	for _, ext := range info.extensions {
+	for _, ext := range info.Extensions {
 		if ext == 35 {
 			return false
 		}

@@ -631,12 +635,12 @@ func (info rawHelloInfo) looksLikeTor() bool {

 	// We check for both presence of curves and their ordering, including
 	// an optional curve at the beginning (for Tor based on Firefox 52)
-	infoCurves := info.curves
-	if len(info.curves) == 4 {
-		if info.curves[0] != 29 {
+	infoCurves := info.Curves
+	if len(info.Curves) == 4 {
+		if info.Curves[0] != 29 {
 			return false
 		}
-		infoCurves = info.curves[1:]
+		infoCurves = info.Curves[1:]
 	}
 	requiredCurves := []tls.CurveID{23, 24, 25}
 	if len(infoCurves) < len(requiredCurves) {

@@ -648,7 +652,7 @@ func (info rawHelloInfo) looksLikeTor() bool {
 		}
 	}

-	if hasGreaseCiphers(info.cipherSuites) {
+	if hasGreaseCiphers(info.CipherSuites) {
 		return false
 	}

@@ -675,7 +679,7 @@ func (info rawHelloInfo) looksLikeTor() bool {
 		tls.TLS_RSA_WITH_AES_256_CBC_SHA,  // 0x35
 		tls.TLS_RSA_WITH_3DES_EDE_CBC_SHA, // 0xa
 	}
-	return assertPresenceAndOrdering(expectedCipherSuiteOrder, info.cipherSuites, false)
+	return assertPresenceAndOrdering(expectedCipherSuiteOrder, info.CipherSuites, false)
 }

 // assertPresenceAndOrdering will return true if candidateList contains
@@ -67,6 +67,12 @@ func init() {
 	caddy.RegisterParsingCallback(serverType, "root", hideCaddyfile)
 	caddy.RegisterParsingCallback(serverType, "tls", activateHTTPS)
 	caddytls.RegisterConfigGetter(serverType, func(c *caddy.Controller) *caddytls.Config { return GetConfig(c).TLS })
+
+	// disable the caddytls package reporting ClientHellos
+	// to telemetry, since our MITM detector does this but
+	// with more information than the standard lib provides
+	// (as of May 2018)
+	caddytls.ClientHelloTelemetry = false
 }

 // hideCaddyfile hides the source/origin Caddyfile if it is within the
@@ -349,8 +349,12 @@ func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) {
 		}
 	}()

-	// TODO: Somehow report UA string in conjunction with TLS handshake, if any (and just once per connection)
-	go telemetry.AppendUnique("http_user_agent", r.Header.Get("User-Agent"))
+	// record the User-Agent string (with a cap on its length to mitigate attacks)
+	ua := r.Header.Get("User-Agent")
+	if len(ua) > 512 {
+		ua = ua[:512]
+	}
+	go telemetry.AppendUnique("http_user_agent", ua)
 	go telemetry.Increment("http_request_count")

 	// copy the original, unchanged URL into the context
caddytls

@@ -341,7 +341,7 @@ func standaloneTLSTicketKeyRotation(c *tls.Config, ticker *time.Ticker, exitChan
 // Do not use this for cryptographic purposes.
 func fastHash(input []byte) string {
 	h := fnv.New32a()
-	h.Write([]byte(input))
+	h.Write(input)
 	return fmt.Sprintf("%x", h.Sum32())
 }

@@ -99,25 +99,23 @@ func (cg configGroup) GetConfigForClient(clientHello *tls.ClientHelloInfo) (*tls
 //
 // This method is safe for use as a tls.Config.GetCertificate callback.
 func (cfg *Config) GetCertificate(clientHello *tls.ClientHelloInfo) (*tls.Certificate, error) {
-	// TODO: We need to collect this in a heavily de-duplicating way
-	// It would also be nice to associate a handshake with the UA string (but that is only for HTTP server type)
-	// go telemetry.Append("tls_client_hello", struct {
-	// 	NoSNI             bool                  `json:"no_sni,omitempty"`
-	// 	CipherSuites      []uint16              `json:"cipher_suites,omitempty"`
-	// 	SupportedCurves   []tls.CurveID         `json:"curves,omitempty"`
-	// 	SupportedPoints   []uint8               `json:"points,omitempty"`
-	// 	SignatureSchemes  []tls.SignatureScheme `json:"sig_scheme,omitempty"`
-	// 	ALPN              []string              `json:"alpn,omitempty"`
-	// 	SupportedVersions []uint16              `json:"versions,omitempty"`
-	// }{
-	// 	NoSNI:             clientHello.ServerName == "",
-	// 	CipherSuites:      clientHello.CipherSuites,
-	// 	SupportedCurves:   clientHello.SupportedCurves,
-	// 	SupportedPoints:   clientHello.SupportedPoints,
-	// 	SignatureSchemes:  clientHello.SignatureSchemes,
-	// 	ALPN:              clientHello.SupportedProtos,
-	// 	SupportedVersions: clientHello.SupportedVersions,
-	// })
+	if ClientHelloTelemetry {
+		// If no other plugin (such as the HTTP server type) is implementing ClientHello telemetry, we do it.
+		// NOTE: The values in the Go standard lib's ClientHelloInfo aren't guaranteed to be in order.
+		info := ClientHelloInfo{
+			Version:                   clientHello.SupportedVersions[0], // report the highest
+			CipherSuites:              clientHello.CipherSuites,
+			ExtensionsUnknown:         true, // no extension info... :(
+			CompressionMethodsUnknown: true, // no compression methods... :(
+			Curves:                    clientHello.SupportedCurves,
+			Points:                    clientHello.SupportedPoints,
+			// We also have, but do not yet use: SignatureSchemes, ServerName, and SupportedProtos (ALPN)
+			// because the standard lib parses some extensions, but our MITM detector generally doesn't.
+		}
+		go telemetry.SetNested("tls_client_hello", info.Key(), info)
+	}
+
+	// get the certificate and serve it up
 	cert, err := cfg.getCertDuringHandshake(strings.ToLower(clientHello.ServerName), true, true)
 	if err == nil {
 		go telemetry.Increment("tls_handshake_count") // TODO: This is a "best guess" for now, we need something listener-level
@@ -487,6 +485,42 @@ func (cfg *Config) renewDynamicCertificate(name string, currentCert Certificate)
 	return cfg.getCertDuringHandshake(name, true, false)
 }

+// ClientHelloInfo is our own version of the standard lib's
+// tls.ClientHelloInfo. As of May 2018, any fields populated
+// by the Go standard library are not guaranteed to have their
+// values in the original order as on the wire.
+type ClientHelloInfo struct {
+	Version            uint16        `json:"version,omitempty"`
+	CipherSuites       []uint16      `json:"cipher_suites,omitempty"`
+	Extensions         []uint16      `json:"extensions,omitempty"`
+	CompressionMethods []byte        `json:"compression,omitempty"`
+	Curves             []tls.CurveID `json:"curves,omitempty"`
+	Points             []uint8       `json:"points,omitempty"`
+
+	// Whether a couple of fields are unknown; if not, the key will encode
+	// differently to reflect that, as opposed to being known empty values.
+	// (some fields may be unknown depending on what package is being used;
+	// i.e. the Go standard lib doesn't expose some things)
+	// (very important to NOT encode these to JSON)
+	ExtensionsUnknown         bool `json:"-"`
+	CompressionMethodsUnknown bool `json:"-"`
+}
+
+// Key returns a standardized string form of the data in info,
+// useful for identifying duplicates.
+func (info ClientHelloInfo) Key() string {
+	extensions, compressionMethods := "?", "?"
+	if !info.ExtensionsUnknown {
+		extensions = fmt.Sprintf("%x", info.Extensions)
+	}
+	if !info.CompressionMethodsUnknown {
+		compressionMethods = fmt.Sprintf("%x", info.CompressionMethods)
+	}
+	return fastHash([]byte(fmt.Sprintf("%x-%x-%s-%s-%x-%x",
+		info.Version, info.CipherSuites, extensions,
+		compressionMethods, info.Curves, info.Points)))
+}
+
 // obtainCertWaitChans is used to coordinate obtaining certs for each hostname.
 var obtainCertWaitChans = make(map[string]chan struct{})
 var obtainCertWaitChansMu sync.Mutex
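
Key() above deliberately encodes fields it cannot know as "?" rather than as empty values, so a hello recorded through this caddytls fallback (where the standard library exposes neither raw extensions nor compression methods) never hashes to the same key as a hello whose extension list is genuinely empty, as recorded by the httpserver MITM parser. A small sketch of that effect, using a local copy of the key format with FNV-32a standing in for the package's fastHash:

package main

import (
	"fmt"
	"hash/fnv"
)

// localKey reproduces the shape of ClientHelloInfo.Key() for illustration:
// unknown fields are encoded as "?", known-but-empty fields as their
// (empty) hex form, so the two cases never collide.
func localKey(version uint16, cipherSuites, extensions []uint16, extensionsUnknown bool) string {
	ext := "?"
	if !extensionsUnknown {
		ext = fmt.Sprintf("%x", extensions)
	}
	h := fnv.New32a()
	fmt.Fprintf(h, "%x-%x-%s", version, cipherSuites, ext)
	return fmt.Sprintf("%x", h.Sum32())
}

func main() {
	suites := []uint16{0xc02f, 0xc030}
	fromStdlib := localKey(0x0303, suites, nil, true)  // caddytls path: extensions unknown
	fromParser := localKey(0x0303, suites, nil, false) // MITM parser path: extensions known to be empty
	fmt.Println(fromStdlib, fromParser, fromStdlib == fromParser) // two different keys, false
}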
@@ -501,3 +535,8 @@ var failedIssuanceMu sync.RWMutex
 // If this value is recent, do not make any on-demand certificate requests.
 var lastIssueTime time.Time
 var lastIssueTimeMu sync.Mutex
+
+// ClientHelloTelemetry determines whether to report
+// TLS ClientHellos to telemetry. Disable if doing
+// it from a different package.
+var ClientHelloTelemetry = true
telemetry

@@ -16,6 +16,7 @@ package telemetry

 import (
 	"log"
+	"strings"

 	"github.com/google/uuid"
 )
@@ -117,17 +118,58 @@ func Set(key string, val interface{}) {
 		return
 	}
 	bufferMu.Lock()
-	if bufferItemCount >= maxBufferItems {
-		bufferMu.Unlock()
-		return
-	}
 	if _, ok := buffer[key]; !ok {
+		if bufferItemCount >= maxBufferItems {
+			bufferMu.Unlock()
+			return
+		}
 		bufferItemCount++
 	}
 	buffer[key] = val
 	bufferMu.Unlock()
 }

+// SetNested puts a value in the buffer to be included
+// in the next emission, nested under the top-level key
+// as subkey. It overwrites any previous value.
+//
+// This function is safe for multiple goroutines,
+// and it is recommended to call this using the
+// go keyword after the call to SendHello so it
+// doesn't block crucial code.
+func SetNested(key, subkey string, val interface{}) {
+	if !enabled || isDisabled(key) {
+		return
+	}
+	bufferMu.Lock()
+	if topLevel, ok1 := buffer[key]; ok1 {
+		topLevelMap, ok2 := topLevel.(map[string]interface{})
+		if !ok2 {
+			bufferMu.Unlock()
+			log.Printf("[PANIC] Telemetry: key %s is already used for non-nested-map value", key)
+			return
+		}
+		if _, ok3 := topLevelMap[subkey]; !ok3 {
+			// don't exceed max buffer size
+			if bufferItemCount >= maxBufferItems {
+				bufferMu.Unlock()
+				return
+			}
+			bufferItemCount++
+		}
+		topLevelMap[subkey] = val
+	} else {
+		// don't exceed max buffer size
+		if bufferItemCount >= maxBufferItems {
+			bufferMu.Unlock()
+			return
+		}
+		bufferItemCount++
+		buffer[key] = map[string]interface{}{subkey: val}
+	}
+	bufferMu.Unlock()
+}
+
 // Append appends value to a list named key.
 // If key is new, a new list will be created.
 // If key maps to a type that is not a list,
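
SetNested stores values as a two-level map: buffer[key] holds a map from subkey to value, only a new subkey consumes one of the limited buffer slots, and re-setting an existing subkey overwrites the old value without using another slot. The single-goroutine model below replays that bookkeeping without the mutex; maxBufferItems here is a made-up cap, since the real limit is internal to the package.

package main

import "fmt"

const maxBufferItems = 3 // made-up cap for the sketch

var (
	buffer          = map[string]interface{}{}
	bufferItemCount int
)

// setNested models the buffer bookkeeping of telemetry.SetNested,
// minus the locking: nested values live under buffer[key][subkey],
// and only new subkeys count against maxBufferItems.
func setNested(key, subkey string, val interface{}) {
	if topLevel, ok := buffer[key]; ok {
		topLevelMap, isMap := topLevel.(map[string]interface{})
		if !isMap {
			fmt.Println("key already used for a non-nested-map value:", key)
			return
		}
		if _, exists := topLevelMap[subkey]; !exists {
			if bufferItemCount >= maxBufferItems {
				return // buffer full: drop silently
			}
			bufferItemCount++
		}
		topLevelMap[subkey] = val // overwriting an existing subkey is free
		return
	}
	if bufferItemCount >= maxBufferItems {
		return
	}
	bufferItemCount++
	buffer[key] = map[string]interface{}{subkey: val}
}

func main() {
	setNested("tls_client_hello", "1a2b3c4d", "hello A")
	setNested("tls_client_hello", "1a2b3c4d", "hello A, updated") // overwrite: no new slot used
	setNested("tls_client_hello", "9f8e7d6c", "hello B")
	fmt.Println("items:", bufferItemCount, "buffer:", buffer)
}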
@@ -161,7 +203,8 @@ func Append(key string, value interface{}) {
 // AppendUnique adds value to a set named key.
 // Set items are unordered. Values in the set
 // are unique, but how many times they are
-// appended is counted.
+// appended is counted. The value must be
+// hashable.
 //
 // If key is new, a new set will be created for
 // values with that key. If key maps to a type
@@ -238,8 +281,16 @@ func atomicAdd(key string, amount int) {
 // functions should call this and not
 // save the value if this returns true.
 func isDisabled(key string) bool {
+	// for keys that are augmented with data, such as
+	// "tls_client_hello_ua:<hash>", just
+	// check the prefix "tls_client_hello_ua"
+	checkKey := key
+	if idx := strings.Index(key, ":"); idx > -1 {
+		checkKey = key[:idx]
+	}
+
 	disabledMetricsMu.RLock()
-	_, ok := disabledMetrics[key]
+	_, ok := disabledMetrics[checkKey]
 	disabledMetricsMu.RUnlock()
 	return ok
 }
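
Because augmented keys carry their dynamic part after a colon, disabling the base name (for example tls_client_hello_ua) also silences every derived key such as tls_client_hello_ua:<hash>. A minimal sketch of the check added above, with a plain map standing in for the package's mutex-guarded disabledMetrics:

package main

import (
	"fmt"
	"strings"
)

// disabledMetrics stands in for the telemetry package's mutex-guarded map.
var disabledMetrics = map[string]struct{}{
	"tls_client_hello_ua": {},
}

// isDisabled mirrors the prefix handling added in the hunk above:
// for augmented keys like "tls_client_hello_ua:<hash>", only the part
// before the colon is looked up.
func isDisabled(key string) bool {
	checkKey := key
	if idx := strings.Index(key, ":"); idx > -1 {
		checkKey = key[:idx]
	}
	_, ok := disabledMetrics[checkKey]
	return ok
}

func main() {
	fmt.Println(isDisabled("tls_client_hello_ua:1a2b3c4d")) // true: the base name is disabled
	fmt.Println(isDisabled("tls_client_hello_count"))       // false
}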