Disable KeepAlive on proxy (#21)

* Service lookup

* Disable KeepAlive to force round-robin (RR) across replicas.

* Set KeepAlive to 0

* Remove temporary file

* Support the white colour and rename the function image

* Remove debug from node sample
Alex Ellis 2017-03-21 08:39:36 +00:00 committed by GitHub
parent 9cf4376096
commit 2aeadfda1c
9 changed files with 201 additions and 119 deletions

View File

@@ -8,3 +8,8 @@ This is a useful Prometheus query to show:
http://localhost:9090/graph?g0.range_input=15m&g0.expr=gateway_service_count&g0.tab=0&g1.range_input=15m&g1.expr=rate(gateway_function_invocation_total%5B20s%5D)&g1.tab=0&g2.range_input=15m&g2.expr=gateway_functions_seconds_sum+%2F+gateway_functions_seconds_count&g2.tab=0
```
$ docker service ls -q |xargs -n 1 -I {} docker service scale {}=10;docker service scale func_gateway=1 ;
$ docker service scale func_prometheus=1 ; docker service scale func_alertmanager=1
```

View File

@@ -12,63 +12,14 @@ import (
"github.com/docker/docker/client"
)
-// CalculateReplicas decides what replica count to set depending on a Prometheus alert
// MakeAlertHandler handles alerts from Prometheus Alertmanager
-func CalculateReplicas(status string, currentReplicas uint64) uint64 {
-newReplicas := currentReplicas
-if status == "firing" {
-if currentReplicas == 1 {
-newReplicas = 5
-} else {
-if currentReplicas+5 > 20 {
-newReplicas = 20
-} else {
-newReplicas = currentReplicas + 5
-}
-}
-} else { // Resolved event.
-newReplicas = 1
-}
-return newReplicas
-}
-func scaleService(req requests.PrometheusAlert, c *client.Client) error {
-var err error
-//Todo: convert to loop / handler.
-serviceName := req.Alerts[0].Labels.FunctionName
-service, _, inspectErr := c.ServiceInspectWithRaw(context.Background(), serviceName)
-if inspectErr == nil {
-currentReplicas := *service.Spec.Mode.Replicated.Replicas
-status := req.Status
-newReplicas := CalculateReplicas(status, currentReplicas)
-if newReplicas == currentReplicas {
-return nil
-}
-log.Printf("Scaling %s to %d replicas.\n", serviceName, newReplicas)
-service.Spec.Mode.Replicated.Replicas = &newReplicas
-updateOpts := types.ServiceUpdateOptions{}
-updateOpts.RegistryAuthFrom = types.RegistryAuthFromSpec
-response, updateErr := c.ServiceUpdate(context.Background(), service.ID, service.Version, service.Spec, updateOpts)
-if updateErr != nil {
-err = updateErr
-}
-log.Println(response)
-} else {
-err = inspectErr
-}
-return err
-}
func MakeAlertHandler(c *client.Client) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
log.Println("Alert received.")
body, readErr := ioutil.ReadAll(r.Body)
log.Println(string(body))
if readErr != nil {
log.Println(readErr)
return
@@ -92,3 +43,58 @@ func MakeAlertHandler(c *client.Client) http.HandlerFunc {
}
}
}
// CalculateReplicas decides what replica count to set depending on a Prometheus alert
func CalculateReplicas(status string, currentReplicas uint64) uint64 {
newReplicas := currentReplicas
if status == "firing" {
if currentReplicas == 1 {
newReplicas = 5
} else {
if currentReplicas+5 > 20 {
newReplicas = 20
} else {
newReplicas = currentReplicas + 5
}
}
} else { // Resolved event.
newReplicas = 1
}
return newReplicas
}
func scaleService(req requests.PrometheusAlert, c *client.Client) error {
var err error
serviceName := req.Alerts[0].Labels.FunctionName
if len(serviceName) > 0 {
service, _, inspectErr := c.ServiceInspectWithRaw(context.Background(), serviceName)
if inspectErr == nil {
currentReplicas := *service.Spec.Mode.Replicated.Replicas
status := req.Status
newReplicas := CalculateReplicas(status, currentReplicas)
if newReplicas == currentReplicas {
return nil
}
log.Printf("Scaling %s to %d replicas.\n", serviceName, newReplicas)
service.Spec.Mode.Replicated.Replicas = &newReplicas
updateOpts := types.ServiceUpdateOptions{}
updateOpts.RegistryAuthFrom = types.RegistryAuthFromSpec
response, updateErr := c.ServiceUpdate(context.Background(), service.ID, service.Version, service.Spec, updateOpts)
if updateErr != nil {
err = updateErr
}
log.Println(response)
} else {
err = inspectErr
}
}
return err
}
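
The moved CalculateReplicas function above carries the whole scaling policy: a firing alert takes a single replica straight to 5, then climbs in steps of 5 up to a ceiling of 20, while a resolved alert drops the service back to 1. A minimal, self-contained sketch of that behaviour follows; the driver program is illustrative and not part of the commit.

```
package main

import "fmt"

// Same policy as CalculateReplicas above: firing alerts jump from 1 replica to
// 5, then climb in steps of 5 up to a ceiling of 20; resolved alerts reset to 1.
func CalculateReplicas(status string, currentReplicas uint64) uint64 {
	newReplicas := currentReplicas
	if status == "firing" {
		if currentReplicas == 1 {
			newReplicas = 5
		} else if currentReplicas+5 > 20 {
			newReplicas = 20
		} else {
			newReplicas = currentReplicas + 5
		}
	} else { // Resolved event.
		newReplicas = 1
	}
	return newReplicas
}

func main() {
	cases := []struct {
		status  string
		current uint64
	}{
		{"firing", 1},    // -> 5
		{"firing", 5},    // -> 10
		{"firing", 18},   // -> 20 (capped)
		{"resolved", 20}, // -> 1
	}
	for _, c := range cases {
		fmt.Printf("%s with %d replicas -> %d\n", c.status, c.current, CalculateReplicas(c.status, c.current))
	}
}
```

Running it prints 5, 10, 20 and 1 for the four cases.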

View File

@@ -5,6 +5,7 @@ import (
"context"
"fmt"
"io/ioutil"
"net"
"net/http"
"strconv"
"time"
@@ -20,6 +21,20 @@ import (
// MakeProxy creates a proxy for HTTP web requests which can be routed to a function.
func MakeProxy(metrics metrics.MetricOptions, wildcard bool, c *client.Client, logger *logrus.Logger) http.HandlerFunc {
proxyClient := http.Client{
Transport: &http.Transport{
Proxy: http.ProxyFromEnvironment,
DialContext: (&net.Dialer{
Timeout: 3 * time.Second,
KeepAlive: 0,
}).DialContext,
MaxIdleConns: 1,
DisableKeepAlives: true,
IdleConnTimeout: 120 * time.Millisecond,
ExpectContinueTimeout: 1500 * time.Millisecond,
},
}
return func(w http.ResponseWriter, r *http.Request) {
if r.Method == "POST" {
@@ -31,11 +46,11 @@ func MakeProxy(metrics metrics.MetricOptions, wildcard bool, c *client.Client, l
vars := mux.Vars(r)
name := vars["name"]
fmt.Println("invoke by name")
-lookupInvoke(w, r, metrics, name, c, logger)
lookupInvoke(w, r, metrics, name, c, logger, &proxyClient)
defer r.Body.Close()
} else if len(header) > 0 {
-lookupInvoke(w, r, metrics, header[0], c, logger)
lookupInvoke(w, r, metrics, header[0], c, logger, &proxyClient)
defer r.Body.Close()
} else {
w.WriteHeader(http.StatusBadRequest)
@@ -59,7 +74,7 @@ func trackTime(then time.Time, metrics metrics.MetricOptions, name string) {
metrics.GatewayFunctionsHistogram.WithLabelValues(name).Observe(since.Seconds())
}
-func lookupInvoke(w http.ResponseWriter, r *http.Request, metrics metrics.MetricOptions, name string, c *client.Client, logger *logrus.Logger) {
func lookupInvoke(w http.ResponseWriter, r *http.Request, metrics metrics.MetricOptions, name string, c *client.Client, logger *logrus.Logger, proxyClient *http.Client) {
exists, err := lookupSwarmService(name, c)
if err != nil || exists == false {
@@ -75,7 +90,7 @@ func lookupInvoke(w http.ResponseWriter, r *http.Request, metrics metrics.Metric
if exists {
defer trackTime(time.Now(), metrics, name)
requestBody, _ := ioutil.ReadAll(r.Body)
-invokeService(w, r, metrics, name, requestBody, logger)
invokeService(w, r, metrics, name, requestBody, logger, proxyClient)
}
}
@@ -88,7 +103,7 @@ func lookupSwarmService(serviceName string, c *client.Client) (bool, error) {
return len(services) > 0, err
}
-func invokeService(w http.ResponseWriter, r *http.Request, metrics metrics.MetricOptions, service string, requestBody []byte, logger *logrus.Logger) {
func invokeService(w http.ResponseWriter, r *http.Request, metrics metrics.MetricOptions, service string, requestBody []byte, logger *logrus.Logger, proxyClient *http.Client) {
stamp := strconv.FormatInt(time.Now().Unix(), 10)
defer func(when time.Time) {
@@ -98,9 +113,15 @@ func invokeService(w http.ResponseWriter, r *http.Request, metrics metrics.Metri
metrics.GatewayFunctionsHistogram.WithLabelValues(service).Observe(seconds)
}(time.Now())
-// start := time.Now()
watchdogPort := 8080
-buf := bytes.NewBuffer(requestBody)
addr, lookupErr := net.LookupIP(service)
-url := "http://" + service + ":" + strconv.Itoa(8080) + "/"
var url string
if len(addr) > 0 && lookupErr == nil {
url = fmt.Sprintf("http://%s:%d/", addr[0].String(), watchdogPort)
} else {
url = fmt.Sprintf("http://%s:%d/", service, watchdogPort)
}
contentType := r.Header.Get("Content-Type")
if len(contentType) == 0 {
contentType = "text/plain"
@@ -108,7 +129,11 @@ func invokeService(w http.ResponseWriter, r *http.Request, metrics metrics.Metri
fmt.Printf("[%s] Forwarding request [%s] to: %s\n", stamp, contentType, url)
-response, err := http.Post(url, r.Header.Get("Content-Type"), buf)
request, err := http.NewRequest("POST", url, bytes.NewReader(requestBody))
request.Header.Add("Content-Type", contentType)
defer request.Body.Close()
response, err := proxyClient.Do(request)
if err != nil {
logger.Infoln(err)
writeHead(service, metrics, http.StatusInternalServerError, w)
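
The proxy change above is the heart of the commit: the shared proxyClient disables keep-alive (KeepAlive: 0 on the dialer plus DisableKeepAlives on the transport), so every invocation dials a fresh connection and Swarm's load balancing can round-robin requests across a function's replicas instead of pinning them to one task, while net.LookupIP resolves the service name before the request is built. Below is a minimal standalone sketch of the same client configuration; the function name it calls is a placeholder, not something this commit introduces.

```
package main

import (
	"fmt"
	"net"
	"net/http"
	"time"
)

func main() {
	// Same idea as the gateway's proxyClient above: keep-alive is off, so each
	// request dials a new connection instead of reusing an idle one.
	client := http.Client{
		Transport: &http.Transport{
			Proxy: http.ProxyFromEnvironment,
			DialContext: (&net.Dialer{
				Timeout:   3 * time.Second,
				KeepAlive: 0,
			}).DialContext,
			MaxIdleConns:          1,
			DisableKeepAlives:     true,
			IdleConnTimeout:       120 * time.Millisecond,
			ExpectContinueTimeout: 1500 * time.Millisecond,
		},
	}

	// Placeholder service name: "func_echoit" resolves through Swarm DNS; with
	// no connection reuse, successive requests can land on different replicas.
	for i := 0; i < 3; i++ {
		resp, err := client.Get("http://func_echoit:8080/")
		if err != nil {
			fmt.Println(err)
			continue
		}
		resp.Body.Close()
		fmt.Println(resp.Status)
	}
}
```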

View File

@@ -1,47 +1,47 @@
-{
-"receiver":"scale-up",
-"status":"firing",
-"alerts":[
-{
-"status":"firing",
-"labels":{
-"alertname":"APIHighInvocationRate",
-"function_name":"func_echoit",
-"instance":"gateway:8080",
-"job":"gateway",
-"monitor":"faas-monitor",
-"service":"gateway",
-"severity":"major",
-"value":"8"
-},
-"annotations":{
-"description":"High invocation total on gateway:8080",
-"summary":"High invocation total on gateway:8080"
-},
-"startsAt":"2017-01-22T10:40:52.804Z",
-"endsAt":"0001-01-01T00:00:00Z",
-"generatorURL":"http://bb1b23e87070:9090/graph?g0.expr=rate%28gateway_function_invocation_total%5B10s%5D%29+%3E+5\u0026g0.tab=0"
-}
-],
-"groupLabels":{
-"alertname":"APIHighInvocationRate",
-"service":"gateway"
-},
-"commonLabels":{
-"alertname":"APIHighInvocationRate",
-"function_name":"func_echoit",
-"instance":"gateway:8080",
-"job":"gateway",
-"monitor":"faas-monitor",
-"service":"gateway",
-"severity":"major",
-"value":"8"
-},
-"commonAnnotations":{
-"description":"High invocation total on gateway:8080",
-"summary":"High invocation total on gateway:8080"
-},
-"externalURL":"http://c052c835bcee:9093",
-"version":"3",
-"groupKey":18195285354214864953
-}
{
"receiver": "scale-up",
"status": "firing",
"alerts": [{
"status": "firing",
"labels": {
"alertname": "APIHighInvocationRate",
"code": "200",
"function_name": "func_nodeinfo",
"instance": "gateway:8080",
"job": "gateway",
"monitor": "faas-monitor",
"service": "gateway",
"severity": "major",
"value": "8.998200359928017"
},
"annotations": {
"description": "High invocation total on gateway:8080",
"summary": "High invocation total on gateway:8080"
},
"startsAt": "2017-03-15T15:52:57.805Z",
"endsAt": "0001-01-01T00:00:00Z",
"generatorURL": "http://4156cb797423:9090/graph?g0.expr=rate%28gateway_function_invocation_total%5B10s%5D%29+%3E+5\u0026g0.tab=0"
}],
"groupLabels": {
"alertname": "APIHighInvocationRate",
"service": "gateway"
},
"commonLabels": {
"alertname": "APIHighInvocationRate",
"code": "200",
"function_name": "func_nodeinfo",
"instance": "gateway:8080",
"job": "gateway",
"monitor": "faas-monitor",
"service": "gateway",
"severity": "major",
"value": "8.998200359928017"
},
"commonAnnotations": {
"description": "High invocation total on gateway:8080",
"summary": "High invocation total on gateway:8080"
},
"externalURL": "http://f054879d97db:9093",
"version": "3",
"groupKey": 18195285354214864953
}

View File

@@ -0,0 +1,47 @@
{
"receiver": "scale-up",
"status": "resolved",
"alerts": [{
"status": "resolved",
"labels": {
"alertname": "APIHighInvocationRate",
"code": "200",
"function_name": "func_nodeinfo",
"instance": "gateway:8080",
"job": "gateway",
"monitor": "faas-monitor",
"service": "gateway",
"severity": "major",
"value": "8.998200359928017"
},
"annotations": {
"description": "High invocation total on gateway:8080",
"summary": "High invocation total on gateway:8080"
},
"startsAt": "2017-03-15T15:52:57.805Z",
"endsAt": "2017-03-15T15:53:52.806Z",
"generatorURL": "http://4156cb797423:9090/graph?g0.expr=rate%28gateway_function_invocation_total%5B10s%5D%29+%3E+5\u0026g0.tab=0"
}],
"groupLabels": {
"alertname": "APIHighInvocationRate",
"service": "gateway"
},
"commonLabels": {
"alertname": "APIHighInvocationRate",
"code": "200",
"function_name": "func_nodeinfo",
"instance": "gateway:8080",
"job": "gateway",
"monitor": "faas-monitor",
"service": "gateway",
"severity": "major",
"value": "8.998200359928017"
},
"commonAnnotations": {
"description": "High invocation total on gateway:8080",
"summary": "High invocation total on gateway:8080"
},
"externalURL": "http://f054879d97db:9093",
"version": "3",
"groupKey": 18195285354214864953
}

Binary file not shown.

View File

@@ -6,11 +6,11 @@ ADD https://github.com/alexellis/faas/releases/download/v0.3-alpha/fwatchdog /us
RUN chmod +x /usr/bin/fwatchdog
COPY package.json .
RUN npm i
COPY handler.js .
COPY sendColor.js .
COPY sample_response.json .
RUN npm i
ENV fprocess="node handler.js"
CMD ["fwatchdog"]

View File

@@ -1,4 +1,3 @@
-#!/bin/bash
#!/bin/sh
-docker build -t alexellis2/faas-alexachangecolorintent:latest-dev1 .
docker build -t functions/alexa-leds:latest .

View File

@@ -12,7 +12,6 @@ getStdin().then(content => {
});
function tellWithCard(speechOutput, request) {
-// console.log(sample)
sample.response.session = request.session;
sample.response.outputSpeech.text = speechOutput;
sample.response.card.content = speechOutput;
@@ -23,8 +22,6 @@ function tellWithCard(speechOutput, request) {
}
function handle(request, intent) {
-// console.log("Intent: " + intent.name);
if(intent.name == "TurnOffIntent") {
let req = {r:0,g:0,b:0};
var speechOutput = "Lights off.";
@@ -40,12 +37,15 @@ function handle(request, intent) {
req.b = 255;
} else if (colorRequested == "green") {
req.g = 255;
} else if (colorRequested == "white") {
req.r = 255;
req.g = 103;
req.b = 23;
} else {
let msg = "I heard "+colorRequested+ " but can only show: red, green, blue and white.";
return tellWithCard(msg, request);
}
-else {
-return tellWithCard("I heard "+colorRequested+
-" but can only do: red, green, blue.", request);
-}
sendColor.sendColor(req, () => {
var speechOutput = "OK, " + colorRequested + ".";
return tellWithCard(speechOutput, request);