Mirror of https://github.com/openfaas/faas.git
Disable KeepAlive on proxy (#21)
* Service lookup
* Disable KeepAlive to force RR for replicas.
* Set KeepAlive to 0
* Remove temporary file
* Support white and rename function
* Remove debug from node sample
parent 9cf4376096
commit 2aeadfda1c

debug.md (5)
@@ -8,3 +8,8 @@ This is a useful Prometheus query to show:

http://localhost:9090/graph?g0.range_input=15m&g0.expr=gateway_service_count&g0.tab=0&g1.range_input=15m&g1.expr=rate(gateway_function_invocation_total%5B20s%5D)&g1.tab=0&g2.range_input=15m&g2.expr=gateway_functions_seconds_sum+%2F+gateway_functions_seconds_count&g2.tab=0

```
$ docker service ls -q |xargs -n 1 -I {} docker service scale {}=10;docker service scale func_gateway=1 ;
$ docker service scale func_prometheus=1 ; docker service scale func_alertmanager=1
```
@@ -12,63 +12,14 @@ import (

	"github.com/docker/docker/client"
)

// CalculateReplicas decides what replica count to set depending on a Prometheus alert
func CalculateReplicas(status string, currentReplicas uint64) uint64 {
	newReplicas := currentReplicas

	if status == "firing" {
		if currentReplicas == 1 {
			newReplicas = 5
		} else {
			if currentReplicas+5 > 20 {
				newReplicas = 20
			} else {
				newReplicas = currentReplicas + 5
			}
		}
	} else { // Resolved event.
		newReplicas = 1
	}
	return newReplicas
}

func scaleService(req requests.PrometheusAlert, c *client.Client) error {
	var err error
	//Todo: convert to loop / handler.
	serviceName := req.Alerts[0].Labels.FunctionName
	service, _, inspectErr := c.ServiceInspectWithRaw(context.Background(), serviceName)
	if inspectErr == nil {

		currentReplicas := *service.Spec.Mode.Replicated.Replicas
		status := req.Status
		newReplicas := CalculateReplicas(status, currentReplicas)

		if newReplicas == currentReplicas {
			return nil
		}

		log.Printf("Scaling %s to %d replicas.\n", serviceName, newReplicas)
		service.Spec.Mode.Replicated.Replicas = &newReplicas
		updateOpts := types.ServiceUpdateOptions{}
		updateOpts.RegistryAuthFrom = types.RegistryAuthFromSpec

		response, updateErr := c.ServiceUpdate(context.Background(), service.ID, service.Version, service.Spec, updateOpts)
		if updateErr != nil {
			err = updateErr
		}
		log.Println(response)

	} else {
		err = inspectErr
	}

	return err
}

// MakeAlertHandler handles alerts from Prometheus Alertmanager
func MakeAlertHandler(c *client.Client) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		log.Println("Alert received.")
		body, readErr := ioutil.ReadAll(r.Body)

		log.Println(string(body))

		if readErr != nil {
			log.Println(readErr)
			return
@@ -92,3 +43,58 @@ func MakeAlertHandler(c *client.Client) http.HandlerFunc {
		}
	}
}

// CalculateReplicas decides what replica count to set depending on a Prometheus alert
func CalculateReplicas(status string, currentReplicas uint64) uint64 {
	newReplicas := currentReplicas

	if status == "firing" {
		if currentReplicas == 1 {
			newReplicas = 5
		} else {
			if currentReplicas+5 > 20 {
				newReplicas = 20
			} else {
				newReplicas = currentReplicas + 5
			}
		}
	} else { // Resolved event.
		newReplicas = 1
	}
	return newReplicas
}

func scaleService(req requests.PrometheusAlert, c *client.Client) error {
	var err error
	serviceName := req.Alerts[0].Labels.FunctionName

	if len(serviceName) > 0 {

		service, _, inspectErr := c.ServiceInspectWithRaw(context.Background(), serviceName)
		if inspectErr == nil {

			currentReplicas := *service.Spec.Mode.Replicated.Replicas
			status := req.Status
			newReplicas := CalculateReplicas(status, currentReplicas)

			if newReplicas == currentReplicas {
				return nil
			}

			log.Printf("Scaling %s to %d replicas.\n", serviceName, newReplicas)
			service.Spec.Mode.Replicated.Replicas = &newReplicas
			updateOpts := types.ServiceUpdateOptions{}
			updateOpts.RegistryAuthFrom = types.RegistryAuthFromSpec

			response, updateErr := c.ServiceUpdate(context.Background(), service.ID, service.Version, service.Spec, updateOpts)
			if updateErr != nil {
				err = updateErr
			}
			log.Println(response)

		} else {
			err = inspectErr
		}
	}
	return err
}
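The stepping behaviour of CalculateReplicas above (1 to 5, then +5 per firing alert up to a cap of 20, and straight back to 1 on a resolved alert) is easiest to check with a few concrete inputs. Below is a self-contained sketch that copies the function and prints some sample transitions; it is illustrative only and not part of this commit.

```
package main

import "fmt"

// CalculateReplicas mirrors the scaling logic from the alert handler:
// a firing alert steps 1 -> 5, then +5 per alert up to a cap of 20;
// a resolved alert drops back to a single replica.
func CalculateReplicas(status string, currentReplicas uint64) uint64 {
	newReplicas := currentReplicas

	if status == "firing" {
		if currentReplicas == 1 {
			newReplicas = 5
		} else {
			if currentReplicas+5 > 20 {
				newReplicas = 20
			} else {
				newReplicas = currentReplicas + 5
			}
		}
	} else { // resolved event
		newReplicas = 1
	}
	return newReplicas
}

func main() {
	fmt.Println(CalculateReplicas("firing", 1))    // 5
	fmt.Println(CalculateReplicas("firing", 5))    // 10
	fmt.Println(CalculateReplicas("firing", 18))   // 20 (capped)
	fmt.Println(CalculateReplicas("resolved", 20)) // 1
}
```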
@@ -5,6 +5,7 @@ import (
	"context"
	"fmt"
	"io/ioutil"
	"net"
	"net/http"
	"strconv"
	"time"

@@ -20,6 +21,20 @@ import (

// MakeProxy creates a proxy for HTTP web requests which can be routed to a function.
func MakeProxy(metrics metrics.MetricOptions, wildcard bool, c *client.Client, logger *logrus.Logger) http.HandlerFunc {
	proxyClient := http.Client{
		Transport: &http.Transport{
			Proxy: http.ProxyFromEnvironment,
			DialContext: (&net.Dialer{
				Timeout:   3 * time.Second,
				KeepAlive: 0,
			}).DialContext,
			MaxIdleConns:          1,
			DisableKeepAlives:     true,
			IdleConnTimeout:       120 * time.Millisecond,
			ExpectContinueTimeout: 1500 * time.Millisecond,
		},
	}

	return func(w http.ResponseWriter, r *http.Request) {

		if r.Method == "POST" {
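The transport above is the heart of the change: with DisableKeepAlives set and the dialer's KeepAlive at 0, the gateway opens a fresh connection for every proxied request, so Docker Swarm's service load balancing gets a chance to pick a different replica each time instead of pinning to one kept-alive backend. A stripped-down sketch of the same client configuration follows; the target URL and request loop are illustrative assumptions, not code from this commit.

```
package main

import (
	"fmt"
	"net"
	"net/http"
	"time"
)

func main() {
	// Same shape as the gateway's proxyClient: no connection reuse,
	// so each request re-dials and the Swarm load balancer can rotate replicas.
	client := http.Client{
		Transport: &http.Transport{
			DialContext: (&net.Dialer{
				Timeout:   3 * time.Second,
				KeepAlive: 0,
			}).DialContext,
			MaxIdleConns:          1,
			DisableKeepAlives:     true,
			IdleConnTimeout:       120 * time.Millisecond,
			ExpectContinueTimeout: 1500 * time.Millisecond,
		},
	}

	// "func_echoit" is a placeholder service name; on a Swarm overlay network
	// successive calls may land on different replicas because nothing is kept alive.
	for i := 0; i < 3; i++ {
		resp, err := client.Get("http://func_echoit:8080/")
		if err != nil {
			fmt.Println(err)
			continue
		}
		resp.Body.Close()
		fmt.Println(resp.Status)
	}
}
```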
@@ -31,11 +46,11 @@ func MakeProxy(metrics metrics.MetricOptions, wildcard bool, c *client.Client, l
			vars := mux.Vars(r)
			name := vars["name"]
			fmt.Println("invoke by name")
			lookupInvoke(w, r, metrics, name, c, logger)
			lookupInvoke(w, r, metrics, name, c, logger, &proxyClient)
			defer r.Body.Close()

		} else if len(header) > 0 {
			lookupInvoke(w, r, metrics, header[0], c, logger)
			lookupInvoke(w, r, metrics, header[0], c, logger, &proxyClient)
			defer r.Body.Close()
		} else {
			w.WriteHeader(http.StatusBadRequest)

@@ -59,7 +74,7 @@ func trackTime(then time.Time, metrics metrics.MetricOptions, name string) {
	metrics.GatewayFunctionsHistogram.WithLabelValues(name).Observe(since.Seconds())
}

func lookupInvoke(w http.ResponseWriter, r *http.Request, metrics metrics.MetricOptions, name string, c *client.Client, logger *logrus.Logger) {
func lookupInvoke(w http.ResponseWriter, r *http.Request, metrics metrics.MetricOptions, name string, c *client.Client, logger *logrus.Logger, proxyClient *http.Client) {
	exists, err := lookupSwarmService(name, c)

	if err != nil || exists == false {

@@ -75,7 +90,7 @@ func lookupInvoke(w http.ResponseWriter, r *http.Request, metrics metrics.Metric
	if exists {
		defer trackTime(time.Now(), metrics, name)
		requestBody, _ := ioutil.ReadAll(r.Body)
		invokeService(w, r, metrics, name, requestBody, logger)
		invokeService(w, r, metrics, name, requestBody, logger, proxyClient)
	}
}

@@ -88,7 +103,7 @@ func lookupSwarmService(serviceName string, c *client.Client) (bool, error) {
	return len(services) > 0, err
}

func invokeService(w http.ResponseWriter, r *http.Request, metrics metrics.MetricOptions, service string, requestBody []byte, logger *logrus.Logger) {
func invokeService(w http.ResponseWriter, r *http.Request, metrics metrics.MetricOptions, service string, requestBody []byte, logger *logrus.Logger, proxyClient *http.Client) {
	stamp := strconv.FormatInt(time.Now().Unix(), 10)

	defer func(when time.Time) {

@@ -98,9 +113,15 @@ func invokeService(w http.ResponseWriter, r *http.Request, metrics metrics.Metri
		metrics.GatewayFunctionsHistogram.WithLabelValues(service).Observe(seconds)
	}(time.Now())

	// start := time.Now()
	buf := bytes.NewBuffer(requestBody)
	url := "http://" + service + ":" + strconv.Itoa(8080) + "/"
	watchdogPort := 8080
	addr, lookupErr := net.LookupIP(service)
	var url string
	if len(addr) > 0 && lookupErr == nil {
		url = fmt.Sprintf("http://%s:%d/", addr[0].String(), watchdogPort)
	} else {
		url = fmt.Sprintf("http://%s:%d/", service, watchdogPort)
	}

	contentType := r.Header.Get("Content-Type")
	if len(contentType) == 0 {
		contentType = "text/plain"

@@ -108,7 +129,11 @@ func invokeService(w http.ResponseWriter, r *http.Request, metrics metrics.Metri

	fmt.Printf("[%s] Forwarding request [%s] to: %s\n", stamp, contentType, url)

	response, err := http.Post(url, r.Header.Get("Content-Type"), buf)
	request, err := http.NewRequest("POST", url, bytes.NewReader(requestBody))
	request.Header.Add("Content-Type", contentType)
	defer request.Body.Close()

	response, err := proxyClient.Do(request)
	if err != nil {
		logger.Infoln(err)
		writeHead(service, metrics, http.StatusInternalServerError, w)
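The new URL construction above resolves the function name with net.LookupIP and uses the first returned address when the lookup succeeds, falling back to the bare service name otherwise; the request is then sent through the shared keep-alive-free proxyClient instead of the default http.Post client. A compact, standalone sketch of that resolution step follows; the service name used in main is a placeholder, not taken from this commit.

```
package main

import (
	"fmt"
	"net"
)

// resolveFunctionURL mimics the lookup in invokeService: use the first IP
// returned by DNS if the lookup succeeds, otherwise fall back to the service
// name and let the platform resolve it at dial time.
func resolveFunctionURL(service string, watchdogPort int) string {
	addr, err := net.LookupIP(service)
	if err == nil && len(addr) > 0 {
		return fmt.Sprintf("http://%s:%d/", addr[0].String(), watchdogPort)
	}
	return fmt.Sprintf("http://%s:%d/", service, watchdogPort)
}

func main() {
	// "func_echoit" is a placeholder; on a Swarm overlay network this would
	// resolve to the service VIP or a task IP.
	fmt.Println(resolveFunctionURL("func_echoit", 8080))
}
```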
@@ -1,47 +1,47 @@
{
"receiver":"scale-up",
"status":"firing",
"alerts":[
{
"status":"firing",
"labels":{
"alertname":"APIHighInvocationRate",
"function_name":"func_echoit",
"instance":"gateway:8080",
"job":"gateway",
"monitor":"faas-monitor",
"service":"gateway",
"severity":"major",
"value":"8"
},
"annotations":{
"description":"High invocation total on gateway:8080",
"summary":"High invocation total on gateway:8080"
},
"startsAt":"2017-01-22T10:40:52.804Z",
"endsAt":"0001-01-01T00:00:00Z",
"generatorURL":"http://bb1b23e87070:9090/graph?g0.expr=rate%28gateway_function_invocation_total%5B10s%5D%29+%3E+5\u0026g0.tab=0"
}
],
"groupLabels":{
"alertname":"APIHighInvocationRate",
"service":"gateway"
},
"commonLabels":{
"alertname":"APIHighInvocationRate",
"function_name":"func_echoit",
"instance":"gateway:8080",
"job":"gateway",
"monitor":"faas-monitor",
"service":"gateway",
"severity":"major",
"value":"8"
},
"commonAnnotations":{
"description":"High invocation total on gateway:8080",
"summary":"High invocation total on gateway:8080"
},
"externalURL":"http://c052c835bcee:9093",
"version":"3",
"groupKey":18195285354214864953
"receiver": "scale-up",
"status": "firing",
"alerts": [{
"status": "firing",
"labels": {
"alertname": "APIHighInvocationRate",
"code": "200",
"function_name": "func_nodeinfo",
"instance": "gateway:8080",
"job": "gateway",
"monitor": "faas-monitor",
"service": "gateway",
"severity": "major",
"value": "8.998200359928017"
},
"annotations": {
"description": "High invocation total on gateway:8080",
"summary": "High invocation total on gateway:8080"
},
"startsAt": "2017-03-15T15:52:57.805Z",
"endsAt": "0001-01-01T00:00:00Z",
"generatorURL": "http://4156cb797423:9090/graph?g0.expr=rate%28gateway_function_invocation_total%5B10s%5D%29+%3E+5\u0026g0.tab=0"
}],
"groupLabels": {
"alertname": "APIHighInvocationRate",
"service": "gateway"
},
"commonLabels": {
"alertname": "APIHighInvocationRate",
"code": "200",
"function_name": "func_nodeinfo",
"instance": "gateway:8080",
"job": "gateway",
"monitor": "faas-monitor",
"service": "gateway",
"severity": "major",
"value": "8.998200359928017"
},
"commonAnnotations": {
"description": "High invocation total on gateway:8080",
"summary": "High invocation total on gateway:8080"
},
"externalURL": "http://f054879d97db:9093",
"version": "3",
"groupKey": 18195285354214864953
}
gateway/tests/test_resolved_alert.json (new file, 47 lines)
@@ -0,0 +1,47 @@
{
"receiver": "scale-up",
"status": "resolved",
"alerts": [{
"status": "resolved",
"labels": {
"alertname": "APIHighInvocationRate",
"code": "200",
"function_name": "func_nodeinfo",
"instance": "gateway:8080",
"job": "gateway",
"monitor": "faas-monitor",
"service": "gateway",
"severity": "major",
"value": "8.998200359928017"
},
"annotations": {
"description": "High invocation total on gateway:8080",
"summary": "High invocation total on gateway:8080"
},
"startsAt": "2017-03-15T15:52:57.805Z",
"endsAt": "2017-03-15T15:53:52.806Z",
"generatorURL": "http://4156cb797423:9090/graph?g0.expr=rate%28gateway_function_invocation_total%5B10s%5D%29+%3E+5\u0026g0.tab=0"
}],
"groupLabels": {
"alertname": "APIHighInvocationRate",
"service": "gateway"
},
"commonLabels": {
"alertname": "APIHighInvocationRate",
"code": "200",
"function_name": "func_nodeinfo",
"instance": "gateway:8080",
"job": "gateway",
"monitor": "faas-monitor",
"service": "gateway",
"severity": "major",
"value": "8.998200359928017"
},
"commonAnnotations": {
"description": "High invocation total on gateway:8080",
"summary": "High invocation total on gateway:8080"
},
"externalURL": "http://f054879d97db:9093",
"version": "3",
"groupKey": 18195285354214864953
}
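The two fixtures above exercise the firing and resolved paths of the alert handler. The sketch below shows one way they could be decoded in a test; the struct is a hypothetical, trimmed-down stand-in for the actual requests.PrometheusAlert type and covers only the fields the scaling logic reads.

```
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"os"
)

// prometheusAlert is a hypothetical, minimal mirror of the fixture payloads:
// only Status and the per-alert function_name label are needed by the
// scaling logic shown above.
type prometheusAlert struct {
	Status string `json:"status"`
	Alerts []struct {
		Labels struct {
			FunctionName string `json:"function_name"`
		} `json:"labels"`
	} `json:"alerts"`
}

func main() {
	body, err := os.ReadFile("gateway/tests/test_resolved_alert.json")
	if err != nil {
		log.Fatal(err)
	}

	var alert prometheusAlert
	if err := json.Unmarshal(body, &alert); err != nil {
		log.Fatal(err)
	}

	// Expected output for the resolved fixture: "resolved func_nodeinfo"
	fmt.Println(alert.Status, alert.Alerts[0].Labels.FunctionName)
}
```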
Binary file not shown.
@@ -6,11 +6,11 @@ ADD https://github.com/alexellis/faas/releases/download/v0.3-alpha/fwatchdog /us
RUN chmod +x /usr/bin/fwatchdog

COPY package.json .
RUN npm i
COPY handler.js .
COPY sendColor.js .
COPY sample_response.json .

RUN npm i

ENV fprocess="node handler.js"
CMD ["fwatchdog"]
@@ -1,4 +1,3 @@
#!/bin/bash

docker build -t alexellis2/faas-alexachangecolorintent:latest-dev1 .
#!/bin/sh

docker build -t functions/alexa-leds:latest .
@@ -12,7 +12,6 @@ getStdin().then(content => {
});

function tellWithCard(speechOutput, request) {
    // console.log(sample)
    sample.response.session = request.session;
    sample.response.outputSpeech.text = speechOutput;
    sample.response.card.content = speechOutput;

@@ -23,8 +22,6 @@ function tellWithCard(speechOutput, request) {
}

function handle(request, intent) {
    // console.log("Intent: " + intent.name);

    if(intent.name == "TurnOffIntent") {
        let req = {r:0,g:0,b:0};
        var speechOutput = "Lights off.";

@@ -40,12 +37,15 @@ function handle(request, intent) {
            req.b = 255;
        } else if (colorRequested == "green") {
            req.g = 255;
        } else if (colorRequested == "white") {
            req.r = 255;
            req.g = 103;
            req.b = 23;
        } else {
            let msg = "I heard "+colorRequested+ " but can only show: red, green, blue and white.";
            return tellWithCard(msg, request);
        }
        else {
            return tellWithCard("I heard "+colorRequested+
                " but can only do: red, green, blue.",
                "I heard "+colorRequested+ " but can only do: red, green, blue.", request);
        }

        sendColor.sendColor(req, () => {
            var speechOutput = "OK, " + colorRequested + ".";
            return tellWithCard(speechOutput, request);