Commit
add global rate limiting support
Closes projectcontour#370.

Signed-off-by: Steve Kriss <krisss@vmware.com>
skriss committed Feb 9, 2021
1 parent 6180966 commit e94b1f7
Showing 29 changed files with 2,744 additions and 94 deletions.
1 change: 1 addition & 0 deletions Makefile
@@ -322,6 +322,7 @@ integration: ## Run integration tests against a real k8s cluster
./_integration/testsuite/install-service-apis.sh
./_integration/testsuite/install-contour-working.sh
./_integration/testsuite/install-fallback-certificate.sh
./_integration/testsuite/install-ratelimit-service.sh
./_integration/testsuite/run-test-case.sh ./_integration/testsuite/httpproxy/*.yaml
./_integration/testsuite/cleanup.sh

30 changes: 15 additions & 15 deletions _integration/testsuite/httpproxy/019-local-rate-limiting.yaml
@@ -37,6 +37,21 @@ $apply:

---

# Wait for the service to have endpoints before trying to make
# a request.

import data.contour.resources

error_endpoints_not_ready[msg] {
  ep := resources.get("endpoints", "echo")

  not ep.subsets[0].addresses

  msg := "endpoints for svc/ingress-conformance-echo are not ready"
}

---

# This proxy has a local rate limit on the virtual host.
apiVersion: projectcontour.io/v1
kind: HTTPProxy
@@ -55,21 +70,6 @@ spec:
port: 80
---

# Wait for the service to have endpoints before trying to make
# a request.

import data.contour.resources

error_endpoints_not_ready[msg] {
  ep := resources.get("endpoints", "echo")

  not ep.subsets[0].addresses

  msg := "endpoints for svc/ingress-conformance-echo are not ready"
}

---

# Make a request against the proxy, confirm a 200 response
# is returned.

203 changes: 203 additions & 0 deletions _integration/testsuite/httpproxy/020-global-rate-limiting.yaml
@@ -0,0 +1,203 @@
# Copyright Project Contour Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# This check depends on the `--watch=endpoints` argument being given
# to integration-tester.

---

apiVersion: apps/v1
kind: Deployment
metadata:
  name: ingress-conformance-echo
$apply:
  fixture:
    as: echo

---

apiVersion: v1
kind: Service
metadata:
  name: ingress-conformance-echo
$apply:
  fixture:
    as: echo

---

# Wait for the service to have endpoints.

import data.contour.resources

error_endpoints_not_ready[msg] {
  ep := resources.get("endpoints", "echo")

  not ep.subsets[0].addresses

  msg := "endpoints for svc/ingress-conformance-echo are not ready"
}

---

# This proxy has a global rate limit on the virtual host.
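# The genericKey entry below is sent to the rate limit service as a
# ("generic_key", "vhostlimit") descriptor entry, which must match a
# descriptor in the rate limit service configuration installed by
# install-ratelimit-service.sh.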
apiVersion: projectcontour.io/v1
kind: HTTPProxy
metadata:
  name: vhostratelimit
spec:
  virtualhost:
    fqdn: vhostratelimit.projectcontour.io
    rateLimitPolicy:
      global:
        descriptors:
        - entries:
          - genericKey:
              value: vhostlimit
  routes:
  - services:
    - name: echo
      port: 80
---

# Make a request against the proxy, confirm a 200 response
# is returned.

import data.contour.http.client
import data.contour.http.client.url
import data.contour.http.expect

Response := client.Get({
  "url": url.http("/"),
  "headers": {
    "Host": "vhostratelimit.projectcontour.io",
    "User-Agent": client.ua("global-rate-limit"),
  },
})

check_for_status_code [msg] {
  msg := expect.response_status_is(Response, 200)
}

---

# Make another request against the proxy and confirm that a 429
# response is now returned, since the rate limit has been exceeded.

import data.contour.http.client
import data.contour.http.client.url
import data.contour.http.expect

Response := client.Get({
  "url": url.http("/"),
  "headers": {
    "Host": "vhostratelimit.projectcontour.io",
    "User-Agent": client.ua("global-rate-limit"),
  },
})

check_for_status_code [msg] {
  msg := expect.response_status_is(Response, 429)
}

---

# This proxy has a global rate limit on a route.
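# The rateLimitPolicy applies only to the first route; the /unlimited
# route has no policy and should never be rate limited.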
apiVersion: projectcontour.io/v1
kind: HTTPProxy
metadata:
  name: routeratelimit
spec:
  virtualhost:
    fqdn: routeratelimit.projectcontour.io
  routes:
  - services:
    - name: echo
      port: 80
    rateLimitPolicy:
      global:
        descriptors:
        - entries:
          - genericKey:
              value: routelimit
  - conditions:
    - prefix: /unlimited
    services:
    - name: echo
      port: 80
---

# Make a request against the proxy, confirm a 200 response
# is returned.

import data.contour.http.client
import data.contour.http.client.url
import data.contour.http.expect

Response := client.Get({
  "url": url.http("/"),
  "headers": {
    "Host": "routeratelimit.projectcontour.io",
    "User-Agent": client.ua("global-rate-limit"),
  },
})

check_for_status_code [msg] {
  msg := expect.response_status_is(Response, 200)
}

---

# Make another request against the proxy and confirm that a 429
# response is now returned, since the rate limit has been exceeded.

import data.contour.http.client
import data.contour.http.client.url
import data.contour.http.expect

Response := client.Get({
  "url": url.http("/"),
  "headers": {
    "Host": "routeratelimit.projectcontour.io",
    "User-Agent": client.ua("global-rate-limit"),
  },
})

check_for_status_code [msg] {
  msg := expect.response_status_is(Response, 429)
}

---

# Make a request against the route that doesn't have
# rate limiting to confirm we still get a 200 for that
# route.

import data.contour.http.client
import data.contour.http.client.url
import data.contour.http.expect

Response := client.Get({
  "url": url.http("/unlimited"),
  "headers": {
    "Host": "routeratelimit.projectcontour.io",
    "User-Agent": client.ua("global-rate-limit"),
  },
})

check_for_status_code [msg] {
  msg := expect.response_status_is(Response, 200)
}
90 changes: 90 additions & 0 deletions _integration/testsuite/install-ratelimit-service.sh
@@ -0,0 +1,90 @@
#! /usr/bin/env bash

# Copyright Project Contour Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# install-ratelimit-service.sh: Install a rate limit service and configuration
# for Contour.

set -o pipefail
set -o errexit
set -o nounset

readonly KIND=${KIND:-kind}
readonly KUBECTL=${KUBECTL:-kubectl}

readonly WAITTIME=${WAITTIME:-5m}

readonly HERE=$(cd $(dirname $0) && pwd)
readonly REPO=$(cd ${HERE}/../.. && pwd)

# Define some rate limiting policies to correspond to
# testsuite/httpproxy/020-global-rate-limiting.yaml.
${KUBECTL} apply -f - <<EOF
apiVersion: v1
kind: ConfigMap
metadata:
  name: ratelimit-config
  namespace: projectcontour
data:
  ratelimit-config.yaml: |
    domain: contour
    descriptors:
      - key: generic_key
        value: vhostlimit
        rate_limit:
          unit: hour
          requests_per_unit: 1
      - key: generic_key
        value: routelimit
        rate_limit:
          unit: hour
          requests_per_unit: 1
EOF
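
# The generic_key descriptors above correspond to the genericKey entries in
# the HTTPProxy rateLimitPolicy blocks in
# testsuite/httpproxy/020-global-rate-limiting.yaml; with a limit of one
# request per hour, the second request in each test case gets a 429.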

# Create the ratelimit deployment, service and extension service.
${KUBECTL} apply -f ${REPO}/examples/ratelimit/02-ratelimit.yaml
${KUBECTL} apply -f ${REPO}/examples/ratelimit/03-ratelimit-extsvc.yaml

# Configure the ratelimit extension service with Contour.
${KUBECTL} apply -f - <<EOF
apiVersion: v1
kind: ConfigMap
metadata:
  name: contour
  namespace: projectcontour
data:
  contour.yaml: |
    rateLimitService:
      extensionService: projectcontour/ratelimit
      domain: contour
      failOpen: false
    # The options below are copied from
    # install-fallback-certificate.sh.
    tls:
      fallback-certificate:
        name: fallback-cert
        namespace: projectcontour
    accesslog-format: envoy
    disablePermitInsecure: false
EOF
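
# Note: with failOpen set to false, requests are rejected rather than
# allowed if the rate limit service becomes unavailable.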

# Restart to pick up the new config map.
${KUBECTL} rollout restart deployment -n projectcontour contour

# Wait for contour to restart.
${KUBECTL} wait --timeout="${WAITTIME}" -n projectcontour -l app=contour deployments --for=condition=Available

# Wait for ratelimit service to be ready.
${KUBECTL} wait --timeout="${WAITTIME}" -n projectcontour -l app=ratelimit deployments --for=condition=Available
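
For a quick manual spot check (a sketch, not part of this commit): assuming the
fixtures from 020-global-rate-limiting.yaml are applied and Envoy is reachable,
two consecutive requests to the vhostratelimit virtual host should return 200
and then 429, since the policy allows one request per hour. The ENVOY_IP
variable below is a placeholder for however Envoy is exposed in your cluster.

#! /usr/bin/env bash

# Placeholder: the address Envoy is exposed on in your cluster.
ENVOY_IP=${ENVOY_IP:-127.0.0.1}

# First request: under the limit, expect HTTP 200.
curl -s -o /dev/null -w "%{http_code}\n" \
    -H "Host: vhostratelimit.projectcontour.io" "http://${ENVOY_IP}/"

# Second request: the one-request-per-hour limit is exhausted, expect HTTP 429.
curl -s -o /dev/null -w "%{http_code}\n" \
    -H "Host: vhostratelimit.projectcontour.io" "http://${ENVOY_IP}/"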
