
Proxy dies on send_http_response #44

Closed
@eloycoto

Description


Hi,

Using envoy-latest with proxy-wasm, if the filter uses the send_http_response helper, the proxy dies with the following output.

proxy_1  | [2020-10-30 21:23:48.275][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:104] Caught Segmentation fault, suspect faulting address 0x0
proxy_1  | [2020-10-30 21:23:48.275][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:91] Backtrace (use tools/stack_decode.py to get line numbers):
proxy_1  | [2020-10-30 21:23:48.275][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:92] Envoy version: 3a32d23c7c361b6ffd5860a707af8957326b2b17/1.17.0-dev/Clean/RELEASE/BoringSSL
proxy_1  | [2020-10-30 21:23:48.276][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:96] #0: __restore_rt [0x7f7a678188a0]->[0x29a36a9a88a0] ??:0
proxy_1  | [2020-10-30 21:23:48.314][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:96] #1: Envoy::ConnectionPool::ConnPoolImplBase::attachStreamToClient() [0x55d6fe79cd29]->[0x192cd29] bazel-out/k8-opt/bin/source/extensions/filters/network/kafka/_virtual_includes/serialization_lib/extensions/filters/network/kafka/serialization.h:455
proxy_1  | [2020-10-30 21:23:48.329][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:96] #2: Envoy::ConnectionPool::ConnPoolImplBase::onUpstreamReady() [0x55d6fe79d669]->[0x192d669] /opt/llvm/bin/../include/c++/v1/memory:3483
proxy_1  | [2020-10-30 21:23:48.340][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:96] #3: Envoy::ConnectionPool::ConnPoolImplBase::onConnectionEvent() [0x55d6fe79e4e0]->[0x192e4e0] /opt/llvm/bin/../include/c++/v1/vector:1635
proxy_1  | [2020-10-30 21:23:48.351][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:96] #4: Envoy::Network::ConnectionImplBase::raiseConnectionEvent() [0x55d6fe630e3b]->[0x17c0e3b] /opt/llvm/bin/../include/c++/v1/memory:1876
proxy_1  | [2020-10-30 21:23:48.362][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:96] #5: Envoy::Network::ConnectionImpl::raiseEvent() [0x55d6fe6299f9]->[0x17b99f9] /opt/llvm/bin/../include/c++/v1/vector:1540
proxy_1  | [2020-10-30 21:23:48.372][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:96] #6: Envoy::Network::ConnectionImpl::onWriteReady() [0x55d6fe62bc6c]->[0x17bbc6c] bazel-out/k8-opt/bin/source/extensions/filters/network/kafka/_virtual_includes/tagged_fields_lib/extensions/filters/network/kafka/tagged_fields.h:101
proxy_1  | [2020-10-30 21:23:48.382][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:96] #7: Envoy::Network::ConnectionImpl::onFileEvent() [0x55d6fe62ad09]->[0x17bad09] external/com_google_absl/absl/strings/str_cat.h:217
proxy_1  | [2020-10-30 21:23:48.392][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:96] #8: Envoy::Event::FileEventImpl::assignEvents()::$_1::__invoke() [0x55d6fe620f66]->[0x17b0f66] bazel-out/k8-opt/bin/source/extensions/filters/network/kafka/_virtual_includes/kafka_request_parser_lib/extensions/filters/network/kafka/kafka_request_parser.h:188
proxy_1  | [2020-10-30 21:23:48.402][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:96] #9: event_process_active_single_queue [0x55d6fea57128]->[0x1be7128] /opt/llvm/bin/../include/c++/v1/memory:3028
proxy_1  | [2020-10-30 21:23:48.412][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:96] #10: event_base_loop [0x55d6fea55afe]->[0x1be5afe] /opt/llvm/bin/../include/c++/v1/memory:4056
proxy_1  | [2020-10-30 21:23:48.423][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:96] #11: Envoy::Server::WorkerImpl::threadRoutine() [0x55d6fe612068]->[0x17a2068] /usr/include/x86_64-linux-gnu/bits/string_fortified.h:34
proxy_1  | [2020-10-30 21:23:48.432][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:96] #12: Envoy::Thread::ThreadImplPosix::ThreadImplPosix()::{lambda()#1}::__invoke() [0x55d6fec06d13]->[0x1d96d13] /opt/llvm/bin/../include/c++/v1/__tree:1834
proxy_1  | [2020-10-30 21:23:48.432][32][critical][backtrace] [bazel-out/k8-opt/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:96] #13: start_thread [0x7f7a6780d6db]->[0x29a36a99d6db] ??:0
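
The call that triggers the crash is the send_http_response helper in the request-headers callback (the full filter is included below):

fn on_http_request_headers(&mut self, _: usize) -> Action {
    // Sending the local reply here is enough to bring the worker down.
    self.send_http_response(403, vec![], Some(b"Access forbidden.\n"));
    Action::Continue
}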

The current setup:

Envoy container: envoyproxy/envoy-debug-dev:53c28f89f60c
Config:

static_resources:
  listeners:
  - name: main
    address:
      socket_address:
        address: 0.0.0.0
        port_value: 80
    filter_chains:
    - filters:
      - name: envoy.http_connection_manager
        config:
          stat_prefix: ingress_http
          codec_type: auto
          route_config:
            name: local_route
            virtual_hosts:
            - name: local_service
              domains:
              - "*"
              routes:
              - match:
                  prefix: "/"
                route:
                  cluster: httpbin
          http_filters:
          - name: envoy.filters.http.wasm
            config:
              config:
                name: "my_plugin"
                root_id: "add_header"
                vm_config:
                  vm_id: "my_vm_id"
                  runtime: "envoy.wasm.runtime.v8"
                  code:
                    local:
                      filename: "/opt/filter.wasm"
                  allow_precompiled: true
          - name: envoy.router
            config: {}
  clusters:
  - name: httpbin
    connect_timeout: 1s
    type: logical_dns
    lb_policy: round_robin
    hosts:
    - socket_address:
        address: httpbin.org
        port_value: 80
admin:
  access_log_path: "/dev/null"
  address:
    socket_address:
      address: 0.0.0.0
      port_value: 8001

Filter:

use chrono::{DateTime, Utc};
use log::info;
use proxy_wasm::traits::*;
use proxy_wasm::types::*;
use std::time::Duration;

#[no_mangle]
pub fn _start() {
    // Set the log level
    proxy_wasm::set_log_level(LogLevel::Trace);

    // RootContext: the main context, which handles the Envoy plugin configuration
    proxy_wasm::set_root_context(|_| -> Box<dyn RootContext> { Box::new(RootConfig) });

    // HttpContext: created for each HTTP request
    proxy_wasm::set_http_context(|context_id, _| -> Box<dyn HttpContext> {
        Box::new(HttpHeaders { context_id })
    });
}

struct RootConfig;

impl Context for RootConfig {}

impl RootContext for RootConfig {
    fn on_vm_start(&mut self, _: usize) -> bool {
        info!("Config:: VM Started correctly!");
        self.set_tick_period(Duration::from_secs(30));
        true
    }

    fn on_tick(&mut self) {
        let datetime: DateTime<Utc> = self.get_current_time().into();
        info!("Config:: New Tick at '{}'", datetime);
    }
}

struct HttpHeaders {
    context_id: u32,
}

impl Context for HttpHeaders {}

impl HttpContext for HttpHeaders {
    fn on_http_request_headers(&mut self, _: usize) -> Action {
        info!("Request headers phase here!");
        for (name, value) in &self.get_http_request_headers() {
            info!(
                "#Request HEADERS::{} -> {}: {}",
                self.context_id, name, value
            );
        }

        info!("ON HTTP REQUEST_HEADERS",);
        // self.set_http_request_header("TEST", Some("foobar"));
        self.send_http_response(403, vec![], Some(b"Access forbidden.\n"));
        Action::Continue
    }

    // fn on_log(&mut self) {
    //     info!("Request #{} completed.", self.context_id);
    // }
}
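
For reference, the proxy-wasm Rust SDK examples I have seen return Action::Pause after sending a local reply, so the variant below may be the intended usage; even so, returning Continue probably shouldn't segfault the worker:

fn on_http_request_headers(&mut self, _: usize) -> Action {
    // Send the local reply and pause further processing of this request.
    self.send_http_response(403, vec![], Some(b"Access forbidden.\n"));
    Action::Pause
}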

The best way to replicate the issue:

git clone https://github.com/eloycoto/envoy_playground.git
cd envoy_dump
docker-compose up
curl http://127.0.0.1:18000

Regards
