Skip to content

allow custom headers#129

Merged
ruslandoga merged 1 commit into master from
allow-custom-headers
Oct 29, 2023
Merged

allow custom headers#129
ruslandoga merged 1 commit into master from
allow-custom-headers

Conversation

@ruslandoga
Copy link
Collaborator

@ruslandoga ruslandoga commented Oct 25, 2023

Useful for fetching compressed responses, for example. https://clickhouse.com/docs/en/interfaces/http#compression

iex> {:ok, conn} = Ch.start_link()
iex> Ch.query(
  conn,
  "select * from system.numbers limit 100",
  _params = [],
  format: "CSVWithNames",
  headers: [{"accept-encoding", "gzip"}],
  settings: [enable_http_compression: 1]
)
{:ok,
 %Ch.Result{
   command: :select,
   num_rows: nil,
   rows: [
     <<31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 4, 193, 185, 77, 0, 64, 16, 4, 65, 191,
       195, 32, 130, 155, 157, 125, 211, 65, 194, 4, 3, 137, 252, 169, 250, 248,
       249, 251, 254, 252, 250, 253, 224, 33, ...>>
   ],
   headers: [
     {"date", "Wed, 25 Oct 2023 17:36:19 GMT"},
     {"connection", "Keep-Alive"},
     {"content-type", "text/csv; charset=UTF-8; header=present"},
     {"x-clickhouse-server-display-name", "c8f52f9cd86e"},
     {"transfer-encoding", "chunked"},
     {"x-clickhouse-query-id", "d341876d-edb4-4825-a965-008ab7b2f450"},
     {"x-clickhouse-format", "CSVWithNames"},
     {"x-clickhouse-timezone", "UTC"},
     {"keep-alive", "timeout=3"},
     {"content-encoding", "gzip"},
     {"x-clickhouse-summary",
      "{\"read_rows\":\"100\",\"read_bytes\":\"800\",\"written_rows\":\"0\",\"written_bytes\":\"0\",\"total_rows_to_read\":\"0\",\"result_rows\":\"0\",\"result_bytes\":\"0\"}"}
   ]
 }}

@ruslandoga
Copy link
Collaborator Author

ruslandoga commented Oct 25, 2023

Small compression ratio / time test:

{:ok, conn} = Ch.start_link()
sql = "select * from system.numbers limit {$0:UInt64} format CSVWithNames"

for limit <- [
      10,
      100,
      1000,
      10000,
      100_000,
      1_000_000,
      10_000_000,
      100_000_000
    ],
    codec <- ["gzip", "zstd", "lz4", :default, nil] do
  opts =
    case codec do
      nil -> []
      :default -> [settings: [compress: 1]]
      _ -> [settings: [enable_http_compression: 1], headers: [{"accept-encoding", codec}]]
    end

  before = System.monotonic_time(:millisecond)
  resp = Ch.query!(conn, sql, [limit], opts)
  time = System.monotonic_time(:millisecond) - before
  size = IO.iodata_length(resp.rows)

  %{numbers: limit, compression: codec, byte_size: size, time_ms: time}
end
Result
results = [
  %{byte_size: 55, numbers: 10, compression: "gzip", time_ms: 25},
  %{byte_size: 48, numbers: 10, compression: "zstd", time_ms: 9},
  %{byte_size: 48, numbers: 10, compression: "lz4", time_ms: 5},
  %{byte_size: 82, numbers: 10, compression: :default, time_ms: 4},
  %{byte_size: 29, numbers: 10, compression: nil, time_ms: 3},
  %{byte_size: 175, numbers: 100, compression: "gzip", time_ms: 3},
  %{byte_size: 165, numbers: 100, compression: "zstd", time_ms: 4},
  %{byte_size: 318, numbers: 100, compression: "lz4", time_ms: 3},
  %{byte_size: 353, numbers: 100, compression: :default, time_ms: 2},
  %{byte_size: 299, numbers: 100, compression: nil, time_ms: 2},
  %{byte_size: 1782, numbers: 1000, compression: "gzip", time_ms: 2},
  %{byte_size: 1671, numbers: 1000, compression: "zstd", time_ms: 8},
  %{byte_size: 3829, numbers: 1000, compression: "lz4", time_ms: 4},
  %{byte_size: 3929, numbers: 1000, compression: :default, time_ms: 2},
  %{byte_size: 3899, numbers: 1000, compression: nil, time_ms: 1},
  %{byte_size: 21572, numbers: 10000, compression: "gzip", time_ms: 3},
  %{byte_size: 21262, numbers: 10000, compression: "zstd", time_ms: 3},
  %{byte_size: 38848, numbers: 10000, compression: "lz4", time_ms: 3},
  %{byte_size: 39836, numbers: 10000, compression: :default, time_ms: 2},
  %{byte_size: 48899, numbers: 10000, compression: nil, time_ms: 1},
  %{byte_size: 213195, numbers: 100000, compression: "gzip", time_ms: 9},
  %{byte_size: 74083, numbers: 100000, compression: "zstd", time_ms: 5},
  %{byte_size: 397842, numbers: 100000, compression: "lz4", time_ms: 7},
  %{byte_size: 411608, numbers: 100000, compression: :default, time_ms: 4},
  %{byte_size: 588899, numbers: 100000, compression: nil, time_ms: 2},
  %{byte_size: 2144414, numbers: 1000000, compression: "gzip", time_ms: 45},
  %{byte_size: 293704, numbers: 1000000, compression: "zstd", time_ms: 24},
  %{byte_size: 3997929, numbers: 1000000, compression: "lz4", time_ms: 43},
  %{byte_size: 4153466, numbers: 1000000, compression: :default, time_ms: 20},
  %{byte_size: 6888899, numbers: 1000000, compression: nil, time_ms: 6},
  %{byte_size: 21788402, numbers: 10000000, compression: "gzip", time_ms: 387},
  %{byte_size: 2980459, numbers: 10000000, compression: "zstd", time_ms: 215},
  %{byte_size: 40014640, numbers: 10000000, compression: "lz4", time_ms: 398},
  %{byte_size: 41831120, numbers: 10000000, compression: :default, time_ms: 157},
  %{byte_size: 78888899, numbers: 10000000, compression: nil, time_ms: 48},
  %{byte_size: 229961299, numbers: 100000000, compression: "gzip", time_ms: 3789},
  %{byte_size: 30967185, numbers: 100000000, compression: "zstd", time_ms: 2371},
  %{byte_size: 400253481, numbers: 100000000, compression: "lz4", time_ms: 4112},
  %{byte_size: 419014292, numbers: 100000000, compression: :default, time_ms: 1584},
  %{byte_size: 888888899, numbers: 100000000, compression: nil, time_ms: 643}
]
Fastest
Enum.group_by(results, & &1.numbers) |> Enum.map(fn {n, r} -> {n, r |> Enum.map(&Map.take(&1, [:compression, :time_ms])) |> Enum.sort_by(& &1.time_ms, :asc)} end)

[
  {10,
   [
     %{compression: nil, time_ms: 3},
     %{compression: :default, time_ms: 4},
     %{compression: "lz4", time_ms: 5},
     %{compression: "zstd", time_ms: 9},
     %{compression: "gzip", time_ms: 25}
   ]},
  {100,
   [
     %{compression: :default, time_ms: 2},
     %{compression: nil, time_ms: 2},
     %{compression: "gzip", time_ms: 3},
     %{compression: "lz4", time_ms: 3},
     %{compression: "zstd", time_ms: 4}
   ]},
  {1000,
   [
     %{compression: nil, time_ms: 1},
     %{compression: "gzip", time_ms: 2},
     %{compression: :default, time_ms: 2},
     %{compression: "lz4", time_ms: 4},
     %{compression: "zstd", time_ms: 8}
   ]},
  {10000,
   [
     %{compression: nil, time_ms: 1},
     %{compression: :default, time_ms: 2},
     %{compression: "gzip", time_ms: 3},
     %{compression: "zstd", time_ms: 3},
     %{compression: "lz4", time_ms: 3}
   ]},
  {100000,
   [
     %{compression: nil, time_ms: 2},
     %{compression: :default, time_ms: 4},
     %{compression: "zstd", time_ms: 5},
     %{compression: "lz4", time_ms: 7},
     %{compression: "gzip", time_ms: 9}
   ]},
  {1000000,
   [
     %{compression: nil, time_ms: 6},
     %{compression: :default, time_ms: 20},
     %{compression: "zstd", time_ms: 24},
     %{compression: "lz4", time_ms: 43},
     %{compression: "gzip", time_ms: 45}
   ]},
  {10000000,
   [
     %{compression: nil, time_ms: 48},
     %{compression: :default, time_ms: 157},
     %{compression: "zstd", time_ms: 215},
     %{compression: "gzip", time_ms: 387},
     %{compression: "lz4", time_ms: 398}
   ]},
  {100000000,
   [
     %{compression: nil, time_ms: 643},
     %{compression: :default, time_ms: 1584},
     %{compression: "zstd", time_ms: 2371},
     %{compression: "gzip", time_ms: 3789},
     %{compression: "lz4", time_ms: 4112}
   ]}
]
Smallest
Enum.group_by(results, & &1.numbers) |> Enum.map(fn {n, r} -> {n, r |> Enum.map(&Map.take(&1, [:compression, :byte_size])) |> Enum.sort_by(& &1.byte_size, :asc)} end)

[
  {10,
   [
     %{byte_size: 29, compression: nil},
     %{byte_size: 48, compression: "zstd"},
     %{byte_size: 48, compression: "lz4"},
     %{byte_size: 55, compression: "gzip"},
     %{byte_size: 82, compression: :default}
   ]},
  {100,
   [
     %{byte_size: 165, compression: "zstd"},
     %{byte_size: 175, compression: "gzip"},
     %{byte_size: 299, compression: nil},
     %{byte_size: 318, compression: "lz4"},
     %{byte_size: 353, compression: :default}
   ]},
  {1000,
   [
     %{byte_size: 1671, compression: "zstd"},
     %{byte_size: 1782, compression: "gzip"},
     %{byte_size: 3829, compression: "lz4"},
     %{byte_size: 3899, compression: nil},
     %{byte_size: 3929, compression: :default}
   ]},
  {10000,
   [
     %{byte_size: 21262, compression: "zstd"},
     %{byte_size: 21572, compression: "gzip"},
     %{byte_size: 38848, compression: "lz4"},
     %{byte_size: 39836, compression: :default},
     %{byte_size: 48899, compression: nil}
   ]},
  {100000,
   [
     %{byte_size: 74083, compression: "zstd"},
     %{byte_size: 213195, compression: "gzip"},
     %{byte_size: 397842, compression: "lz4"},
     %{byte_size: 411608, compression: :default},
     %{byte_size: 588899, compression: nil}
   ]},
  {1000000,
   [
     %{byte_size: 293704, compression: "zstd"},
     %{byte_size: 2144414, compression: "gzip"},
     %{byte_size: 3997929, compression: "lz4"},
     %{byte_size: 4153466, compression: :default},
     %{byte_size: 6888899, compression: nil}
   ]},
  {10000000,
   [
     %{byte_size: 2980459, compression: "zstd"},
     %{byte_size: 21788402, compression: "gzip"},
     %{byte_size: 40014640, compression: "lz4"},
     %{byte_size: 41831120, compression: :default},
     %{byte_size: 78888899, compression: nil}
   ]},
  {100000000,
   [
     %{byte_size: 30967185, compression: "zstd"},
     %{byte_size: 229961299, compression: "gzip"},
     %{byte_size: 400253481, compression: "lz4"},
     %{byte_size: 419014292, compression: :default},
     %{byte_size: 888888899, compression: nil}
   ]}
]

@ruslandoga ruslandoga force-pushed the allow-custom-headers branch from b32b2bc to 42cc0a8 Compare October 29, 2023 09:05
@ruslandoga ruslandoga merged commit 9b08f52 into master Oct 29, 2023
@ruslandoga ruslandoga deleted the allow-custom-headers branch October 29, 2023 09:07
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment

Labels

None yet

Projects

None yet

Development

Successfully merging this pull request may close these issues.

1 participant