Skip to content
3 changes: 2 additions & 1 deletion config/runtime.exs
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,8 @@ config :logflare,
encryption_key_retired: System.get_env("LOGFLARE_DB_ENCRYPTION_KEY_RETIRED"),
metadata: logflare_metadata,
health: logflare_health,
http_connection_pools: http_connection_pools
http_connection_pools: http_connection_pools,
bq_write_api_pool_size: System.get_env("LOGFLARE_BQ_WRITE_API_POOL_SIZE")
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
bq_write_api_pool_size: System.get_env("LOGFLARE_BQ_WRITE_API_POOL_SIZE")
bq_write_api_pool_size: System.get_env("LOGFLARE_BIGQUERY_WRITE_API_POOL_SIZE")

In my opinion we should manage the connection pool automatically, but we can give this approach a try first. It also needs a sensible default value.

]
|> filter_nil_kv_pairs.()

Expand Down
5 changes: 4 additions & 1 deletion lib/logflare/application.ex
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,10 @@ defmodule Logflare.Application do
LogflareWeb.Endpoint,
# If we get a log event and the Source.Supervisor is not up it will 500
{GRPC.Server.Supervisor,
endpoint: LogflareGrpc.Endpoint, port: grpc_port, cred: grpc_creds, start_server: true},
endpoint: LogflareGrpc.Endpoint,
port: grpc_port,
start_server: true,
adapter_opts: [cred: grpc_creds]},
# Monitor system level metrics
SystemMetricsSup,
Logflare.Telemetry,
Expand Down
7 changes: 2 additions & 5 deletions lib/logflare/backends/adaptor/bigquery_adaptor.ex
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,6 @@ defmodule Logflare.Backends.Adaptor.BigQueryAdaptor do
end

def insert_log_events_via_storage_write_api(log_events, opts) do
# convert log events to table rows
context =
Keyword.validate!(opts, [
:project_id,
Expand All @@ -115,7 +114,6 @@ defmodule Logflare.Backends.Adaptor.BigQueryAdaptor do
:backend_id
])

# get table id
table_id = format_table_name(opts[:source_token])

arrow_data =
Expand Down Expand Up @@ -143,14 +141,13 @@ defmodule Logflare.Backends.Adaptor.BigQueryAdaptor do
OpenTelemetry.Tracer.with_span "ingest.bq_api_call", %{
attributes: %{insert_method: :bq_storage_write}
} do
GoogleApiClient.append_rows({:arrow, arrow_data}, context, table_id)
|> tap(fn
case GoogleApiClient.append_rows({:arrow, arrow_data}, context, table_id) do
{:error, reason} ->
OpenTelemetry.Tracer.set_status(:error, inspect(reason))

_ ->
:ok
end)
end
end
end

Expand Down

This file was deleted.

This file was deleted.

Loading
Loading