@@ -218,8 +210,6 @@ export default function Page() {
return (
@@ -235,15 +225,11 @@ export default function Page() {
@@ -258,14 +244,10 @@ export default function Page() {
function FiltersBar({
list,
- isAdmin,
- showDebug,
defaultPeriod,
retentionLimitDays,
}: {
  list?: Exclude["data"]>, { error: string }>;
- isAdmin: boolean;
- showDebug: boolean;
defaultPeriod?: string;
retentionLimitDays: number;
}) {
@@ -280,16 +262,6 @@ function FiltersBar({
searchParams.has("from") ||
searchParams.has("to");
- const handleDebugToggle = useCallback((checked: boolean) => {
- const url = new URL(window.location.href);
- if (checked) {
- url.searchParams.set("showDebug", "true");
- } else {
- url.searchParams.delete("showDebug");
- }
- window.location.href = url.toString();
- }, []);
-
return (
@@ -329,16 +301,6 @@ function FiltersBar({
>
)}
-
- {isAdmin && (
-
- )}
-
);
}
@@ -347,8 +309,6 @@ function LogsList({
list,
}: {
  list: Exclude["data"]>, { error: string }>; // exclude error; it is handled elsewhere
- isAdmin: boolean;
- showDebug: boolean;
defaultPeriod?: string;
}) {
const navigation = useNavigation();
diff --git a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx
deleted file mode 100644
index 53bc655a02..0000000000
--- a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx
+++ /dev/null
@@ -1,100 +0,0 @@
-import { type LoaderFunctionArgs } from "@remix-run/server-runtime";
-import { json } from "@remix-run/node";
-import { requireUserId } from "~/services/session.server";
-import { EnvironmentParamSchema } from "~/utils/pathBuilder";
-import { findProjectBySlug } from "~/models/project.server";
-import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server";
-import { clickhouseClient } from "~/services/clickhouseInstance.server";
-
-// Convert ClickHouse kind to display level
-function kindToLevel(
- kind: string
-): "TRACE" | "DEBUG" | "INFO" | "WARN" | "ERROR" | "LOG" {
- switch (kind) {
- case "DEBUG_EVENT":
- case "LOG_DEBUG":
- return "DEBUG";
- case "LOG_INFO":
- return "INFO";
- case "LOG_WARN":
- return "WARN";
- case "LOG_ERROR":
- return "ERROR";
- case "LOG_LOG":
- return "LOG";
- case "SPAN":
- case "ANCESTOR_OVERRIDE":
- case "SPAN_EVENT":
- default:
- return "TRACE";
- }
-}
-
-// Fetch related spans for a log entry from the same trace
-export const loader = async ({ request, params }: LoaderFunctionArgs) => {
- const userId = await requireUserId(request);
- const { projectParam, organizationSlug, envParam, logId } = {
- ...EnvironmentParamSchema.parse(params),
- logId: params.logId,
- };
-
- if (!logId) {
- throw new Response("Log ID is required", { status: 400 });
- }
-
- const project = await findProjectBySlug(organizationSlug, projectParam, userId);
- if (!project) {
- throw new Response("Project not found", { status: 404 });
- }
-
- const environment = await findEnvironmentBySlug(project.id, envParam, userId);
- if (!environment) {
- throw new Response("Environment not found", { status: 404 });
- }
-
- // Get trace ID and run ID from query params
- const url = new URL(request.url);
- const traceId = url.searchParams.get("traceId");
- const runId = url.searchParams.get("runId");
- const currentSpanId = url.searchParams.get("spanId");
-
- if (!traceId || !runId) {
- throw new Response("Trace ID and Run ID are required", { status: 400 });
- }
-
- // Query ClickHouse for related spans in the same trace
- const queryBuilder = clickhouseClient.taskEventsV2.logsListQueryBuilder();
-
- queryBuilder.where("environment_id = {environmentId: String}", {
- environmentId: environment.id,
- });
- queryBuilder.where("trace_id = {traceId: String}", { traceId });
- queryBuilder.where("run_id = {runId: String}", { runId });
-
- // Order by start time to show spans in chronological order
- queryBuilder.orderBy("start_time ASC");
- queryBuilder.limit(50);
-
- const [queryError, records] = await queryBuilder.execute();
-
- if (queryError) {
- throw queryError;
- }
-
- const results = records || [];
-
- const spans = results.map((row) => ({
- id: `${row.trace_id}::${row.span_id}::${row.run_id}::${row.start_time}`,
- spanId: row.span_id,
- parentSpanId: row.parent_span_id || null,
- message: row.message.substring(0, 200), // Truncate for list view
- kind: row.kind,
- level: kindToLevel(row.kind),
- status: row.status,
- startTime: new Date(Number(row.start_time) / 1_000_000).toISOString(),
- duration: Number(row.duration),
- isCurrent: row.span_id === currentSpanId,
- }));
-
- return json({ spans });
-};
diff --git a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.ts b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.ts
index 656e20472e..cac4c0f702 100644
--- a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.ts
+++ b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.ts
@@ -21,7 +21,6 @@ function parseLevelsFromUrl(url: URL): LogLevel[] | undefined {
export const loader = async ({ request, params }: LoaderFunctionArgs) => {
const user = await requireUser(request);
const userId = user.id;
- const isAdmin = user?.admin || user?.isImpersonating;
const { projectParam, organizationSlug, envParam } = EnvironmentParamSchema.parse(params);
@@ -46,7 +45,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => {
const search = url.searchParams.get("search") ?? undefined;
const cursor = url.searchParams.get("cursor") ?? undefined;
const levels = parseLevelsFromUrl(url);
- const showDebug = url.searchParams.get("showDebug") === "true";
const period = url.searchParams.get("period") ?? undefined;
const fromStr = url.searchParams.get("from");
const toStr = url.searchParams.get("to");
@@ -67,7 +65,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => {
from,
to,
levels,
- includeDebugLogs: isAdmin && showDebug,
defaultPeriod: "1h",
retentionLimitDays,
}) as any; // Validated by LogsListOptionsSchema at runtime
diff --git a/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql b/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql
new file mode 100644
index 0000000000..60ed1eb1fc
--- /dev/null
+++ b/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql
@@ -0,0 +1,63 @@
+-- +goose Up
+CREATE TABLE IF NOT EXISTS trigger_dev.task_events_search_v1
+(
+ environment_id String,
+ organization_id String,
+ project_id String,
+ triggered_timestamp DateTime64(9) CODEC(Delta(8), ZSTD(1)),
+ trace_id String CODEC(ZSTD(1)),
+ span_id String CODEC(ZSTD(1)),
+ run_id String CODEC(ZSTD(1)),
+ task_identifier String CODEC(ZSTD(1)),
+ start_time DateTime64(9) CODEC(Delta(8), ZSTD(1)),
+ inserted_at DateTime64(3),
+ message String CODEC(ZSTD(1)),
+ kind LowCardinality(String) CODEC(ZSTD(1)),
+ status LowCardinality(String) CODEC(ZSTD(1)),
+ duration UInt64 CODEC(ZSTD(1)),
+ parent_span_id String CODEC(ZSTD(1)),
+ attributes_text String CODEC(ZSTD(1)),
+
+ INDEX idx_run_id run_id TYPE bloom_filter(0.001) GRANULARITY 1,
+ INDEX idx_message_text_search lower(message) TYPE ngrambf_v1(3, 32768, 2, 0) GRANULARITY 1,
+ INDEX idx_attributes_text_search lower(attributes_text) TYPE ngrambf_v1(3, 32768, 2, 0) GRANULARITY 1
+)
+ENGINE = MergeTree
+PARTITION BY toDate(triggered_timestamp)
+ORDER BY (organization_id, environment_id, triggered_timestamp, trace_id)
+-- Right now we have a maximum retention of up to 30 days, depending on plan.
+-- We enforce a logical limit in the application for now; the 90 DAY TTL below is just a backup.
+-- This might need to be updated for longer retention periods.
+TTL toDateTime(triggered_timestamp) + INTERVAL 90 DAY
+SETTINGS ttl_only_drop_parts = 1;
+
+CREATE MATERIALIZED VIEW IF NOT EXISTS trigger_dev.task_events_search_mv_v1
+TO trigger_dev.task_events_search_v1 AS
+SELECT
+ environment_id,
+ organization_id,
+ project_id,
+ trace_id,
+ span_id,
+ run_id,
+ task_identifier,
+ start_time,
+ inserted_at,
+ message,
+ kind,
+ status,
+ duration,
+ parent_span_id,
+ attributes_text,
+ fromUnixTimestamp64Nano(toUnixTimestamp64Nano(start_time) + toInt64(duration)) AS triggered_timestamp
+FROM trigger_dev.task_events_v2
+WHERE
+ kind != 'DEBUG_EVENT'
+ AND status != 'PARTIAL'
+ AND NOT (kind = 'SPAN_EVENT' AND attributes_text = '{}')
+ AND kind != 'ANCESTOR_OVERRIDE'
+ AND message != 'trigger.dev/start';
+
+-- +goose Down
+DROP VIEW IF EXISTS trigger_dev.task_events_search_mv_v1;
+DROP TABLE IF EXISTS trigger_dev.task_events_search_v1;
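Note on the `triggered_timestamp` column: the materialized view derives it as start time plus duration, in nanoseconds. A minimal TypeScript sketch of the same arithmetic on the read side (the helper names are illustrative, not part of the codebase; the ns-to-ms conversion mirrors the one the deleted spans route used for `start_time`):

```ts
// Illustrative helpers, not existing exports. They mirror the MV expression
// fromUnixTimestamp64Nano(toUnixTimestamp64Nano(start_time) + toInt64(duration)).
function triggeredTimestampNs(startTimeNs: bigint, durationNs: bigint): bigint {
  // DateTime64(9) values round-trip as nanoseconds since the Unix epoch
  return startTimeNs + durationNs;
}

function nsToDate(ns: bigint): Date {
  // JS Dates have millisecond precision, so divide by 1e6 (as the deleted route did)
  return new Date(Number(ns / 1_000_000n));
}

// A span starting now with a 2.5s duration gets a triggered_timestamp 2.5s later
const startNs = BigInt(Date.now()) * 1_000_000n;
console.log(nsToDate(triggeredTimestampNs(startNs, 2_500_000_000n)).toISOString());
```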
diff --git a/internal-packages/clickhouse/src/index.ts b/internal-packages/clickhouse/src/index.ts
index 50b39d35a7..47c2f34f2f 100644
--- a/internal-packages/clickhouse/src/index.ts
+++ b/internal-packages/clickhouse/src/index.ts
@@ -23,8 +23,8 @@ import {
getTraceSummaryQueryBuilderV2,
insertTaskEvents,
insertTaskEventsV2,
- getLogsListQueryBuilderV2,
getLogDetailQueryBuilderV2,
+ getLogsSearchListQueryBuilder,
} from "./taskEvents.js";
import { Logger, type LogLevel } from "@trigger.dev/core/logger";
import type { Agent as HttpAgent } from "http";
@@ -220,8 +220,13 @@ export class ClickHouse {
traceSummaryQueryBuilder: getTraceSummaryQueryBuilderV2(this.reader),
traceDetailedSummaryQueryBuilder: getTraceDetailedSummaryQueryBuilderV2(this.reader),
spanDetailsQueryBuilder: getSpanDetailsQueryBuilderV2(this.reader),
- logsListQueryBuilder: getLogsListQueryBuilderV2(this.reader, this.logsQuerySettings?.list),
logDetailQueryBuilder: getLogDetailQueryBuilderV2(this.reader, this.logsQuerySettings?.detail),
};
}
+
+ get taskEventsSearch() {
+ return {
+ logsListQueryBuilder: getLogsSearchListQueryBuilder(this.reader, this.logsQuerySettings?.list),
+ };
+ }
}
diff --git a/internal-packages/clickhouse/src/taskEvents.ts b/internal-packages/clickhouse/src/taskEvents.ts
index 890eab9cc7..73e2d8344e 100644
--- a/internal-packages/clickhouse/src/taskEvents.ts
+++ b/internal-packages/clickhouse/src/taskEvents.ts
@@ -231,11 +231,12 @@ export function getSpanDetailsQueryBuilderV2(
});
}
+
// ============================================================================
-// Logs List Query Builders (for aggregated logs page)
+// Search Table Query Builders (for logs page, using task_events_search_v1)
// ============================================================================
-export const LogsListResult = z.object({
+export const LogsSearchListResult = z.object({
environment_id: z.string(),
organization_id: z.string(),
project_id: z.string(),
@@ -250,14 +251,18 @@ export const LogsListResult = z.object({
status: z.string(),
duration: z.number().or(z.string()),
attributes_text: z.string(),
+ triggered_timestamp: z.string(),
});
-export type LogsListResult = z.output<typeof LogsListResult>;
+export type LogsSearchListResult = z.output<typeof LogsSearchListResult>;
-export function getLogsListQueryBuilderV2(ch: ClickhouseReader, settings?: ClickHouseSettings) {
- return ch.queryBuilderFast({
- name: "getLogsList",
- table: "trigger_dev.task_events_v2",
+export function getLogsSearchListQueryBuilder(
+ ch: ClickhouseReader,
+ settings?: ClickHouseSettings
+) {
+ return ch.queryBuilderFast({
+ name: "getLogsSearchList",
+ table: "trigger_dev.task_events_search_v1",
columns: [
"environment_id",
"organization_id",
@@ -272,7 +277,8 @@ export function getLogsListQueryBuilderV2(ch: ClickhouseReader, settings?: Click
"kind",
"status",
"duration",
- "attributes_text"
+ "attributes_text",
+ "triggered_timestamp",
],
settings,
});
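For reference, a hedged sketch of how the webapp might call the new builder through the `taskEventsSearch` accessor added in `index.ts`. The `where`/`orderBy`/`limit`/`execute` calls match the builder usage in the deleted spans route; the `lower(message) LIKE` predicate and the `searchLogs` helper are assumptions, chosen so the `ngrambf_v1` index on `lower(message)` can prune granules, and may differ from the actual logs loader.

```ts
import { clickhouseClient } from "~/services/clickhouseInstance.server";

// Sketch only: searchLogs is a hypothetical helper, not an existing export.
async function searchLogs(environmentId: string, term: string) {
  const queryBuilder = clickhouseClient.taskEventsSearch.logsListQueryBuilder();

  queryBuilder.where("environment_id = {environmentId: String}", { environmentId });
  // Search lower(message) so the ngrambf_v1 skip index declared on it applies
  queryBuilder.where("lower(message) LIKE {term: String}", {
    term: `%${term.toLowerCase()}%`,
  });
  // triggered_timestamp is part of the table's sort key, so sorting on it is cheap
  queryBuilder.orderBy("triggered_timestamp DESC");
  queryBuilder.limit(100);

  const [queryError, records] = await queryBuilder.execute();
  if (queryError) {
    throw queryError;
  }
  return records ?? [];
}
```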
diff --git a/references/seed/src/trigger/spanSpammer.ts b/references/seed/src/trigger/spanSpammer.ts
index b16f00c4c2..dc79ce93bf 100644
--- a/references/seed/src/trigger/spanSpammer.ts
+++ b/references/seed/src/trigger/spanSpammer.ts
@@ -1,4 +1,4 @@
-import { logger, task, wait } from "@trigger.dev/sdk/v3";
+import { logger, task, wait, metadata } from "@trigger.dev/sdk/v3";
const CONFIG = {
delayBetweenBatchesSeconds: 0.2,
@@ -14,6 +14,22 @@ export const SpanSpammerTask = task({
const context = { payload, ctx };
let logCount = 0;
+ // 10s trace with events every 2s
+ await logger.trace("10s-span", async () => {
+ const totalSeconds = 10;
+ const intervalSeconds = 2;
+ const totalEvents = totalSeconds / intervalSeconds;
+
+ logger.info("Starting 30s span", context);
+
+ for (let i = 1; i <= totalEvents; i++) {
+ await wait.for({ seconds: intervalSeconds });
+ logger.info(`Inner event ${i}/${totalEvents} at ${i * intervalSeconds}s`, context);
+ }
+
+ logger.info("Completed 30s span", context);
+ });
+
logger.info("Starting span spammer task", context);
logger.warn("This will generate a lot of logs", context);
@@ -36,6 +52,14 @@ export const SpanSpammerTask = task({
emitBatch("This is a test log!!! Log number: ");
}
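+ // Update the parent run's metadata (one level up in the chain)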
+ metadata.parent.set("childStatus", "running");
+ metadata.parent.increment("completedChildren", 1);
+
+ // Update the root run's metadata (top-level run in the chain)
+ metadata.root.set("deepChildStatus", "done");
+ metadata.root.append("completedTasks", "child-task");
+
logger.info("Completed span spammer task", context);
return { message: `Created ${logCount} logs` };
},