diff --git a/apps/webapp/app/components/logs/LogsTable.tsx b/apps/webapp/app/components/logs/LogsTable.tsx
index 169e65515b..c9e442a824 100644
--- a/apps/webapp/app/components/logs/LogsTable.tsx
+++ b/apps/webapp/app/components/logs/LogsTable.tsx
@@ -27,7 +27,6 @@ import { PopoverMenuItem } from "~/components/primitives/Popover";
 
 type LogsTableProps = {
   logs: LogEntry[];
-  hasFilters: boolean;
   searchTerm?: string;
   isLoading?: boolean;
   isLoadingMore?: boolean;
@@ -59,7 +58,6 @@ function getLevelBorderColor(level: LogEntry["level"]): string {
 
 export function LogsTable({
   logs,
-  hasFilters,
   searchTerm,
   isLoading = false,
   isLoadingMore = false,
@@ -126,11 +124,7 @@ export function LogsTable({
-        {logs.length === 0 && !hasFilters ? (
-          
-            {!isLoading && }
-          
-        ) : logs.length === 0 ? (
+        {logs.length === 0 ? (
           window.location.reload()} />
         ) : (
           logs.map((log) => {
@@ -214,14 +208,6 @@ export function LogsTable({
   );
 }
 
-function NoLogs({ title }: { title: string }) {
-  return (
-    
-      {title}
-    
-  );
-}
-
 function BlankState({ isLoading, onRefresh }: { isLoading?: boolean; onRefresh?: () => void }) {
   if (isLoading) return ;
diff --git a/apps/webapp/app/components/navigation/SideMenu.tsx b/apps/webapp/app/components/navigation/SideMenu.tsx
index 86678bc1ca..76c974d596 100644
--- a/apps/webapp/app/components/navigation/SideMenu.tsx
+++ b/apps/webapp/app/components/navigation/SideMenu.tsx
@@ -269,7 +269,7 @@ export function SideMenu({
               to={v3DeploymentsPath(organization, project, environment)}
               data-action="deployments"
             />
-            {(isAdmin || user.isImpersonating) && (
+            {(user.admin || user.isImpersonating || featureFlags.hasLogsPageAccess) && (
 = {};
+    let parsedAttributes: Record = {};
     let rawAttributesString = "";
-    try {
-      if (log.metadata) {
-        parsedMetadata = JSON.parse(log.metadata) as Record;
-      }
-    } catch {
-      // Ignore parse errors
-    }
 
     try {
-      // Handle attributes which could be a JSON object or string
-      if (log.attributes) {
-        if (typeof log.attributes === "string") {
-          parsedAttributes = JSON.parse(log.attributes) as Record;
-          rawAttributesString = log.attributes;
-        } else if (typeof log.attributes === "object") {
-          parsedAttributes = log.attributes as Record;
-          rawAttributesString = JSON.stringify(log.attributes);
-        }
+      // Handle attributes_text which is a string
+      if (log.attributes_text) {
+        parsedAttributes = JSON.parse(log.attributes_text) as Record;
+        rawAttributesString = log.attributes_text;
       }
     } catch {
       // Ignore parse errors
@@ -97,10 +98,8 @@ export class LogDetailPresenter {
       status: log.status,
       duration: typeof log.duration === "number" ? log.duration : Number(log.duration),
       level: kindToLevel(log.kind, log.status),
-      metadata: parsedMetadata,
       attributes: parsedAttributes,
       // Raw strings for display
-      rawMetadata: log.metadata,
       rawAttributes: rawAttributesString,
     };
   }
diff --git a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts
index 0b5f2c175a..35ba43117f 100644
--- a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts
+++ b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts
@@ -2,12 +2,11 @@ import { z } from "zod";
 import { type ClickHouse, type LogsListResult } from "@internal/clickhouse";
 import { MachinePresetName } from "@trigger.dev/core/v3";
 import {
-  type PrismaClient,
   type PrismaClientOrTransaction,
   type TaskRunStatus,
   TaskRunStatus as TaskRunStatusEnum,
-  TaskTriggerSource,
 } from "@trigger.dev/database";
+import { getConfiguredEventRepository } from "~/v3/eventRepository/index.server";
 
 // Create a schema that validates TaskRunStatus enum values
 const TaskRunStatusSchema = z.array(z.nativeEnum(TaskRunStatusEnum));
@@ -18,11 +17,12 @@ import { findDisplayableEnvironment } from "~/models/runtimeEnvironment.server";
 import { getAllTaskIdentifiers } from "~/models/task.server";
 import { RunsRepository } from "~/services/runsRepository/runsRepository.server";
 import { ServiceValidationError } from "~/v3/services/baseService.server";
+import { kindToLevel, type LogLevel, LogLevelSchema } from "~/utils/logUtils";
+import { BasePresenter } from "~/presenters/v3/basePresenter.server";
 import {
   convertDateToClickhouseDateTime,
   convertClickhouseDateTime64ToJsDate,
 } from "~/v3/eventRepository/clickhouseEventRepository.server";
-import { kindToLevel, type LogLevel, LogLevelSchema } from "~/utils/logUtils";
 
 export type { LogLevel };
@@ -131,9 +131,7 @@ function decodeCursor(cursor: string): LogCursor | null {
 }
 
 // Convert display level to ClickHouse kinds and statuses
-function levelToKindsAndStatuses(
-  level: LogLevel
-): { kinds?: string[]; statuses?: string[] } {
+function levelToKindsAndStatuses(level: LogLevel): { kinds?: string[]; statuses?: string[] } {
   switch (level) {
     case "DEBUG":
       return { kinds: ["DEBUG_EVENT", "LOG_DEBUG"] };
@@ -150,7 +148,6 @@ function levelToKindsAndStatuses(
   }
 }
 
-
 function convertDateToNanoseconds(date: Date): bigint {
   return BigInt(date.getTime()) * 1_000_000n;
 }
@@ -168,11 +165,13 @@ function formatNanosecondsForClickhouse(ns: bigint): string {
   return padded.slice(0, 10) + "." + padded.slice(10);
 }
 
-export class LogsListPresenter {
+export class LogsListPresenter extends BasePresenter {
   constructor(
     private readonly replica: PrismaClientOrTransaction,
     private readonly clickhouse: ClickHouse
-  ) {}
+  ) {
+    super(undefined, replica);
+  }
 
   public async call(
     organizationId: string,
@@ -242,10 +241,7 @@ export class LogsListPresenter {
       (search !== undefined && search !== "") ||
       !time.isDefault;
 
-    const possibleTasksAsync = getAllTaskIdentifiers(
-      this.replica,
-      environmentId
-    );
+    const possibleTasksAsync = getAllTaskIdentifiers(this.replica, environmentId);
 
     const bulkActionsAsync = this.replica.bulkActionGroup.findMany({
       select: {
@@ -264,31 +260,26 @@ export class LogsListPresenter {
       take: 20,
     });
 
-    const [possibleTasks, bulkActions, displayableEnvironment] =
-      await Promise.all([
-        possibleTasksAsync,
-        bulkActionsAsync,
-        findDisplayableEnvironment(environmentId, userId),
-      ]);
-
-    if (
-      bulkId &&
-      !bulkActions.some((bulkAction) => bulkAction.friendlyId === bulkId)
-    ) {
-      const selectedBulkAction =
-        await this.replica.bulkActionGroup.findFirst({
-          select: {
-            friendlyId: true,
-            type: true,
-            createdAt: true,
-            name: true,
-          },
-          where: {
-            friendlyId: bulkId,
-            projectId,
-            environmentId,
-          },
-        });
+    const [possibleTasks, bulkActions, displayableEnvironment] = await Promise.all([
+      possibleTasksAsync,
+      bulkActionsAsync,
+      findDisplayableEnvironment(environmentId, userId),
+    ]);
+
+    if (bulkId && !bulkActions.some((bulkAction) => bulkAction.friendlyId === bulkId)) {
+      const selectedBulkAction = await this.replica.bulkActionGroup.findFirst({
+        select: {
+          friendlyId: true,
+          type: true,
+          createdAt: true,
+          name: true,
+        },
+        where: {
+          friendlyId: bulkId,
+          projectId,
+          environmentId,
+        },
+      });
 
       if (selectedBulkAction) {
         bulkActions.push(selectedBulkAction);
@@ -371,7 +362,22 @@ export class LogsListPresenter {
       }
     }
 
-    const queryBuilder = this.clickhouse.taskEventsV2.logsListQueryBuilder();
+    // Determine which store to use based on organization configuration
+    const { store } = await getConfiguredEventRepository(organizationId);
+
+    // Throw error if postgres is detected
+    if (store === "postgres") {
+      throw new ServiceValidationError(
+        "Logs are not available for PostgreSQL event store. Please contact support."
+      );
+    }
+
+    // Get the appropriate query builder based on store type
+    const isClickhouseV2 = store === "clickhouse_v2";
+
+    const queryBuilder = isClickhouseV2
+      ? this.clickhouse.taskEventsV2.logsListQueryBuilder()
+      : this.clickhouse.taskEvents.logsListQueryBuilder();
 
     queryBuilder.prewhere("environment_id = {environmentId: String}", {
       environmentId,
@@ -382,12 +388,17 @@
     });
     queryBuilder.where("project_id = {projectId: String}", { projectId });
 
-    // Time filters - inserted_at in PREWHERE for partition pruning, start_time in WHERE
+    // Time filters - inserted_at in PREWHERE only for v2, start_time in WHERE for both
     if (effectiveFrom) {
       const fromNs = convertDateToNanoseconds(effectiveFrom);
-      queryBuilder.prewhere("inserted_at >= {insertedAtStart: DateTime64(3)}", {
-        insertedAtStart: convertDateToClickhouseDateTime(effectiveFrom),
-      });
+
+      // Only use inserted_at for partition pruning if v2
+      if (isClickhouseV2) {
+        queryBuilder.prewhere("inserted_at >= {insertedAtStart: DateTime64(3)}", {
+          insertedAtStart: convertDateToClickhouseDateTime(effectiveFrom),
+        });
+      }
+
       queryBuilder.where("start_time >= {fromTime: String}", {
         fromTime: formatNanosecondsForClickhouse(fromNs),
       });
@@ -396,9 +407,14 @@
     if (effectiveTo) {
       const clampedTo = effectiveTo > new Date() ? new Date() : effectiveTo;
       const toNs = convertDateToNanoseconds(clampedTo);
-      queryBuilder.prewhere("inserted_at <= {insertedAtEnd: DateTime64(3)}", {
-        insertedAtEnd: convertDateToClickhouseDateTime(clampedTo),
-      });
+
+      // Only use inserted_at for partition pruning if v2
+      if (isClickhouseV2) {
+        queryBuilder.prewhere("inserted_at <= {insertedAtEnd: DateTime64(3)}", {
+          insertedAtEnd: convertDateToClickhouseDateTime(clampedTo),
+        });
+      }
+
       queryBuilder.where("start_time <= {toTime: String}", {
         toTime: formatNanosecondsForClickhouse(toNs),
       });
@@ -428,7 +444,6 @@
       );
     }
 
-
     if (levels && levels.length > 0) {
       const conditions: string[] = [];
       const params: Record = {};
@@ -477,7 +492,6 @@
     queryBuilder.where("NOT (kind = 'SPAN' AND status = 'PARTIAL')");
 
-
     // Cursor pagination
     const decodedCursor = cursor ? decodeCursor(cursor) : null;
 
     if (decodedCursor) {
@@ -525,11 +539,11 @@
       let displayMessage = log.message;
 
       // For error logs with status ERROR, try to extract error message from attributes
-      if (log.status === "ERROR" && log.attributes) {
+      if (log.status === "ERROR" && log.attributes_text) {
        try {
-          let attributes = log.attributes as ErrorAttributes;
+          const attributes = JSON.parse(log.attributes_text) as ErrorAttributes;
 
-          if (attributes?.error?.message && typeof attributes.error.message === 'string') {
+          if (attributes?.error?.message && typeof attributes.error.message === "string") {
            displayMessage = attributes.error.message;
          }
        } catch {
diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs/route.tsx
index aad6a2be53..44fbd437f5 100644
--- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs/route.tsx
+++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs/route.tsx
@@ -1,5 +1,6 @@
 import { type LoaderFunctionArgs , redirect} from "@remix-run/server-runtime";
 import { type MetaFunction, useFetcher, useNavigation, useLocation } from "@remix-run/react";
+import { ServiceValidationError } from "~/v3/services/baseService.server";
 import {
   TypedAwait,
   typeddefer,
@@ -14,7 +15,7 @@ import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server";
 import { getRunFiltersFromRequest } from "~/presenters/RunFilters.server";
 import { LogsListPresenter } from "~/presenters/v3/LogsListPresenter.server";
 import type { LogLevel } from "~/utils/logUtils";
-import { $replica } from "~/db.server";
+import { $replica, prisma } from "~/db.server";
 import { clickhouseClient } from "~/services/clickhouseInstance.server";
 import {
   setRootOnlyFilterPreference,
@@ -38,6 +39,7 @@ import {
   ResizablePanelGroup,
 } from "~/components/primitives/Resizable";
 import { Switch } from "~/components/primitives/Switch";
+import { FEATURE_FLAG, validateFeatureFlagValue } from "~/v3/featureFlags.server";
 
 // Valid log levels for filtering
 const validLevels: LogLevel[] = ["TRACE", "DEBUG", "INFO", "WARN", "ERROR", "CANCELLED"];
@@ -56,17 +58,59 @@ export const meta: MetaFunction = () => {
   ];
 };
 
+async function hasLogsPageAccess(
+  userId: string,
+  isAdmin: boolean,
+  isImpersonating: boolean,
+  organizationSlug: string
+): Promise {
+  if (isAdmin || isImpersonating) {
+    return true;
+  }
+
+  // Check organization feature flags
+  const organization = await prisma.organization.findFirst({
+    where: {
+      slug: organizationSlug,
+      members: { some: { userId } },
+    },
+    select: {
+      featureFlags: true,
+    },
+  });
+
+  if (!organization?.featureFlags) {
+    return false;
+  }
+
+  const flags = organization.featureFlags as Record;
+  const hasLogsPageAccessResult = validateFeatureFlagValue(
+    FEATURE_FLAG.hasLogsPageAccess,
+    flags.hasLogsPageAccess
+  );
+
+  return hasLogsPageAccessResult.success && hasLogsPageAccessResult.data === true;
+}
+
 export const loader = async ({ request, params }: LoaderFunctionArgs) => {
   const user = await requireUser(request);
   const userId = user.id;
   const isAdmin = user.admin || user.isImpersonating;
-  if (!isAdmin) {
-    throw redirect("/");
-  }
 
   const { projectParam, organizationSlug, envParam } = EnvironmentParamSchema.parse(params);
 
+  const canAccess = await hasLogsPageAccess(
+    userId,
+    user.admin,
+    user.isImpersonating,
+    organizationSlug
+  );
+
+  if (!canAccess) {
+    throw redirect("/");
+  }
+
   const project = await findProjectBySlug(organizationSlug, projectParam, userId);
   if (!project) {
     throw new Response("Project not found", { status: 404 });
   }
@@ -86,7 +130,8 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => {
   const showDebug = url.searchParams.get("showDebug") === "true";
 
   const presenter = new LogsListPresenter($replica, clickhouseClient);
-  const list = presenter.call(project.organizationId, environment.id, {
+
+  const listPromise = presenter.call(project.organizationId, environment.id, {
     userId,
     projectId: project.id,
     ...filters,
@@ -94,6 +139,11 @@
     levels,
     includeDebugLogs: isAdmin && showDebug,
     defaultPeriod: "1h",
+  }).catch((error) => {
+    if (error instanceof ServiceValidationError) {
+      return { error: "Failed to load logs. Please refresh and try again." };
+    }
+    throw error;
   });
 
   const session = await setRootOnlyFilterPreference(filters.rootOnly, request);
@@ -101,7 +151,7 @@
 
   return typeddefer(
     {
-      data: list,
+      data: listPromise,
       rootOnlyDefault: filters.rootOnly,
       filters,
       isAdmin,
@@ -149,10 +199,20 @@
            }
          >
-          {(list) => {
+          {(result) => {
+            // Check if result contains an error
+            if ("error" in result) {
+              return (
+                
+                  
+                    {result.error}
+                  
+                
+              );
+            }
             return (
 ["data"]>;
+  list: Exclude["data"]>, { error: string }>; //exclude error, it is handled
   rootOnlyDefault: boolean;
   isAdmin: boolean;
   showDebug: boolean;
@@ -307,7 +367,6 @@ function LogsList({
       {/* Table */}
       
 {
   const presenter = new LogDetailPresenter($replica, clickhouseClient);
 
-  const result = await presenter.call({
-    environmentId: environment.id,
-    organizationId: project.organizationId,
-    projectId: project.id,
-    spanId,
-    traceId,
-    startTime,
-  });
+  let result;
+  try {
+    result = await presenter.call({
+      environmentId: environment.id,
+      organizationId: project.organizationId,
+      projectId: project.id,
+      spanId,
+      traceId,
+      startTime,
+    });
+  } catch (error) {
+    if (error instanceof ServiceValidationError) {
+      throw new Response(error.message, { status: 400 });
+    }
+    throw error;
+  }
 
   if (!result) {
     throw new Response("Log not found", { status: 404 });
diff --git a/apps/webapp/app/v3/eventRepository/index.server.ts b/apps/webapp/app/v3/eventRepository/index.server.ts
index a8f66dab70..cb211e2b02 100644
--- a/apps/webapp/app/v3/eventRepository/index.server.ts
+++ b/apps/webapp/app/v3/eventRepository/index.server.ts
@@ -5,7 +5,7 @@ import {
   clickhouseEventRepositoryV2,
 } from "./clickhouseEventRepositoryInstance.server";
 import { IEventRepository, TraceEventOptions } from "./eventRepository.types";
-import { $replica, prisma } from "~/db.server";
+import { prisma } from "~/db.server";
 import { logger } from "~/services/logger.server";
 import { FEATURE_FLAG, flags } from "../featureFlags.server";
 import { getTaskEventStore } from "../taskEventStore.server";
@@ -24,6 +24,50 @@ export function resolveEventRepositoryForStore(store: string | undefined): IEventRepository {
   return eventRepository;
 }
 
+ export const EVENT_STORE_TYPES = {
+   POSTGRES: "postgres",
+   CLICKHOUSE: "clickhouse",
+   CLICKHOUSE_V2: "clickhouse_v2",
+ } as const;
+
+export type EventStoreType = typeof EVENT_STORE_TYPES[keyof typeof EVENT_STORE_TYPES];
+
+export async function getConfiguredEventRepository(
+  organizationId: string
+): Promise<{ repository: IEventRepository; store: EventStoreType }> {
+  const organization = await prisma.organization.findFirst({
+    select: {
+      id: true,
+      featureFlags: true,
+    },
+    where: {
+      id: organizationId,
+    },
+  });
+
+  if (!organization) {
+    throw new Error("Organization not found when configuring event repository");
+  }
+
+  // resolveTaskEventRepositoryFlag checks:
+  // 1. organization.featureFlags (highest priority)
+  // 2. global feature flags (via flags() function)
+  // 3. env.EVENT_REPOSITORY_DEFAULT_STORE (fallback)
+  const taskEventStore = await resolveTaskEventRepositoryFlag(
+    (organization.featureFlags as Record | null) ?? undefined
+  );
+
+  if (taskEventStore === EVENT_STORE_TYPES.CLICKHOUSE_V2) {
+    return { repository: clickhouseEventRepositoryV2, store: EVENT_STORE_TYPES.CLICKHOUSE_V2 };
+  }
+
+  if (taskEventStore === EVENT_STORE_TYPES.CLICKHOUSE) {
+    return { repository: clickhouseEventRepository, store: EVENT_STORE_TYPES.CLICKHOUSE };
+  }
+
+  return { repository: eventRepository, store: EVENT_STORE_TYPES.POSTGRES };
+}
+
 export async function getEventRepository(
   featureFlags: Record | undefined,
   parentStore: string | undefined
@@ -92,20 +136,6 @@ async function resolveTaskEventRepositoryFlag(
     return "clickhouse";
   }
 
-  if (env.EVENT_REPOSITORY_CLICKHOUSE_ROLLOUT_PERCENT) {
-    const rolloutPercent = env.EVENT_REPOSITORY_CLICKHOUSE_ROLLOUT_PERCENT;
-
-    const randomNumber = Math.random();
-
-    if (randomNumber < rolloutPercent) {
-      // Use the default store when rolling out (could be clickhouse or clickhouse_v2)
-      if (env.EVENT_REPOSITORY_DEFAULT_STORE === "clickhouse_v2") {
-        return "clickhouse_v2";
-      }
-      return "clickhouse";
-    }
-  }
-
   return flag;
 }
diff --git a/apps/webapp/app/v3/featureFlags.server.ts b/apps/webapp/app/v3/featureFlags.server.ts
index 81dff31ffa..605c11defc 100644
--- a/apps/webapp/app/v3/featureFlags.server.ts
+++ b/apps/webapp/app/v3/featureFlags.server.ts
@@ -6,6 +6,7 @@ export const FEATURE_FLAG = {
   runsListRepository: "runsListRepository",
   taskEventRepository: "taskEventRepository",
   hasQueryAccess: "hasQueryAccess",
+  hasLogsPageAccess: "hasLogsPageAccess",
 } as const;
 
 const FeatureFlagCatalog = {
@@ -13,6 +14,7 @@
   [FEATURE_FLAG.runsListRepository]: z.enum(["clickhouse", "postgres"]),
   [FEATURE_FLAG.taskEventRepository]: z.enum(["clickhouse", "clickhouse_v2", "postgres"]),
   [FEATURE_FLAG.hasQueryAccess]: z.coerce.boolean(),
+  [FEATURE_FLAG.hasLogsPageAccess]: z.coerce.boolean(),
 };
 
 type FeatureFlagKey = keyof typeof FeatureFlagCatalog;
diff --git a/internal-packages/clickhouse/src/index.ts b/internal-packages/clickhouse/src/index.ts
index f6a014da5c..4f98cfbca1 100644
--- a/internal-packages/clickhouse/src/index.ts
+++ b/internal-packages/clickhouse/src/index.ts
@@ -23,8 +23,10 @@ import {
   getTraceSummaryQueryBuilderV2,
   insertTaskEvents,
   insertTaskEventsV2,
-  getLogsListQueryBuilder,
-  getLogDetailQueryBuilder,
+  getLogsListQueryBuilderV2,
+  getLogDetailQueryBuilderV2,
+  getLogsListQueryBuilderV1,
+  getLogDetailQueryBuilderV1,
 } from "./taskEvents.js";
 import { Logger, type LogLevel } from "@trigger.dev/core/logger";
 import type { Agent as HttpAgent } from "http";
@@ -210,6 +212,8 @@ export class ClickHouse {
       traceSummaryQueryBuilder: getTraceSummaryQueryBuilder(this.reader),
       traceDetailedSummaryQueryBuilder: getTraceDetailedSummaryQueryBuilder(this.reader),
       spanDetailsQueryBuilder: getSpanDetailsQueryBuilder(this.reader),
+      logsListQueryBuilder: getLogsListQueryBuilderV1(this.reader, this.logsQuerySettings?.list),
+      logDetailQueryBuilder: getLogDetailQueryBuilderV1(this.reader, this.logsQuerySettings?.detail),
     };
   }
 
@@ -219,8 +223,8 @@ export class ClickHouse {
       traceSummaryQueryBuilder: getTraceSummaryQueryBuilderV2(this.reader),
       traceDetailedSummaryQueryBuilder: getTraceDetailedSummaryQueryBuilderV2(this.reader),
       spanDetailsQueryBuilder: getSpanDetailsQueryBuilderV2(this.reader),
-      logsListQueryBuilder: getLogsListQueryBuilder(this.reader, this.logsQuerySettings?.list),
-      logDetailQueryBuilder: getLogDetailQueryBuilder(this.reader, this.logsQuerySettings?.detail),
+      logsListQueryBuilder: getLogsListQueryBuilderV2(this.reader, this.logsQuerySettings?.list),
+      logDetailQueryBuilder: getLogDetailQueryBuilderV2(this.reader, this.logsQuerySettings?.detail),
     };
   }
 }
diff --git a/internal-packages/clickhouse/src/taskEvents.ts b/internal-packages/clickhouse/src/taskEvents.ts
index f526cdf0b6..fa64a908dd 100644
--- a/internal-packages/clickhouse/src/taskEvents.ts
+++ b/internal-packages/clickhouse/src/taskEvents.ts
@@ -249,13 +249,12 @@ export const LogsListResult = z.object({
   kind: z.string(),
   status: z.string(),
   duration: z.number().or(z.string()),
-  metadata: z.string(),
-  attributes: z.any(),
+  attributes_text: z.string(),
 });
 
 export type LogsListResult = z.output;
 
-export function getLogsListQueryBuilder(ch: ClickhouseReader, settings?: ClickHouseSettings) {
+export function getLogsListQueryBuilderV2(ch: ClickhouseReader, settings?: ClickHouseSettings) {
   return ch.queryBuilderFast({
     name: "getLogsList",
     table: "trigger_dev.task_events_v2",
@@ -273,8 +272,7 @@
       "kind",
       "status",
       "duration",
-      "metadata",
-      "attributes"
+      "attributes_text"
     ],
     settings,
   });
@@ -295,13 +293,12 @@ export const LogDetailV2Result = z.object({
   kind: z.string(),
   status: z.string(),
   duration: z.number().or(z.string()),
-  metadata: z.string(),
-  attributes: z.any()
+  attributes_text: z.string()
 });
 
 export type LogDetailV2Result = z.output;
 
-export function getLogDetailQueryBuilder(ch: ClickhouseReader, settings?: ClickHouseSettings) {
+export function getLogDetailQueryBuilderV2(ch: ClickhouseReader, settings?: ClickHouseSettings) {
   return ch.queryBuilderFast({
     name: "getLogDetail",
     table: "trigger_dev.task_events_v2",
@@ -319,8 +316,59 @@
       "kind",
       "status",
       "duration",
-      "metadata",
-      "attributes",
+      "attributes_text",
     ],
     settings,
   });
+}
+
+// ============================================================================
+// Logs List Query Builders for V1 (task_events_v1)
+// ============================================================================
+
+export function getLogsListQueryBuilderV1(ch: ClickhouseReader, settings?: ClickHouseSettings) {
+  return ch.queryBuilderFast({
+    name: "getLogsListV1",
+    table: "trigger_dev.task_events_v1",
+    columns: [
+      "environment_id",
+      "organization_id",
+      "project_id",
+      "task_identifier",
+      "run_id",
+      "start_time",
+      "trace_id",
+      "span_id",
+      "parent_span_id",
+      { name: "message", expression: "LEFT(message, 512)" },
+      "kind",
+      "status",
+      "duration",
+      "attributes_text"
+    ],
+    settings,
+  });
+}
+
+export function getLogDetailQueryBuilderV1(ch: ClickhouseReader, settings?: ClickHouseSettings) {
+  return ch.queryBuilderFast({
+    name: "getLogDetailV1",
+    table: "trigger_dev.task_events_v1",
+    columns: [
+      "environment_id",
+      "organization_id",
+      "project_id",
+      "task_identifier",
+      "run_id",
+      "start_time",
+      "trace_id",
+      "span_id",
+      "parent_span_id",
+      "message",
+      "kind",
+      "status",
+      "duration",
+      "attributes_text",
     ],
     settings,
   });