Big compile time correctness step (but still more to go)
parent 4f05192229
commit 97ad905f6b
13 changed files with 826 additions and 242 deletions
23 README.md
@ -22,6 +22,29 @@ python3 scripts/generate_cargo_toml.py
scripts/generate_proto_for_ide.sh
```

### Compiling
```bash
bazel build //...
```

**Bullet-proof compile-time correctness** is essential for production reliability. Backend protobuf changes must cause predictable frontend compilation failures, preventing runtime errors. Our three-pronged approach ensures this:

1. **Complete Type Chain**: Proto → Rust → OpenAPI → TypeScript → Components
   - Each step uses generated types, maintaining accuracy across the entire pipeline
   - Breaking changes at any layer cause compilation failures in dependent layers

2. **Consistent Data Transformation**: Service boundary layer transforms API responses to dashboard types
   - Canonical frontend interfaces isolated from backend implementation details
   - Transformations handle protobuf nullability and normalize data shapes
   - Components never directly access generated API types

3. **Strict TypeScript Configuration**: Enforces explicit null handling and prevents implicit `any` types
   - `strictNullChecks` catches undefined property access patterns
   - `noImplicitAny` surfaces type safety gaps
   - Runtime type errors become compile-time failures

This system guarantees that backend interface changes are caught during TypeScript compilation, not in production.
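
For illustration, a minimal sketch of the service-boundary transformation described in point 2; the interfaces below are simplified stand-ins, not the actual generated or dashboard types:

```typescript
// Simplified stand-in for an OpenAPI-generated response type.
interface ApiBuildSummary {
  build_request_id: string;
  status_code: number;
  status_name: string;
  started_at?: number; // protobuf-optional field
}

// Canonical dashboard shape consumed by components.
interface DashboardBuild {
  build_request_id: string;
  status: string;            // always the human-readable name
  started_at: number | null; // nullability made explicit
}

// The service boundary is the only place the API shape appears.
function toDashboardBuild(api: ApiBuildSummary): DashboardBuild {
  return {
    build_request_id: api.build_request_id,
    status: api.status_name,
    started_at: api.started_at ?? null,
  };
}
```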

### Testing

DataBuild core testing:

@ -770,7 +770,7 @@ async fn handle_tasks_command(matches: &ArgMatches, event_log_uri: &str) -> Resu
|
|||
if !detail.target_partitions.is_empty() {
|
||||
println!("\nTarget partitions:");
|
||||
for partition in &detail.target_partitions {
|
||||
println!(" - {}", partition);
|
||||
println!(" - {}", partition.str);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -946,7 +946,7 @@ async fn handle_builds_command(matches: &ArgMatches, event_log_uri: &str) -> Res
|
|||
if !detail.requested_partitions.is_empty() {
|
||||
println!("\nRequested partitions:");
|
||||
for partition in &detail.requested_partitions {
|
||||
println!(" - {}", partition);
|
||||
println!(" - {}", partition.str);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -64,6 +64,7 @@ ts_project(
|
|||
"layout.ts",
|
||||
"pages.ts",
|
||||
"services.ts",
|
||||
"types.ts",
|
||||
"utils.ts",
|
||||
],
|
||||
allow_js = True,
|
||||
|
|
|
|||
|
|
@ -10,23 +10,42 @@ import {
|
|||
GraphAnalysis
|
||||
} from './pages';
|
||||
import { decodePartitionRef } from './utils';
|
||||
import {
|
||||
TypedComponent,
|
||||
LayoutWrapperAttrs,
|
||||
RecentActivityAttrs,
|
||||
BuildStatusAttrs,
|
||||
PartitionStatusAttrs,
|
||||
PartitionsListAttrs,
|
||||
JobsListAttrs,
|
||||
JobMetricsAttrs,
|
||||
GraphAnalysisAttrs
|
||||
} from './types';
|
||||
|
||||
export const appName = "databuild";
|
||||
|
||||
// Wrapper components that include layout
|
||||
const LayoutWrapper = (component: any) => ({
|
||||
view: (vnode: any) => m(Layout, m(component, vnode.attrs))
|
||||
});
|
||||
// Wrapper components that include layout - now with type safety
|
||||
function createLayoutWrapper<TAttrs>(component: TypedComponent<TAttrs>): m.Component<TAttrs> {
|
||||
return {
|
||||
oninit: component.oninit ? (vnode: m.Vnode<TAttrs>) => component.oninit!.call(component, vnode) : undefined,
|
||||
oncreate: component.oncreate ? (vnode: m.VnodeDOM<TAttrs>) => component.oncreate!.call(component, vnode) : undefined,
|
||||
onupdate: component.onupdate ? (vnode: m.VnodeDOM<TAttrs>) => component.onupdate!.call(component, vnode) : undefined,
|
||||
onbeforeremove: component.onbeforeremove ? (vnode: m.VnodeDOM<TAttrs>) => component.onbeforeremove!.call(component, vnode) : undefined,
|
||||
onremove: component.onremove ? (vnode: m.VnodeDOM<TAttrs>) => component.onremove!.call(component, vnode) : undefined,
|
||||
onbeforeupdate: component.onbeforeupdate ? (vnode: m.Vnode<TAttrs>, old: m.VnodeDOM<TAttrs>) => component.onbeforeupdate!.call(component, vnode, old) : undefined,
|
||||
view: (vnode: m.Vnode<TAttrs>) => m(Layout, [component.view.call(component, vnode)])
|
||||
};
|
||||
}
|
||||
|
||||
// Route definitions
|
||||
// Route definitions with type safety
|
||||
const routes = {
|
||||
'/': LayoutWrapper(RecentActivity),
|
||||
'/builds/:id': LayoutWrapper(BuildStatus),
|
||||
'/partitions': LayoutWrapper(PartitionsList),
|
||||
'/partitions/:base64_ref': LayoutWrapper(PartitionStatus),
|
||||
'/jobs': LayoutWrapper(JobsList),
|
||||
'/jobs/:label': LayoutWrapper(JobMetrics),
|
||||
'/analyze': LayoutWrapper(GraphAnalysis),
|
||||
'/': createLayoutWrapper<RecentActivityAttrs>(RecentActivity),
|
||||
'/builds/:id': createLayoutWrapper<BuildStatusAttrs>(BuildStatus),
|
||||
'/partitions': createLayoutWrapper<PartitionsListAttrs>(PartitionsList),
|
||||
'/partitions/:base64_ref': createLayoutWrapper<PartitionStatusAttrs>(PartitionStatus),
|
||||
'/jobs': createLayoutWrapper<JobsListAttrs>(JobsList),
|
||||
'/jobs/:label': createLayoutWrapper<JobMetricsAttrs>(JobMetrics),
|
||||
'/analyze': createLayoutWrapper<GraphAnalysisAttrs>(GraphAnalysis),
|
||||
};
|
||||
|
||||
if (typeof window !== "undefined") {
|
||||
|
|
|
|||
|
|
@ -1,9 +1,20 @@
|
|||
import m from 'mithril';
|
||||
import { DashboardService, pollingManager, formatTime, formatDateTime, formatDuration, formatDate, RecentActivitySummary } from './services';
|
||||
import { encodePartitionRef, decodePartitionRef, encodeJobLabel, decodeJobLabel, BuildStatusBadge, PartitionStatusBadge, EventTypeBadge } from './utils';
|
||||
import {
|
||||
TypedComponent,
|
||||
RecentActivityAttrs,
|
||||
BuildStatusAttrs,
|
||||
PartitionStatusAttrs,
|
||||
PartitionsListAttrs,
|
||||
JobsListAttrs,
|
||||
JobMetricsAttrs,
|
||||
GraphAnalysisAttrs,
|
||||
getTypedRouteParams
|
||||
} from './types';
|
||||
|
||||
// Page scaffold components
|
||||
export const RecentActivity = {
|
||||
export const RecentActivity: TypedComponent<RecentActivityAttrs> = {
|
||||
data: null as RecentActivitySummary | null,
|
||||
loading: true,
|
||||
error: null as string | null,
|
||||
|
|
@ -30,7 +41,7 @@ export const RecentActivity = {
|
|||
});
|
||||
},
|
||||
|
||||
oninit() {
|
||||
oninit(vnode: m.Vnode<RecentActivityAttrs>) {
|
||||
// Load initial data - Mithril will automatically redraw after promise resolves
|
||||
this.loadData();
|
||||
|
||||
|
|
@ -42,12 +53,12 @@ export const RecentActivity = {
|
|||
}
|
||||
},
|
||||
|
||||
onremove() {
|
||||
onremove(vnode: m.VnodeDOM<RecentActivityAttrs>) {
|
||||
// Clean up polling when component is removed
|
||||
pollingManager.stopPolling('recent-activity');
|
||||
},
|
||||
|
||||
view: function() {
|
||||
view: function(vnode: m.Vnode<RecentActivityAttrs>) {
|
||||
|
||||
if (this.loading && !this.data) {
|
||||
return m('div.container.mx-auto.p-4', [
|
||||
|
|
@ -190,7 +201,7 @@ export const RecentActivity = {
|
|||
])
|
||||
]),
|
||||
m('tbody',
|
||||
data.recentBuilds.map(build =>
|
||||
data.recentBuilds.map((build: any) =>
|
||||
m('tr.hover', [
|
||||
m('td', [
|
||||
m('a.link.link-primary.font-mono.text-sm', {
|
||||
|
|
@ -243,7 +254,7 @@ export const RecentActivity = {
|
|||
])
|
||||
]),
|
||||
m('tbody',
|
||||
data.recentPartitions.map(partition =>
|
||||
data.recentPartitions.map((partition: any) =>
|
||||
m('tr.hover', [
|
||||
m('td', [
|
||||
m('a.link.link-primary.font-mono.text-sm.break-all', {
|
||||
|
|
@ -271,20 +282,20 @@ export const RecentActivity = {
|
|||
}
|
||||
};
|
||||
|
||||
export const BuildStatus = {
|
||||
export const BuildStatus: TypedComponent<BuildStatusAttrs> = {
|
||||
data: null as any | null,
|
||||
loading: true,
|
||||
error: null as string | null,
|
||||
partitionStatuses: new Map<string, any>(),
|
||||
buildId: '',
|
||||
|
||||
oninit(vnode: any) {
|
||||
oninit(vnode: m.Vnode<BuildStatusAttrs>) {
|
||||
this.buildId = vnode.attrs.id;
|
||||
this.loadBuild();
|
||||
this.startPolling();
|
||||
},
|
||||
|
||||
onremove() {
|
||||
onremove(vnode: m.VnodeDOM<BuildStatusAttrs>) {
|
||||
pollingManager.stopPolling(`build-status-${this.buildId}`);
|
||||
},
|
||||
|
||||
|
|
@ -307,12 +318,12 @@ export const BuildStatus = {
|
|||
for (const partition_ref of buildResponse.requested_partitions) {
|
||||
try {
|
||||
const partition_status = await apiClient.apiV1PartitionsPartitionRefStatusGet({
|
||||
partition_ref: partition_ref
|
||||
partition_ref: partition_ref.str
|
||||
});
|
||||
console.log(`Loaded status for partition ${partition_ref}:`, partition_status);
|
||||
this.partitionStatuses.set(partition_ref, partition_status);
|
||||
console.log(`Loaded status for partition ${partition_ref.str}:`, partition_status);
|
||||
this.partitionStatuses.set(partition_ref.str, partition_status);
|
||||
} catch (e) {
|
||||
console.warn(`Failed to load status for partition ${partition_ref}:`, e);
|
||||
console.warn(`Failed to load status for partition ${partition_ref.str}:`, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -373,16 +384,16 @@ export const BuildStatus = {
|
|||
}
|
||||
},
|
||||
|
||||
oncreate() {
|
||||
oncreate(vnode: m.VnodeDOM<BuildStatusAttrs>) {
|
||||
(window as any).mermaid.init();
|
||||
},
|
||||
|
||||
onupdate() {
|
||||
onupdate(vnode: m.VnodeDOM<BuildStatusAttrs>) {
|
||||
(window as any).mermaid.init();
|
||||
},
|
||||
|
||||
|
||||
view() {
|
||||
view(vnode: m.Vnode<BuildStatusAttrs>) {
|
||||
// Loading/error states similar to RecentActivity component
|
||||
if (this.loading && !this.data) {
|
||||
return m('div.container.mx-auto.p-4', [
|
||||
|
|
@ -516,7 +527,7 @@ export const BuildStatus = {
|
|||
}
|
||||
};
|
||||
|
||||
export const PartitionsList = {
|
||||
export const PartitionsList: TypedComponent<PartitionsListAttrs> = {
|
||||
data: null as any | null,
|
||||
loading: true,
|
||||
error: null as string | null,
|
||||
|
|
@ -573,11 +584,11 @@ export const PartitionsList = {
|
|||
);
|
||||
},
|
||||
|
||||
oninit() {
|
||||
oninit(vnode: m.Vnode<PartitionsListAttrs>) {
|
||||
this.loadPartitions();
|
||||
},
|
||||
|
||||
view() {
|
||||
view(vnode: m.Vnode<PartitionsListAttrs>) {
|
||||
if (this.loading && !this.data) {
|
||||
return m('div.container.mx-auto.p-4', [
|
||||
m('div.flex.flex-col.justify-center.items-center.min-h-96', [
|
||||
|
|
@ -701,7 +712,7 @@ export const PartitionsList = {
|
|||
}
|
||||
};
|
||||
|
||||
export const PartitionStatus = {
|
||||
export const PartitionStatus: TypedComponent<PartitionStatusAttrs> = {
|
||||
data: null as any | null,
|
||||
events: null as any | null,
|
||||
loading: true,
|
||||
|
|
@ -802,12 +813,12 @@ export const PartitionStatus = {
|
|||
}
|
||||
},
|
||||
|
||||
oninit(vnode: any) {
|
||||
oninit(vnode: m.Vnode<PartitionStatusAttrs>) {
|
||||
this.partitionRef = decodePartitionRef(vnode.attrs.base64_ref);
|
||||
this.loadPartition();
|
||||
},
|
||||
|
||||
view() {
|
||||
view(vnode: m.Vnode<PartitionStatusAttrs>) {
|
||||
if (this.loading && !this.data) {
|
||||
return m('div.container.mx-auto.p-4', [
|
||||
m('div.flex.flex-col.justify-center.items-center.min-h-96', [
|
||||
|
|
@ -991,14 +1002,14 @@ export const PartitionStatus = {
|
|||
}
|
||||
};
|
||||
|
||||
export const JobsList = {
|
||||
export const JobsList: TypedComponent<JobsListAttrs> = {
|
||||
jobs: [] as any[],
|
||||
searchTerm: '',
|
||||
loading: false,
|
||||
error: null as string | null,
|
||||
searchTimeout: null as NodeJS.Timeout | null,
|
||||
|
||||
oninit(vnode: any) {
|
||||
oninit(vnode: m.Vnode<JobsListAttrs>) {
|
||||
JobsList.loadJobs();
|
||||
},
|
||||
|
||||
|
|
@ -1028,7 +1039,7 @@ export const JobsList = {
|
|||
);
|
||||
},
|
||||
|
||||
view: () => {
|
||||
view: (vnode: m.Vnode<JobsListAttrs>) => {
|
||||
if (JobsList.loading) {
|
||||
return m('div.container.mx-auto.p-4', [
|
||||
m('div.flex.justify-center.items-center.h-64', [
|
||||
|
|
@ -1115,13 +1126,13 @@ export const JobsList = {
|
|||
}
|
||||
};
|
||||
|
||||
export const JobMetrics = {
|
||||
export const JobMetrics: TypedComponent<JobMetricsAttrs> = {
|
||||
jobLabel: '',
|
||||
metrics: null as any,
|
||||
loading: false,
|
||||
error: null as string | null,
|
||||
|
||||
oninit(vnode: any) {
|
||||
oninit(vnode: m.Vnode<JobMetricsAttrs>) {
|
||||
JobMetrics.jobLabel = decodeJobLabel(vnode.attrs.label);
|
||||
JobMetrics.loadJobMetrics();
|
||||
},
|
||||
|
|
@ -1145,7 +1156,7 @@ export const JobMetrics = {
|
|||
}
|
||||
},
|
||||
|
||||
view: () => {
|
||||
view: (vnode: m.Vnode<JobMetricsAttrs>) => {
|
||||
if (JobMetrics.loading) {
|
||||
return m('div.container.mx-auto.p-4', [
|
||||
m('div.flex.justify-center.items-center.h-64', [
|
||||
|
|
@ -1283,8 +1294,8 @@ export const JobMetrics = {
|
|||
}
|
||||
};
|
||||
|
||||
export const GraphAnalysis = {
|
||||
view: () => m('div.container.mx-auto.p-4', [
|
||||
export const GraphAnalysis: TypedComponent<GraphAnalysisAttrs> = {
|
||||
view: (vnode: m.Vnode<GraphAnalysisAttrs>) => m('div.container.mx-auto.p-4', [
|
||||
m('h1.text-3xl.font-bold.mb-4', 'Graph Analysis'),
|
||||
m('div.card.bg-base-100.shadow-xl', [
|
||||
m('div.card-body', [
|
||||
|
|
|
|||
181 databuild/dashboard/types.ts (Normal file)
|
|
@ -0,0 +1,181 @@
|
|||
import m from 'mithril';
|
||||
import {
|
||||
ActivityResponse,
|
||||
ActivityApiResponse,
|
||||
BuildSummary,
|
||||
BuildDetailResponse,
|
||||
PartitionSummary,
|
||||
PartitionDetailResponse,
|
||||
PartitionEventsResponse,
|
||||
JobSummary,
|
||||
JobMetricsResponse,
|
||||
JobDailyStats,
|
||||
JobRunSummary
|
||||
} from '../client/typescript_generated/src/index';
|
||||
|
||||
// Generic typed component interface that extends Mithril's component
|
||||
// Uses intersection type to allow arbitrary properties while ensuring type safety for lifecycle methods
|
||||
export interface TypedComponent<TAttrs = {}> extends Record<string, any> {
|
||||
oninit?(vnode: m.Vnode<TAttrs>): void;
|
||||
oncreate?(vnode: m.VnodeDOM<TAttrs>): void;
|
||||
onupdate?(vnode: m.VnodeDOM<TAttrs>): void;
|
||||
onbeforeremove?(vnode: m.VnodeDOM<TAttrs>): Promise<any> | void;
|
||||
onremove?(vnode: m.VnodeDOM<TAttrs>): void;
|
||||
onbeforeupdate?(vnode: m.Vnode<TAttrs>, old: m.VnodeDOM<TAttrs>): boolean | void;
|
||||
view(vnode: m.Vnode<TAttrs>): m.Children;
|
||||
}
|
||||
|
||||
// Helper type for typed vnodes
|
||||
export type TypedVnode<TAttrs = {}> = m.Vnode<TAttrs>;
|
||||
export type TypedVnodeDOM<TAttrs = {}> = m.VnodeDOM<TAttrs>;
|
||||
|
||||
// Route parameter types
|
||||
export interface RouteParams {
|
||||
[key: string]: string;
|
||||
}
|
||||
|
||||
export interface BuildRouteParams extends RouteParams {
|
||||
id: string;
|
||||
}
|
||||
|
||||
export interface PartitionRouteParams extends RouteParams {
|
||||
base64_ref: string;
|
||||
}
|
||||
|
||||
export interface JobRouteParams extends RouteParams {
|
||||
label: string;
|
||||
}
|
||||
|
||||
// Component attribute interfaces that reference OpenAPI types
|
||||
|
||||
export interface RecentActivityAttrs {
|
||||
// No external attrs needed - component manages its own data loading
|
||||
}
|
||||
|
||||
export interface BuildStatusAttrs {
|
||||
id: string;
|
||||
}
|
||||
|
||||
export interface PartitionStatusAttrs {
|
||||
base64_ref: string;
|
||||
}
|
||||
|
||||
export interface PartitionsListAttrs {
|
||||
// No external attrs needed - component manages its own data loading
|
||||
}
|
||||
|
||||
export interface JobsListAttrs {
|
||||
// No external attrs needed - component manages its own data loading
|
||||
}
|
||||
|
||||
export interface JobMetricsAttrs {
|
||||
label: string;
|
||||
}
|
||||
|
||||
export interface GraphAnalysisAttrs {
|
||||
// No external attrs needed for now
|
||||
}
|
||||
|
||||
// Badge component attribute interfaces with OpenAPI type constraints
|
||||
|
||||
export interface BuildStatusBadgeAttrs {
|
||||
status: string; // This should be constrained to BuildSummary status values
|
||||
size?: 'xs' | 'sm' | 'md' | 'lg';
|
||||
class?: string;
|
||||
}
|
||||
|
||||
export interface PartitionStatusBadgeAttrs {
|
||||
status: string; // This should be constrained to PartitionSummary status values
|
||||
size?: 'xs' | 'sm' | 'md' | 'lg';
|
||||
class?: string;
|
||||
}
|
||||
|
||||
export interface EventTypeBadgeAttrs {
|
||||
eventType: string; // This should be constrained to known event types
|
||||
size?: 'xs' | 'sm' | 'md' | 'lg';
|
||||
class?: string;
|
||||
}
|
||||
|
||||
// Layout wrapper attributes
|
||||
export interface LayoutWrapperAttrs {
|
||||
// Layout wrapper will pass through attributes to wrapped component
|
||||
[key: string]: any;
|
||||
}
|
||||
|
||||
// Data types for component state (using OpenAPI types)
|
||||
export interface RecentActivityData {
|
||||
data: ActivityResponse | null;
|
||||
loading: boolean;
|
||||
error: string | null;
|
||||
}
|
||||
|
||||
export interface BuildStatusData {
|
||||
data: BuildDetailResponse | null;
|
||||
partitionStatuses: Map<string, any>;
|
||||
loading: boolean;
|
||||
error: string | null;
|
||||
buildId: string;
|
||||
}
|
||||
|
||||
export interface PartitionStatusData {
|
||||
data: PartitionDetailResponse | null;
|
||||
events: PartitionEventsResponse | null;
|
||||
loading: boolean;
|
||||
error: string | null;
|
||||
partitionRef: string;
|
||||
buildHistory: any[];
|
||||
}
|
||||
|
||||
export interface JobsListData {
|
||||
jobs: JobSummary[];
|
||||
searchTerm: string;
|
||||
loading: boolean;
|
||||
error: string | null;
|
||||
searchTimeout: NodeJS.Timeout | null;
|
||||
}
|
||||
|
||||
export interface JobMetricsData {
|
||||
jobLabel: string;
|
||||
metrics: JobMetricsResponse | null;
|
||||
loading: boolean;
|
||||
error: string | null;
|
||||
}
|
||||
|
||||
// Utility type for creating typed components
|
||||
export type CreateTypedComponent<TAttrs> = TypedComponent<TAttrs>;
|
||||
|
||||
// Type guards and validators using OpenAPI type information
|
||||
export function isActivityResponse(data: any): data is ActivityResponse {
|
||||
return data &&
|
||||
typeof data.active_builds_count === 'number' &&
|
||||
typeof data.graph_name === 'string' &&
|
||||
Array.isArray(data.recent_builds) &&
|
||||
Array.isArray(data.recent_partitions) &&
|
||||
typeof data.system_status === 'string' &&
|
||||
typeof data.total_partitions_count === 'number';
|
||||
}
|
||||
|
||||
export function isBuildSummary(data: any): data is BuildSummary {
|
||||
return data &&
|
||||
typeof data.build_request_id === 'string' &&
|
||||
typeof data.status_name === 'string' &&
|
||||
typeof data.requested_at === 'number';
|
||||
}
|
||||
|
||||
export function isPartitionSummary(data: any): data is PartitionSummary {
|
||||
return data &&
|
||||
typeof data.partition_ref === 'string' &&
|
||||
typeof data.last_updated === 'number';
|
||||
}
|
||||
|
||||
// Helper function to create type-safe Mithril components
|
||||
export function createTypedComponent<TAttrs>(
|
||||
component: TypedComponent<TAttrs>
|
||||
): m.Component<TAttrs> {
|
||||
return component as m.Component<TAttrs>;
|
||||
}
|
||||
|
||||
// Helper for type-safe route handling
|
||||
export function getTypedRouteParams<T extends RouteParams>(vnode: m.Vnode<T>): T {
|
||||
return vnode.attrs;
|
||||
}
|
||||
|
|
@ -23,12 +23,19 @@ export function decodeJobLabel(encoded: string): string {
|
|||
}
|
||||
|
||||
import m from 'mithril';
|
||||
import {
|
||||
TypedComponent,
|
||||
BuildStatusBadgeAttrs,
|
||||
PartitionStatusBadgeAttrs,
|
||||
EventTypeBadgeAttrs,
|
||||
createTypedComponent
|
||||
} from './types';
|
||||
|
||||
// Mithril components for status badges - encapsulates both logic and presentation
|
||||
|
||||
export const BuildStatusBadge = {
|
||||
view(vnode: any) {
|
||||
const { status, size = 'sm', ...attrs } = vnode.attrs;
|
||||
export const BuildStatusBadge: TypedComponent<BuildStatusBadgeAttrs> = {
|
||||
view(vnode: m.Vnode<BuildStatusBadgeAttrs>) {
|
||||
const { status, size = 'sm', class: className, ...attrs } = vnode.attrs;
|
||||
const normalizedStatus = status.toLowerCase();
|
||||
|
||||
let badgeClass = 'badge-neutral';
|
||||
|
|
@ -42,15 +49,15 @@ export const BuildStatusBadge = {
|
|||
badgeClass = 'badge-error';
|
||||
}
|
||||
|
||||
return m(`span.badge.badge-${size}.${badgeClass}`, attrs, status);
|
||||
return m(`span.badge.badge-${size}.${badgeClass}`, { class: className, ...attrs }, status);
|
||||
}
|
||||
};
|
||||
|
||||
export const PartitionStatusBadge = {
|
||||
view(vnode: any) {
|
||||
const { status, size = 'sm', ...attrs } = vnode.attrs;
|
||||
export const PartitionStatusBadge: TypedComponent<PartitionStatusBadgeAttrs> = {
|
||||
view(vnode: m.Vnode<PartitionStatusBadgeAttrs>) {
|
||||
const { status, size = 'sm', class: className, ...attrs } = vnode.attrs;
|
||||
if (!status) {
|
||||
return m(`span.badge.badge-${size}.badge-neutral`, attrs, 'Unknown');
|
||||
return m(`span.badge.badge-${size}.badge-neutral`, { class: className, ...attrs }, 'Unknown');
|
||||
}
|
||||
|
||||
const normalizedStatus = status.toLowerCase();
|
||||
|
|
@ -66,13 +73,13 @@ export const PartitionStatusBadge = {
|
|||
badgeClass = 'badge-error';
|
||||
}
|
||||
|
||||
return m(`span.badge.badge-${size}.${badgeClass}`, attrs, status);
|
||||
return m(`span.badge.badge-${size}.${badgeClass}`, { class: className, ...attrs }, status);
|
||||
}
|
||||
};
|
||||
|
||||
export const EventTypeBadge = {
|
||||
view(vnode: any) {
|
||||
const { eventType, size = 'sm', ...attrs } = vnode.attrs;
|
||||
export const EventTypeBadge: TypedComponent<EventTypeBadgeAttrs> = {
|
||||
view(vnode: m.Vnode<EventTypeBadgeAttrs>) {
|
||||
const { eventType, size = 'sm', class: className, ...attrs } = vnode.attrs;
|
||||
|
||||
let badgeClass = 'badge-ghost';
|
||||
let displayName = eventType;
|
||||
|
|
@ -96,6 +103,6 @@ export const EventTypeBadge = {
|
|||
break;
|
||||
}
|
||||
|
||||
return m(`span.badge.badge-${size}.${badgeClass}`, attrs, displayName);
|
||||
return m(`span.badge.badge-${size}.${badgeClass}`, { class: className, ...attrs }, displayName);
|
||||
}
|
||||
};
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
use crate::*;
|
||||
use crate::event_log::{BuildEventLog, BuildEventLogError, Result};
|
||||
use crate::service::{BuildDetailResponse, BuildTimelineEvent as ServiceBuildTimelineEvent};
|
||||
use crate::{BuildDetailResponse, BuildTimelineEvent as ServiceBuildTimelineEvent};
|
||||
use std::sync::Arc;
|
||||
use std::collections::HashMap;
|
||||
use serde::Serialize;
|
||||
|
|
@ -300,7 +300,7 @@ impl BuildsRepository {
|
|||
build_request_id: build_info.build_request_id,
|
||||
status_code: build_info.status as i32,
|
||||
status_name: build_info.status.to_display_string(),
|
||||
requested_partitions: build_info.requested_partitions.into_iter().map(|p| p.str).collect(),
|
||||
requested_partitions: build_info.requested_partitions,
|
||||
total_jobs: build_info.total_jobs as u32,
|
||||
completed_jobs: build_info.completed_jobs as u32,
|
||||
failed_jobs: build_info.failed_jobs as u32,
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
use crate::*;
|
||||
use crate::event_log::{BuildEventLog, Result};
|
||||
use crate::service::{JobDetailResponse, JobRunDetail as ServiceJobRunDetail};
|
||||
use crate::{JobDetailResponse, JobRunDetail as ServiceJobRunDetail};
|
||||
use std::sync::Arc;
|
||||
use std::collections::HashMap;
|
||||
use serde::Serialize;
|
||||
|
|
@ -307,7 +307,7 @@ impl JobsRepository {
|
|||
.map(|run| ServiceJobRunDetail {
|
||||
job_run_id: run.job_run_id,
|
||||
build_request_id: run.build_request_id,
|
||||
target_partitions: run.target_partitions.into_iter().map(|p| p.str).collect(),
|
||||
target_partitions: run.target_partitions,
|
||||
status_code: run.status as i32,
|
||||
status_name: run.status.to_display_string(),
|
||||
started_at: run.started_at,
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
use crate::*;
|
||||
use crate::event_log::{BuildEventLog, BuildEventLogError, Result};
|
||||
use crate::service::{TaskDetailResponse, TaskTimelineEvent as ServiceTaskTimelineEvent};
|
||||
use crate::{TaskDetailResponse, TaskTimelineEvent as ServiceTaskTimelineEvent};
|
||||
use std::sync::Arc;
|
||||
use std::collections::HashMap;
|
||||
use serde::Serialize;
|
||||
|
|
@ -324,7 +324,7 @@ impl TasksRepository {
|
|||
build_request_id: task_info.build_request_id,
|
||||
status_code: task_info.status as i32,
|
||||
status_name: task_info.status.to_display_string(),
|
||||
target_partitions: task_info.target_partitions.into_iter().map(|p| p.str).collect(),
|
||||
target_partitions: task_info.target_partitions,
|
||||
scheduled_at: task_info.scheduled_at,
|
||||
started_at: task_info.started_at,
|
||||
completed_at: task_info.completed_at,
|
||||
|
|
|
|||
|
|
@ -137,12 +137,7 @@ pub struct TaskCancelResponse {
|
|||
}
|
||||
|
||||
// List endpoints request/response types
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct BuildsListResponse {
|
||||
pub builds: Vec<BuildSummary>,
|
||||
pub total_count: u32,
|
||||
pub has_more: bool,
|
||||
}
|
||||
// Removed: duplicate of crate::BuildsListResponse from proto
|
||||
|
||||
// Wrapper structs for API responses that contain protobuf data + service metadata
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
|
|
@ -187,58 +182,14 @@ pub struct PaginationInfo {
|
|||
pub offset: Option<u32>,
|
||||
}
|
||||
|
||||
// Legacy types kept for backward compatibility (will be removed eventually)
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct BuildSummary {
|
||||
pub build_request_id: String,
|
||||
pub status: String,
|
||||
pub requested_partitions: Vec<String>,
|
||||
pub created_at: i64,
|
||||
pub updated_at: i64,
|
||||
}
|
||||
// Removed: Legacy types that duplicate proto definitions
|
||||
// - BuildSummary (use crate::BuildSummary from proto)
|
||||
// - PartitionsListResponse (use crate::PartitionsListResponse from proto)
|
||||
// - PartitionSummary (use crate::PartitionSummary from proto)
|
||||
|
||||
// TODO snake cased response
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct PartitionsListResponse {
|
||||
pub partitions: Vec<PartitionSummary>,
|
||||
pub total_count: u32,
|
||||
pub has_more: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct PartitionSummary {
|
||||
pub partition_ref: String,
|
||||
pub status: String,
|
||||
pub updated_at: i64,
|
||||
pub build_request_id: Option<String>,
|
||||
}
|
||||
|
||||
// TODO camel cased results
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct ActivityResponse {
|
||||
pub active_builds_count: u32,
|
||||
pub recent_builds: Vec<BuildSummary>,
|
||||
pub recent_partitions: Vec<PartitionSummary>,
|
||||
pub total_partitions_count: u32,
|
||||
pub system_status: String,
|
||||
pub graph_name: String,
|
||||
}
|
||||
|
||||
// Job-related request/response types
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct JobsListResponse {
|
||||
pub jobs: Vec<JobSummary>,
|
||||
pub total_count: u32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct JobSummary {
|
||||
pub job_label: String,
|
||||
pub success_rate: f64,
|
||||
pub avg_duration_ms: Option<i64>,
|
||||
pub recent_runs: u32,
|
||||
pub last_run: Option<i64>,
|
||||
}
|
||||
// Removed: JobsListResponse and JobSummary (use crate:: proto versions)
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct JobMetricsResponse {
|
||||
|
|
@ -466,27 +417,7 @@ impl BuildGraphService {
|
|||
pub type ServiceState = Arc<BuildGraphService>;
|
||||
|
||||
// Repository-based response types
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct PartitionDetailResponse {
|
||||
pub partition_ref: String,
|
||||
pub status_code: i32,
|
||||
pub status_name: String,
|
||||
pub last_updated: i64,
|
||||
pub builds_count: u32,
|
||||
pub last_successful_build: Option<String>,
|
||||
pub invalidation_count: u32,
|
||||
pub timeline: Vec<PartitionTimelineEvent>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct PartitionTimelineEvent {
|
||||
pub timestamp: i64,
|
||||
pub status_code: i32,
|
||||
pub status_name: String,
|
||||
pub message: String,
|
||||
pub build_request_id: String,
|
||||
pub job_run_id: Option<String>,
|
||||
}
|
||||
// Removed: PartitionDetailResponse and PartitionTimelineEvent (use crate:: proto versions)
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct JobsRepositoryListResponse {
|
||||
|
|
@ -507,82 +438,9 @@ pub struct JobRepositorySummary {
|
|||
pub recent_builds: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct JobDetailResponse {
|
||||
pub job_label: String,
|
||||
pub total_runs: u32,
|
||||
pub successful_runs: u32,
|
||||
pub failed_runs: u32,
|
||||
pub cancelled_runs: u32,
|
||||
pub average_partitions_per_run: f64,
|
||||
pub last_run_timestamp: i64,
|
||||
pub last_run_status_code: i32,
|
||||
pub last_run_status_name: String,
|
||||
pub recent_builds: Vec<String>,
|
||||
pub runs: Vec<JobRunDetail>,
|
||||
}
|
||||
// Removed: JobDetailResponse, JobRunDetail, TasksListResponse, TaskSummary (use crate:: proto versions)
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct JobRunDetail {
|
||||
pub job_run_id: String,
|
||||
pub build_request_id: String,
|
||||
pub target_partitions: Vec<String>,
|
||||
pub status_code: i32,
|
||||
pub status_name: String,
|
||||
pub started_at: Option<i64>,
|
||||
pub completed_at: Option<i64>,
|
||||
pub duration_ms: Option<i64>,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct TasksListResponse {
|
||||
pub tasks: Vec<TaskSummary>,
|
||||
pub total_count: u32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct TaskSummary {
|
||||
pub job_run_id: String,
|
||||
pub job_label: String,
|
||||
pub build_request_id: String,
|
||||
pub status: String,
|
||||
pub target_partitions: Vec<String>,
|
||||
pub scheduled_at: i64,
|
||||
pub started_at: Option<i64>,
|
||||
pub completed_at: Option<i64>,
|
||||
pub duration_ms: Option<i64>,
|
||||
pub cancelled: bool,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct TaskDetailResponse {
|
||||
pub job_run_id: String,
|
||||
pub job_label: String,
|
||||
pub build_request_id: String,
|
||||
pub status_code: i32,
|
||||
pub status_name: String,
|
||||
pub target_partitions: Vec<String>,
|
||||
pub scheduled_at: i64,
|
||||
pub started_at: Option<i64>,
|
||||
pub completed_at: Option<i64>,
|
||||
pub duration_ms: Option<i64>,
|
||||
pub cancelled: bool,
|
||||
pub cancel_reason: Option<String>,
|
||||
pub message: String,
|
||||
pub timeline: Vec<TaskTimelineEvent>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct TaskTimelineEvent {
|
||||
pub timestamp: i64,
|
||||
pub status_code: Option<i32>,
|
||||
pub status_name: Option<String>,
|
||||
pub message: String,
|
||||
pub event_type: String,
|
||||
pub cancel_reason: Option<String>,
|
||||
}
|
||||
// Removed: TaskDetailResponse and TaskTimelineEvent (use crate:: proto versions)
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct BuildsRepositoryListResponse {
|
||||
|
|
@ -606,31 +464,4 @@ pub struct BuildRepositorySummary {
|
|||
pub cancelled: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct BuildDetailResponse {
|
||||
pub build_request_id: String,
|
||||
pub status_code: i32,
|
||||
pub status_name: String,
|
||||
pub requested_partitions: Vec<String>,
|
||||
pub total_jobs: u32,
|
||||
pub completed_jobs: u32,
|
||||
pub failed_jobs: u32,
|
||||
pub cancelled_jobs: u32,
|
||||
pub requested_at: i64,
|
||||
pub started_at: Option<i64>,
|
||||
pub completed_at: Option<i64>,
|
||||
pub duration_ms: Option<i64>,
|
||||
pub cancelled: bool,
|
||||
pub cancel_reason: Option<String>,
|
||||
pub timeline: Vec<BuildTimelineEvent>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct BuildTimelineEvent {
|
||||
pub timestamp: i64,
|
||||
pub status_code: Option<i32>,
|
||||
pub status_name: Option<String>,
|
||||
pub message: String,
|
||||
pub event_type: String,
|
||||
pub cancel_reason: Option<String>,
|
||||
}
|
||||
// Removed: BuildDetailResponse and BuildTimelineEvent (use crate:: proto versions)
|
||||
|
|
@ -10,5 +10,6 @@
|
|||
- Plan for external worker dispatch (e.g. k8s pod per build, or launch in container service)
|
||||
- k8s can use [jobs](https://kubernetes.io/docs/concepts/workloads/controllers/job/)
|
||||
- Should we have meaningful exit codes? E.g. "retry-able error", etc?
|
||||
- Fully joinable build/job IDs - ensure all execution logs / metrics are joinable to build request ID?
|
||||
- Triggers?
|
||||
- How do we handle task logging?
|
||||
|
|
|
|||
510 plans/web-app-compile-time-correctness.md (Normal file)
|
|
@ -0,0 +1,510 @@
|
|||
# Web App Compile-Time Correctness Plan
|
||||
|
||||
## Problem Statement
|
||||
|
||||
The DataBuild web application currently has a type-safety blind spot where backend protobuf changes can cause runtime failures in the frontend without any compile-time warnings. While we achieved end-to-end type generation (Proto → Rust → OpenAPI → TypeScript), inconsistent data transformation patterns and loose TypeScript configuration allow type mismatches to slip through.
|
||||
|
||||
**Specific observed failures:**
|
||||
- `status.toLowerCase()` crashes when status objects are passed instead of strings
|
||||
- `status?.status` accesses non-existent properties on protobuf response objects
|
||||
- Partitions page fails silently due to unhandled nullability
|
||||
- Inconsistent data shapes flowing through components
|
||||
|
||||
## Root Cause Analysis
|
||||
|
||||
1. **Mixed Data Contracts**: Some components expect `{ status: string }` while APIs return `{ status_code: number, status_name: string }` (see the sketch after this list)
|
||||
2. **Inconsistent Transformations**: Data shape changes happen ad-hoc throughout the component tree
|
||||
3. **Protobuf Nullability**: Generated types are honest about optional fields, but TypeScript config allows unsafe access
|
||||
4. **Service Boundary Leakage**: Backend implementation details leak into frontend components
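
A minimal sketch of the mismatch in item 1; the interfaces are illustrative stand-ins rather than the actual generated types:

```typescript
// What several components were written against (illustrative).
interface LegacyBuildView {
  status: string;
}

// What the generated API actually returns (illustrative stand-in).
interface ApiBuildDetail {
  status_code: number;
  status_name: string;
}

const api: ApiBuildDetail = { status_code: 1, status_name: "COMPLETED" };

// The old pattern: an implicit `any` hid the mismatch, `status` was undefined
// at runtime, and `.toLowerCase()` crashed inside the component:
//   (api as any).status.toLowerCase();

// With strict settings the mismatch surfaces at compile time, and the fix is
// an explicit mapping at the service boundary:
const view: LegacyBuildView = { status: api.status_name };
console.log(view.status.toLowerCase()); // "completed"
```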
|
||||
|
||||
## Solution: Three-Pronged Approach
|
||||
|
||||
### Option 2: Consistent Data Transformation (Primary)
|
||||
- Define canonical dashboard types separate from generated API types
|
||||
- Transform data at service boundaries, never in components
|
||||
- Single source of truth for data shapes within the frontend
|
||||
|
||||
### Option 4: Generated Type Enforcement (Supporting)
|
||||
- Use generated protobuf types in service layer for accurate contracts
|
||||
- Leverage protobuf's honest nullability information
|
||||
- Maintain type safety chain from backend to service boundary
|
||||
|
||||
### Option 3: Stricter TypeScript Configuration (Foundation)
|
||||
- Enable strict null checks to catch undefined access patterns
|
||||
- Prevent implicit any types that mask runtime errors
|
||||
- Force explicit handling of protobuf's optional fields
|
||||
|
||||
## Implementation Plan
|
||||
|
||||
### Phase 1: TypeScript Configuration Hardening
|
||||
|
||||
**Goal**: Enable strict type checking to surface existing issues
|
||||
|
||||
**Tasks**:
|
||||
1. Update `tsconfig.json` with strict configuration:
|
||||
```json
|
||||
{
|
||||
"compilerOptions": {
|
||||
"strict": true,
|
||||
"noImplicitAny": true,
|
||||
"strictNullChecks": true,
|
||||
"noImplicitReturns": true,
|
||||
"noUncheckedIndexedAccess": true,
|
||||
"exactOptionalPropertyTypes": true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
2. Run TypeScript compilation to identify all type errors
|
||||
|
||||
3. Create tracking issue for each compilation error
|
||||
|
||||
**Success Criteria**: TypeScript build passes with strict configuration enabled
|
||||
|
||||
**Estimated Time**: 1-2 days
|
||||
|
||||
### Phase 1.5: Verification of Strict Configuration
|
||||
|
||||
**Goal**: Prove strict TypeScript catches the specific issues we identified
|
||||
|
||||
**Tasks**:
|
||||
1. Create test cases that reproduce original failures:
|
||||
```typescript
|
||||
// Test file: dashboard/verification-tests.ts
|
||||
const mockResponse = { status_code: 1, status_name: "COMPLETED" };
|
||||
|
||||
// These should now cause TypeScript compilation errors:
|
||||
const test1 = mockResponse.status?.toLowerCase(); // undefined property access
|
||||
const test2 = mockResponse.status?.status; // nested undefined access
|
||||
```
|
||||
|
||||
2. Run TypeScript compilation and verify these cause errors:
|
||||
- Document which strict rules catch which specific issues
|
||||
- Confirm `strictNullChecks` prevents undefined property access
|
||||
- Verify `noImplicitAny` surfaces type gaps
|
||||
|
||||
3. Test protobuf nullable field handling:
|
||||
```typescript
|
||||
interface TestPartitionSummary {
|
||||
last_updated?: number; // optional field from protobuf
|
||||
}
|
||||
|
||||
// This should require explicit null checking:
|
||||
const timestamp = partition.last_updated.toString(); // Should error
|
||||
```
|
||||
|
||||
**Success Criteria**:
|
||||
- All identified runtime failures now cause compile-time errors
|
||||
- Clear mapping between strict TypeScript rules and caught issues
|
||||
- Zero false positives in existing working code
|
||||
|
||||
**Estimated Time**: 0.5 days
|
||||
|
||||
### Phase 2: Define Dashboard Data Contracts
|
||||
|
||||
**Goal**: Create canonical frontend types independent of backend schema
|
||||
|
||||
**Tasks**:
|
||||
1. Define dashboard types in `dashboard/types.ts`:
|
||||
```typescript
|
||||
// Dashboard-optimized types
|
||||
interface DashboardBuild {
|
||||
build_request_id: string;
|
||||
status: string; // Always human-readable name
|
||||
requested_partitions: string[]; // Always string values
|
||||
total_jobs: number;
|
||||
completed_jobs: number;
|
||||
failed_jobs: number;
|
||||
cancelled_jobs: number;
|
||||
requested_at: number;
|
||||
started_at: number | null;
|
||||
completed_at: number | null;
|
||||
duration_ms: number | null;
|
||||
cancelled: boolean;
|
||||
}
|
||||
|
||||
interface DashboardPartition {
|
||||
partition_ref: string; // Always string value
|
||||
status: string; // Always human-readable name
|
||||
last_updated: number | null;
|
||||
build_requests: string[];
|
||||
}
|
||||
|
||||
interface DashboardJob {
|
||||
job_label: string;
|
||||
total_runs: number;
|
||||
successful_runs: number;
|
||||
failed_runs: number;
|
||||
cancelled_runs: number;
|
||||
last_run_timestamp: number;
|
||||
last_run_status: string; // Always human-readable name
|
||||
average_partitions_per_run: number;
|
||||
recent_builds: string[];
|
||||
}
|
||||
```
|
||||
|
||||
2. Update component attribute interfaces to use dashboard types (see the sketch after this list)
|
||||
|
||||
3. Document the rationale for each transformation decision
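
A hedged sketch of what Task 2 could look like for a build-oriented component; the component and attribute names are illustrative, not the existing ones:

```typescript
import m from 'mithril';

// Trimmed copy of the DashboardBuild shape above, kept self-contained here.
interface DashboardBuild {
  build_request_id: string;
  status: string;
  duration_ms: number | null;
}

// Component attrs reference the dashboard type, never the generated API type.
interface BuildCardAttrs {
  build: DashboardBuild;
}

export const BuildCard: m.Component<BuildCardAttrs> = {
  view(vnode) {
    const { build } = vnode.attrs;
    // `status` is guaranteed to be a plain string here, so this is safe.
    return m('div.card', [
      m('span.badge', build.status.toLowerCase()),
      m('span', build.duration_ms !== null ? `${build.duration_ms} ms` : 'in progress'),
    ]);
  },
};
```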
|
||||
|
||||
**Success Criteria**: All dashboard types are self-contained and UI-optimized
|
||||
|
||||
**Estimated Time**: 2-3 days
|
||||
|
||||
### Phase 3: Service Layer Transformation
|
||||
|
||||
**Goal**: Create consistent transformation boundaries between API and dashboard
|
||||
|
||||
**Tasks**:
|
||||
1. Implement transformation functions in `services.ts`:
|
||||
```typescript
|
||||
// Transform API responses to dashboard types
|
||||
function transformBuildDetail(apiResponse: BuildDetailResponse): DashboardBuild {
|
||||
return {
|
||||
build_request_id: apiResponse.build_request_id,
|
||||
status: apiResponse.status_name,
|
||||
requested_partitions: apiResponse.requested_partitions.map(p => p.str),
|
||||
total_jobs: apiResponse.total_jobs,
|
||||
completed_jobs: apiResponse.completed_jobs,
|
||||
failed_jobs: apiResponse.failed_jobs,
|
||||
cancelled_jobs: apiResponse.cancelled_jobs,
|
||||
requested_at: apiResponse.requested_at,
|
||||
started_at: apiResponse.started_at ?? null,
|
||||
completed_at: apiResponse.completed_at ?? null,
|
||||
duration_ms: apiResponse.duration_ms ?? null,
|
||||
cancelled: apiResponse.cancelled,
|
||||
};
|
||||
}
|
||||
|
||||
function transformPartitionSummary(apiResponse: PartitionSummary): DashboardPartition {
|
||||
return {
|
||||
partition_ref: apiResponse.partition_ref.str,
|
||||
status: apiResponse.status_name,
|
||||
last_updated: apiResponse.last_updated ?? null,
|
||||
build_requests: apiResponse.build_requests,
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
2. Update all service methods to use transformation functions
|
||||
|
||||
3. Add type guards for runtime validation:
|
||||
```typescript
|
||||
function isValidBuildResponse(data: unknown): data is BuildDetailResponse {
|
||||
return typeof data === 'object' &&
|
||||
data !== null &&
|
||||
'build_request_id' in data &&
|
||||
'status_name' in data;
|
||||
}
|
||||
```
|
||||
|
||||
4. Handle API errors with proper typing
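
One possible shape for Task 4, sketched as a discriminated-union result so callers must handle failures explicitly (the helper name is hypothetical; the real service layer goes through the generated client rather than raw `fetch`):

```typescript
type ServiceResult<T> =
  | { ok: true; value: T }
  | { ok: false; error: string };

async function fetchJson<T>(
  url: string,
  validate: (data: unknown) => data is T,
): Promise<ServiceResult<T>> {
  try {
    const response = await fetch(url);
    if (!response.ok) {
      return { ok: false, error: `HTTP ${response.status}` };
    }
    const data: unknown = await response.json();
    if (!validate(data)) {
      return { ok: false, error: 'Unexpected response shape' };
    }
    return { ok: true, value: data };
  } catch (e) {
    return { ok: false, error: e instanceof Error ? e.message : String(e) };
  }
}
```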
|
||||
|
||||
**Success Criteria**: All API data flows through consistent transformation layer
|
||||
|
||||
**Estimated Time**: 3-4 days
|
||||
|
||||
### Phase 3.5: Transformation Validation
|
||||
|
||||
**Goal**: Prove transformation functions prevent observed failures and handle edge cases
|
||||
|
||||
**Tasks**:
|
||||
1. Create comprehensive unit tests for transformation functions:
|
||||
```typescript
|
||||
// Test file: dashboard/transformation-tests.ts
|
||||
describe('transformBuildDetail', () => {
|
||||
it('handles status objects correctly', () => {
|
||||
const apiResponse = { status_code: 1, status_name: 'COMPLETED' };
|
||||
const result = transformBuildDetail(apiResponse);
|
||||
expect(typeof result.status).toBe('string');
|
||||
expect(result.status).toBe('COMPLETED');
|
||||
});
|
||||
|
||||
it('handles null optional fields', () => {
|
||||
const apiResponse = { started_at: null, completed_at: undefined };
|
||||
const result = transformBuildDetail(apiResponse);
|
||||
expect(result.started_at).toBe(null);
|
||||
expect(result.completed_at).toBe(null);
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
2. Test edge cases and malformed responses:
|
||||
- Missing required fields
|
||||
- Null values where not expected
|
||||
- Wrong data types in API responses
|
||||
- Verify type guards catch invalid responses
|
||||
|
||||
3. Validate PartitionRef transformations:
|
||||
```typescript
|
||||
it('converts PartitionRef objects to strings', () => {
|
||||
const apiResponse = { partition_ref: { str: 'test-partition' } };
|
||||
const result = transformPartitionSummary(apiResponse);
|
||||
expect(typeof result.partition_ref).toBe('string');
|
||||
expect(result.partition_ref).toBe('test-partition');
|
||||
});
|
||||
```
|
||||
|
||||
4. Test transformation against real protobuf response shapes:
|
||||
- Use actual OpenAPI generated types in tests
|
||||
- Verify transformations work with current API schema
|
||||
- Document transformation rationale for each field
|
||||
|
||||
**Success Criteria**:
|
||||
- All transformation functions have >90% test coverage
|
||||
- Edge cases and null handling verified
|
||||
- Real API response shapes handled correctly
|
||||
- Type guards prevent invalid data from reaching components
|
||||
|
||||
**Estimated Time**: 1 day
|
||||
|
||||
### Phase 4: Component Migration
|
||||
|
||||
**Goal**: Update all components to use dashboard types exclusively
|
||||
|
||||
**Tasks**:
|
||||
1. Update component implementations to use dashboard types:
|
||||
- Remove direct `.status_code`/`.status_name` access
|
||||
- Use transformed string status values
|
||||
- Handle null values explicitly where needed
|
||||
|
||||
2. Fix specific identified issues:
|
||||
- Line 472: `status?.status` → use `status` directly
|
||||
- Badge components: Ensure they receive strings
|
||||
- Partition list: Use consistent partition type
|
||||
|
||||
3. Update component attribute interfaces to match dashboard types
|
||||
|
||||
4. Add runtime assertions where needed:
|
||||
```typescript
|
||||
if (!status) {
|
||||
console.warn('Missing status in component');
|
||||
return m('span', 'Unknown Status');
|
||||
}
|
||||
```
|
||||
|
||||
**Success Criteria**: All components compile and work with dashboard types
|
||||
|
||||
**Estimated Time**: 2-3 days
|
||||
|
||||
### Phase 4.5: Continuous Component Verification
|
||||
|
||||
**Goal**: Verify components work correctly with dashboard types throughout migration
|
||||
|
||||
**Tasks**:
|
||||
1. After each component migration, run verification tests:
|
||||
```typescript
|
||||
// Component-specific tests
|
||||
describe('BuildDetailComponent', () => {
|
||||
it('renders status as string correctly', () => {
|
||||
const dashboardBuild: DashboardBuild = {
|
||||
status: 'COMPLETED', // Transformed string, not object
|
||||
// ... other fields
|
||||
};
|
||||
const component = m(BuildDetailComponent, { build: dashboardBuild });
|
||||
// Verify no runtime errors with .toLowerCase()
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
2. Test component attribute interfaces match usage:
|
||||
- Verify TypeScript compilation passes for each component
|
||||
- Check that vnode.attrs typing prevents invalid property access
|
||||
- Test null handling in component rendering
|
||||
|
||||
3. Integration tests with real transformed data:
|
||||
- Use actual service layer transformation outputs
|
||||
- Verify components render correctly with dashboard types
|
||||
- Test error states and missing data scenarios
|
||||
|
||||
**Success Criteria**:
|
||||
- Each migrated component passes TypeScript compilation
|
||||
- No runtime errors when using transformed dashboard types
|
||||
- Components gracefully handle null/undefined dashboard fields
|
||||
|
||||
**Estimated Time**: 0.5 days (distributed across Phase 4)
|
||||
|
||||
### Phase 5: Schema Change Simulation & Integration Testing
|
||||
|
||||
**Goal**: Verify end-to-end compile-time correctness with simulated backend changes
|
||||
|
||||
**Tasks**:
|
||||
1. **Automated Schema Change Testing**:
|
||||
```bash
|
||||
# Create test script: scripts/test-schema-changes.sh
|
||||
|
||||
# Test 1: Add new required field to protobuf
|
||||
# - Modify databuild.proto temporarily
|
||||
# - Regenerate Rust types and OpenAPI schema
|
||||
# - Verify TypeScript compilation fails predictably
|
||||
# - Document exact error messages
|
||||
|
||||
# Test 2: Remove existing field
|
||||
# - Remove field from protobuf definition
|
||||
# - Verify transformation functions catch missing fields
|
||||
# - Confirm components fail compilation when accessing removed field
|
||||
|
||||
# Test 3: Change field type (string → object)
|
||||
# - Modify status field structure in protobuf
|
||||
# - Verify transformation layer prevents type mismatches
|
||||
# - Confirm this catches issues like original status.toLowerCase() failure
|
||||
```
|
||||
|
||||
2. **Full Build Cycle Verification**:
|
||||
- Proto change → `bazel build //databuild:openapi_spec_generator`
|
||||
- OpenAPI regeneration → `bazel build //databuild/client:typescript_client`
|
||||
- TypeScript compilation → `bazel build //databuild/dashboard:*`
|
||||
- Document each failure point and error messages
|
||||
|
||||
3. **End-to-End Type Safety Validation**:
|
||||
```typescript
|
||||
// Create comprehensive integration tests
|
||||
describe('End-to-End Type Safety', () => {
|
||||
it('prevents runtime failures from schema changes', async () => {
|
||||
// Test actual API calls with transformed responses
|
||||
const service = DashboardService.getInstance();
|
||||
const activity = await service.getRecentActivity();
|
||||
|
||||
// Verify transformed types prevent original failures
|
||||
activity.recentBuilds.forEach(build => {
|
||||
expect(typeof build.status).toBe('string');
|
||||
expect(() => build.status.toLowerCase()).not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
4. **Regression Testing for Original Failures**:
|
||||
- Test status.toLowerCase() with transformed data
|
||||
- Test status?.status access patterns
|
||||
- Test partition.str access with transformed partition refs
|
||||
- Verify null handling in timestamp fields
|
||||
|
||||
5. **Real Data Flow Testing**:
|
||||
- New build creation → status updates → completion
|
||||
- Partition status changes using dashboard types
|
||||
- Job execution monitoring with transformed data
|
||||
- Error states and edge cases
|
||||
|
||||
**Success Criteria**:
|
||||
- Schema changes cause predictable TypeScript compilation failures
|
||||
- Transformation layer prevents all identified runtime failures
|
||||
- Full build cycle catches type mismatches at each stage
|
||||
- Zero runtime type errors with dashboard types
|
||||
- Original failure scenarios now impossible with strict types
|
||||
|
||||
**Estimated Time**: 2-3 days
|
||||
|
||||
### Phase 6: Documentation & Monitoring
|
||||
|
||||
**Goal**: Establish practices to maintain type safety over time
|
||||
|
||||
**Tasks**:
|
||||
1. Document transformation patterns:
|
||||
- When to create new dashboard types
|
||||
- How to handle protobuf schema changes
|
||||
- Service layer responsibilities
|
||||
|
||||
2. Add runtime monitoring (a sketch follows this task list):
|
||||
- Log transformation failures
|
||||
- Track API response shape mismatches
|
||||
- Monitor for unexpected null values
|
||||
|
||||
3. Create development guidelines:
|
||||
- Never use generated types directly in components
|
||||
- Always transform at service boundaries
|
||||
- Handle nullability explicitly
|
||||
|
||||
4. Set up CI checks:
|
||||
- Strict TypeScript compilation in build pipeline
|
||||
- Automated schema change detection tests
|
||||
- Integration test suite for type safety validation
|
||||
- Pre-commit hooks for TypeScript compilation
|
||||
|
||||
5. **Create Ongoing Verification Tools**:
|
||||
```bash
|
||||
# CI script: scripts/verify-type-safety.sh
|
||||
# - Run schema change simulation tests
|
||||
# - Verify transformation tests pass
|
||||
# - Check strict TypeScript compilation
|
||||
# - Validate component integration tests
|
||||
```
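
Referencing Task 2 above, a minimal sketch of how transformation failures could be logged without changing the happy path (the wrapper is hypothetical, not an existing helper):

```typescript
// Wraps a transformation so shape mismatches are logged with context,
// instead of failing silently or crashing deep inside a component.
function monitoredTransform<TIn, TOut>(
  name: string,
  transform: (input: TIn) => TOut,
): (input: TIn) => TOut {
  return (input: TIn): TOut => {
    try {
      return transform(input);
    } catch (e) {
      // Hypothetical hook; this could also post to an internal metrics endpoint.
      console.error(`[transform:${name}] failed`, { input, error: e });
      throw e;
    }
  };
}

// Usage sketch, assuming transformBuildDetail from Phase 3:
// const safeTransformBuildDetail = monitoredTransform('build_detail', transformBuildDetail);
```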
|
||||
|
||||
**Success Criteria**:
|
||||
- Team has clear practices for maintaining type safety
|
||||
- CI pipeline catches type safety regressions automatically
|
||||
- Schema change testing is automated and repeatable
|
||||
- Documentation provides concrete examples and rationale
|
||||
|
||||
**Estimated Time**: 2 days
|
||||
|
||||
## Risk Mitigation
|
||||
|
||||
### High-Impact Risks
|
||||
|
||||
1. **Breaking Change Volume**: Strict TypeScript may reveal many existing issues
|
||||
- *Mitigation*: Implement incrementally, fix issues in phases
|
||||
- *Rollback*: Keep loose config as backup during transition
|
||||
|
||||
2. **Performance Impact**: Additional transformation layer overhead
|
||||
- *Mitigation*: Profile transformation functions, optimize hot paths
|
||||
- *Monitoring*: Track bundle size and runtime performance
|
||||
|
||||
3. **Developer Learning Curve**: Team needs to adapt to strict null checks
|
||||
- *Mitigation*: Provide training on handling optional types
|
||||
- *Support*: Create examples and best practices documentation
|
||||
|
||||
### Medium-Impact Risks
|
||||
|
||||
1. **API Response Changes**: Backend might return unexpected data shapes
|
||||
- *Mitigation*: Add runtime validation in service layer
|
||||
- *Detection*: Monitor for transformation failures
|
||||
|
||||
2. **Third-party Type Conflicts**: Generated types might conflict with other libraries
|
||||
- *Mitigation*: Use type aliases and careful imports
|
||||
- *Testing*: Verify integration with existing dependencies
|
||||
|
||||
## Success Metrics
|
||||
|
||||
### Compile-Time Safety
|
||||
- [ ] Zero `any` types in dashboard code
|
||||
- [ ] All protobuf optional fields handled explicitly
|
||||
- [ ] TypeScript strict mode enabled and passing
|
||||
- [ ] Component attribute interfaces match usage
|
||||
|
||||
### Runtime Reliability
|
||||
- [ ] Zero "undefined is not a function" errors
|
||||
- [ ] Zero "cannot read property of undefined" errors
|
||||
- [ ] All API error states handled gracefully
|
||||
- [ ] Consistent data shapes across all components
|
||||
|
||||
### Development Experience
|
||||
- [ ] Backend schema changes cause predictable frontend compilation results
|
||||
- [ ] Clear error messages when types don't match
|
||||
- [ ] Consistent patterns for handling new data types
|
||||
- [ ] Fast iteration cycle maintained
|
||||
|
||||
## Future Considerations
|
||||
|
||||
### Schema Evolution Strategy
|
||||
- Plan for handling breaking vs non-breaking backend changes
|
||||
- Consider versioning approach for dashboard types
|
||||
- Establish deprecation process for old data shapes
|
||||
|
||||
### Tooling Enhancements
|
||||
- Consider code generation for transformation functions
|
||||
- Explore runtime schema validation libraries
|
||||
- Investigate GraphQL for stronger API contracts
|
||||
|
||||
### Performance Optimization
|
||||
- Profile transformation layer performance
|
||||
- Consider caching strategies for transformed data
|
||||
- Optimize bundle size impact of strict typing
|
||||
|
||||
---
|
||||
|
||||
## Implementation Notes
|
||||
|
||||
This plan prioritizes compile-time correctness while maintaining development velocity. The phased approach allows for incremental progress and risk mitigation, while the three-pronged strategy (Options 2+3+4) provides comprehensive type safety from protobuf definitions through to component rendering.
|
||||
|
||||
The key insight is that true compile-time correctness requires both accurate type definitions AND consistent data transformation patterns enforced by strict TypeScript configuration.