From 5da38839593efd1701644823af4da91589cb0d2e Mon Sep 17 00:00:00 2001 From: deepanshupal09-datazip Date: Wed, 1 Oct 2025 16:31:11 +0530 Subject: [PATCH 01/11] feat: show error logs in test connection in source and destination --- .../modules/common/Modals/EditSourceModal.tsx | 9 +- .../modules/common/Modals/EntityEditModal.tsx | 8 +- .../Modals/TestConnectionFailureModal.tsx | 118 +++++++++++++++--- .../destinations/pages/CreateDestination.tsx | 8 +- .../destinations/pages/DestinationEdit.tsx | 8 +- ui/src/modules/jobs/pages/JobCreation.tsx | 12 +- ui/src/modules/sources/pages/CreateSource.tsx | 8 +- ui/src/modules/sources/pages/SourceEdit.tsx | 8 +- ui/src/store/destinationStore.ts | 6 +- ui/src/store/sourceStore.ts | 11 +- ui/src/types/entityTypes.ts | 13 +- ui/src/types/errorTypes.ts | 6 + ui/src/types/index.ts | 1 + 13 files changed, 176 insertions(+), 40 deletions(-) diff --git a/ui/src/modules/common/Modals/EditSourceModal.tsx b/ui/src/modules/common/Modals/EditSourceModal.tsx index 2a492a0d..c12329b8 100644 --- a/ui/src/modules/common/Modals/EditSourceModal.tsx +++ b/ui/src/modules/common/Modals/EditSourceModal.tsx @@ -48,7 +48,7 @@ const EditSourceModal = () => { const testResult = await sourceService.testSourceConnection(getSourceData()) - if (testResult.data?.status === "SUCCEEDED") { + if (testResult.data?.connection_result.status === "SUCCEEDED") { setTimeout(() => { setShowTestingModal(false) setShowSuccessModal(true) @@ -60,8 +60,13 @@ const EditSourceModal = () => { navigate("/sources") }, 2000) } else { + const testConnectionError = { + message: testResult.data?.connection_result.message || "", + logs: testResult.data?.logs || [], + } + setShowTestingModal(false) - setSourceTestConnectionError(testResult.data?.message || "") + setSourceTestConnectionError(testConnectionError) setShowFailureModal(true) } } catch (error) { diff --git a/ui/src/modules/common/Modals/EntityEditModal.tsx b/ui/src/modules/common/Modals/EntityEditModal.tsx index 
4000b706..3b37f7f5 100644 --- a/ui/src/modules/common/Modals/EntityEditModal.tsx +++ b/ui/src/modules/common/Modals/EntityEditModal.tsx @@ -69,7 +69,7 @@ const EntityEditModal = ({ entityType }: EntityEditModalProps) => { ? await sourceService.testSourceConnection(getEntityData()) : await destinationService.testDestinationConnection(getEntityData()) - if (testResult.data?.status === "SUCCEEDED") { + if (testResult.data?.connection_result.status === "SUCCEEDED") { setTimeout(() => { setShowTestingModal(false) setShowSuccessModal(true) @@ -82,8 +82,12 @@ const EntityEditModal = ({ entityType }: EntityEditModalProps) => { navigate(navigatePath) }, 2000) } else { + const testConnectionError = { + message: testResult.data?.connection_result.message || "", + logs: testResult.data?.logs || [], + } setShowTestingModal(false) - setTestConnectionError(testResult.data?.message || "") + setTestConnectionError(testConnectionError) setShowFailureModal(true) } } catch (error) { diff --git a/ui/src/modules/common/Modals/TestConnectionFailureModal.tsx b/ui/src/modules/common/Modals/TestConnectionFailureModal.tsx index 402ca57b..71abbc1a 100644 --- a/ui/src/modules/common/Modals/TestConnectionFailureModal.tsx +++ b/ui/src/modules/common/Modals/TestConnectionFailureModal.tsx @@ -1,9 +1,13 @@ import { useNavigate } from "react-router-dom" -import { Modal } from "antd" -import { Info } from "@phosphor-icons/react" +import { message, Modal } from "antd" +import { CopySimpleIcon } from "@phosphor-icons/react" import { useAppStore } from "../../../store" import ErrorIcon from "../../../assets/ErrorIcon.svg" +import { useState } from "react" +import clsx from "clsx" +import { getLogTextColor } from "../../../utils/utils" +import { getLogLevelClass } from "../../../utils/utils" const TestConnectionFailureModal = ({ fromSources, @@ -16,14 +20,17 @@ const TestConnectionFailureModal = ({ sourceTestConnectionError, destinationTestConnectionError, } = useAppStore() + const [isExpanded, 
setIsExpanded] = useState(false) const navigate = useNavigate() const handleCancel = () => { setShowFailureModal(false) + setIsExpanded(false) } const handleBackToPath = () => { setShowFailureModal(false) + setIsExpanded(false) if (fromSources) { navigate("/sources") } else { @@ -31,15 +38,39 @@ const TestConnectionFailureModal = ({ } } + const handleReadMore = () => setIsExpanded(!isExpanded) + + const handleCopyLogs = async () => { + try { + await navigator.clipboard.writeText( + JSON.stringify( + fromSources + ? sourceTestConnectionError?.logs || [] + : destinationTestConnectionError?.logs || [], + null, + 4, + ), + ) + message.success("Logs copied to clipboard!") + } catch { + message.error("Failed to copy logs") + } + } + return ( -
+
-
+

Failed

-

+

Your test connection has failed

-
- - - {fromSources - ? sourceTestConnectionError - : destinationTestConnectionError} - +
+
+
Error
+ {isExpanded && ( + + )} +
+
+ {!isExpanded ? ( + + {fromSources + ? sourceTestConnectionError?.message || "" + : destinationTestConnectionError?.message || ""} + + ) : ( + + + {(fromSources + ? sourceTestConnectionError?.logs || [] + : destinationTestConnectionError?.logs || [] + ).map((jobLog, index) => ( + + + + + ))} + +
+ + {jobLog.level} + + + {jobLog.message} +
+ )} + + {!isExpanded && ( + + )} +
diff --git a/ui/src/modules/destinations/pages/CreateDestination.tsx b/ui/src/modules/destinations/pages/CreateDestination.tsx index 4eff78bc..c6c66983 100644 --- a/ui/src/modules/destinations/pages/CreateDestination.tsx +++ b/ui/src/modules/destinations/pages/CreateDestination.tsx @@ -362,7 +362,7 @@ const CreateDestination = forwardRef< await destinationService.testDestinationConnection(newDestinationData) setShowTestingModal(false) - if (testResult.data?.status === "SUCCEEDED") { + if (testResult.data?.connection_result.status === "SUCCEEDED") { setShowSuccessModal(true) setTimeout(() => { setShowSuccessModal(false) @@ -371,7 +371,11 @@ const CreateDestination = forwardRef< .catch(error => console.error("Error adding destination:", error)) }, 1000) } else { - setDestinationTestConnectionError(testResult.data?.message || "") + const testConnectionError = { + message: testResult.data?.connection_result.message || "", + logs: testResult.data?.logs || [], + } + setDestinationTestConnectionError(testConnectionError) setShowFailureModal(true) } } catch (error) { diff --git a/ui/src/modules/destinations/pages/DestinationEdit.tsx b/ui/src/modules/destinations/pages/DestinationEdit.tsx index 6e9417e4..84926c9a 100644 --- a/ui/src/modules/destinations/pages/DestinationEdit.tsx +++ b/ui/src/modules/destinations/pages/DestinationEdit.tsx @@ -298,7 +298,7 @@ const DestinationEdit: React.FC = ({ setShowTestingModal(true) const testResult = await destinationService.testDestinationConnection(getDestinationData()) - if (testResult.data?.status === "SUCCEEDED") { + if (testResult.data?.connection_result.status === "SUCCEEDED") { setTimeout(() => { setShowTestingModal(false) setShowSuccessModal(true) @@ -309,8 +309,12 @@ const DestinationEdit: React.FC = ({ saveDestination() }, 2000) } else { + const testConnectionError = { + message: testResult.data?.connection_result.message || "", + logs: testResult.data?.logs || [], + } setShowTestingModal(false) - 
setDestinationTestConnectionError(testResult.data?.message || "") + setDestinationTestConnectionError(testConnectionError) setShowFailureModal(true) } } diff --git a/ui/src/modules/jobs/pages/JobCreation.tsx b/ui/src/modules/jobs/pages/JobCreation.tsx index b113462b..6ada1b69 100644 --- a/ui/src/modules/jobs/pages/JobCreation.tsx +++ b/ui/src/modules/jobs/pages/JobCreation.tsx @@ -5,7 +5,7 @@ import { ArrowLeft, ArrowRight, DownloadSimple } from "@phosphor-icons/react" import { v4 as uuidv4 } from "uuid" import { useAppStore } from "../../../store" -import { destinationService, sourceService, jobService } from "../../../api" +import { destinationService, jobService, sourceService } from "../../../api" import { JobBase, JobCreationSteps } from "../../../types" import { @@ -155,18 +155,22 @@ const JobCreation: React.FC = () => { setTimeout(() => { setShowTestingModal(false) - if (testResult.data?.status === "SUCCEEDED") { + if (testResult.data?.connection_result.status === "SUCCEEDED") { setShowSuccessModal(true) setTimeout(() => { setShowSuccessModal(false) setCurrentStep(nextStep) }, 1000) } else { + const testConnectionError = { + message: testResult.data?.connection_result.message || "", + logs: testResult.data?.logs || [], + } setIsFromSources(isSource) if (isSource) { - setSourceTestConnectionError(testResult.data?.message || "") + setSourceTestConnectionError(testConnectionError) } else { - setDestinationTestConnectionError(testResult.data?.message || "") + setDestinationTestConnectionError(testConnectionError) } setShowFailureModal(true) } diff --git a/ui/src/modules/sources/pages/CreateSource.tsx b/ui/src/modules/sources/pages/CreateSource.tsx index 7a510394..f53ad029 100644 --- a/ui/src/modules/sources/pages/CreateSource.tsx +++ b/ui/src/modules/sources/pages/CreateSource.tsx @@ -272,7 +272,7 @@ const CreateSource = forwardRef( const testResult = await sourceService.testSourceConnection(newSourceData) setShowTestingModal(false) - if (testResult.data?.status 
=== "SUCCEEDED") { + if (testResult.data?.connection_result.status === "SUCCEEDED") { setShowSuccessModal(true) setTimeout(() => { setShowSuccessModal(false) @@ -285,7 +285,11 @@ const CreateSource = forwardRef( }) }, 1000) } else { - setSourceTestConnectionError(testResult.data?.message || "") + const testConnectionError = { + message: testResult.data?.connection_result.message || "", + logs: testResult.data?.logs || [], + } + setSourceTestConnectionError(testConnectionError) setShowFailureModal(true) } } catch (error) { diff --git a/ui/src/modules/sources/pages/SourceEdit.tsx b/ui/src/modules/sources/pages/SourceEdit.tsx index a7c2cb2a..de6bb7d1 100644 --- a/ui/src/modules/sources/pages/SourceEdit.tsx +++ b/ui/src/modules/sources/pages/SourceEdit.tsx @@ -273,7 +273,7 @@ const SourceEdit: React.FC = ({ setShowTestingModal(true) const testResult = await sourceService.testSourceConnection(getSourceData()) - if (testResult.data?.status === "SUCCEEDED") { + if (testResult.data?.connection_result.status === "SUCCEEDED") { setTimeout(() => { setShowTestingModal(false) }, 1000) @@ -287,8 +287,12 @@ const SourceEdit: React.FC = ({ saveSource() }, 2200) } else { + const testConnectionError = { + message: testResult.data?.connection_result.message || "", + logs: testResult.data?.logs || [], + } setShowTestingModal(false) - setSourceTestConnectionError(testResult.data?.message || "") + setSourceTestConnectionError(testConnectionError) setShowFailureModal(true) } } diff --git a/ui/src/store/destinationStore.ts b/ui/src/store/destinationStore.ts index 7925d3d4..843283ac 100644 --- a/ui/src/store/destinationStore.ts +++ b/ui/src/store/destinationStore.ts @@ -1,5 +1,5 @@ import { StateCreator } from "zustand" -import type { APIResponse } from "../types" +import type { APIResponse, TestConnectionError } from "../types" import type { EntityBase } from "../types" import type { Entity } from "../types" import { destinationService } from "../api" @@ -8,8 +8,8 @@ export interface 
DestinationSlice { destinations: Entity[] isLoadingDestinations: boolean destinationsError: string | null - destinationTestConnectionError: string | null - setDestinationTestConnectionError: (error: string | null) => void + destinationTestConnectionError: TestConnectionError | null + setDestinationTestConnectionError: (error: TestConnectionError | null) => void fetchDestinations: () => Promise addDestination: (destination: EntityBase) => Promise diff --git a/ui/src/store/sourceStore.ts b/ui/src/store/sourceStore.ts index 1abc9c44..0b17a323 100644 --- a/ui/src/store/sourceStore.ts +++ b/ui/src/store/sourceStore.ts @@ -1,12 +1,17 @@ import { StateCreator } from "zustand" -import type { APIResponse, Entity, EntityBase } from "../types" +import type { + APIResponse, + Entity, + EntityBase, + TestConnectionError, +} from "../types" import { sourceService } from "../api" export interface SourceSlice { sources: Entity[] sourcesError: string | null isLoadingSources: boolean - sourceTestConnectionError: string | null - setSourceTestConnectionError: (error: string | null) => void + sourceTestConnectionError: TestConnectionError | null + setSourceTestConnectionError: (error: TestConnectionError | null) => void fetchSources: () => Promise addSource: (source: EntityBase) => Promise> updateSource: (id: string, source: EntityBase) => Promise> diff --git a/ui/src/types/entityTypes.ts b/ui/src/types/entityTypes.ts index a83c6cc7..043182ad 100644 --- a/ui/src/types/entityTypes.ts +++ b/ui/src/types/entityTypes.ts @@ -36,8 +36,11 @@ export interface EntityTestRequest { config: string } export interface EntityTestResponse { - message: string - status: "FAILED" | "SUCCEEDED" + connection_result: { + message: string + status: "FAILED" | "SUCCEEDED" + } + logs: LogEntry[] } export type EntityType = "source" | "destination" @@ -52,3 +55,9 @@ export interface EntitySavedModalProps { fromJobFlow: boolean entityName?: string } + +export interface LogEntry { + level: string + time: string + 
message: string +} diff --git a/ui/src/types/errorTypes.ts b/ui/src/types/errorTypes.ts index a656b689..8d9120a0 100644 --- a/ui/src/types/errorTypes.ts +++ b/ui/src/types/errorTypes.ts @@ -1,4 +1,5 @@ import { ReactNode } from "react" +import { LogEntry } from "./entityTypes" export interface Props { children: ReactNode @@ -9,3 +10,8 @@ export interface State { hasError: boolean error: Error | null } + +export interface TestConnectionError { + message: string + logs: LogEntry[] +} diff --git a/ui/src/types/index.ts b/ui/src/types/index.ts index ace0b738..a9d90039 100644 --- a/ui/src/types/index.ts +++ b/ui/src/types/index.ts @@ -8,3 +8,4 @@ export * from "./sourceTypes" export * from "./destinationTypes" export * from "./formTypes" export * from "./modalTypes" +export * from "./errorTypes" From b63becef4e562168e79bda3d29d82a02435bfeec Mon Sep 17 00:00:00 2001 From: deepanshupal09-datazip Date: Wed, 1 Oct 2025 16:42:51 +0530 Subject: [PATCH 02/11] fix: fix test connection fail modal --- ui/src/modules/common/Modals/TestConnectionFailureModal.tsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ui/src/modules/common/Modals/TestConnectionFailureModal.tsx b/ui/src/modules/common/Modals/TestConnectionFailureModal.tsx index 71abbc1a..3f55735f 100644 --- a/ui/src/modules/common/Modals/TestConnectionFailureModal.tsx +++ b/ui/src/modules/common/Modals/TestConnectionFailureModal.tsx @@ -96,15 +96,15 @@ const TestConnectionFailureModal = ({
{!isExpanded ? ( - +
{fromSources ? sourceTestConnectionError?.message || "" : destinationTestConnectionError?.message || ""} - +
) : ( From a20e009a1fe64d30bfc50e86f8660308fa79b15e Mon Sep 17 00:00:00 2001 From: deepanshupal09-datazip Date: Mon, 6 Oct 2025 11:29:56 +0530 Subject: [PATCH 03/11] fix: resolve comments --- .../modules/common/Modals/EditSourceModal.tsx | 204 ------------------ .../modules/common/Modals/EntityEditModal.tsx | 6 +- .../Modals/TestConnectionFailureModal.tsx | 7 +- .../destinations/pages/CreateDestination.tsx | 6 +- .../destinations/pages/DestinationEdit.tsx | 6 +- ui/src/modules/jobs/pages/JobCreation.tsx | 6 +- ui/src/modules/sources/pages/CreateSource.tsx | 11 +- ui/src/modules/sources/pages/SourceEdit.tsx | 6 +- ui/src/types/entityTypes.ts | 4 +- ui/src/utils/constants.ts | 7 +- 10 files changed, 46 insertions(+), 217 deletions(-) delete mode 100644 ui/src/modules/common/Modals/EditSourceModal.tsx diff --git a/ui/src/modules/common/Modals/EditSourceModal.tsx b/ui/src/modules/common/Modals/EditSourceModal.tsx deleted file mode 100644 index c12329b8..00000000 --- a/ui/src/modules/common/Modals/EditSourceModal.tsx +++ /dev/null @@ -1,204 +0,0 @@ -import { Button, Modal, Table, message } from "antd" -import { CheckCircle, Warning } from "@phosphor-icons/react" -import { formatDistanceToNow } from "date-fns" -import { useNavigate } from "react-router-dom" - -import { sourceService } from "../../../api" -import { useAppStore } from "../../../store" -import { getConnectorImage } from "../../../utils/utils" - -const EditSourceModal = () => { - const navigate = useNavigate() - const { - showEditSourceModal, - setShowEditSourceModal, - showSuccessModal, - setShowSuccessModal, - selectedSource, - updateSource, - setShowTestingModal, - setShowFailureModal, - setSourceTestConnectionError, - } = useAppStore() - - const getSourceData = () => { - const configStr = - typeof selectedSource?.config === "string" - ? 
selectedSource?.config - : JSON.stringify(selectedSource?.config) - - const sourceData = { - name: selectedSource?.name, - type: selectedSource?.type, - version: selectedSource?.version, - config: configStr, - } - return sourceData - } - - const handleEdit = async () => { - if (!selectedSource?.id) { - message.error("Source ID is missing") - return - } - - try { - setShowEditSourceModal(false) - setShowTestingModal(true) - const testResult = - await sourceService.testSourceConnection(getSourceData()) - - if (testResult.data?.connection_result.status === "SUCCEEDED") { - setTimeout(() => { - setShowTestingModal(false) - setShowSuccessModal(true) - }, 1000) - - setTimeout(async () => { - setShowSuccessModal(false) - await updateSource(selectedSource.id.toString(), selectedSource) - navigate("/sources") - }, 2000) - } else { - const testConnectionError = { - message: testResult.data?.connection_result.message || "", - logs: testResult.data?.logs || [], - } - - setShowTestingModal(false) - setSourceTestConnectionError(testConnectionError) - setShowFailureModal(true) - } - } catch (error) { - message.error("Failed to update source") - console.error(error) - } - } - - return ( - <> - - - - } - open={showEditSourceModal} - onCancel={() => setShowEditSourceModal(false)} - footer={[ - , - , - ]} - centered - width="38%" - > -
-

- Due to the editing, the jobs are going to get affected -

-

- Editing this source will affect the following jobs that are - associated with this source and as a result will fail immediately. - Do you still want to edit the source? -

-
-
-
( - - {activate ? "Active" : "Inactive"} - - ), - }, - { - title: "Last runtime", - dataIndex: "last_run_time", - key: "last_run_time", - render: (text: string) => ( - - {text !== undefined - ? formatDistanceToNow(new Date(text), { - addSuffix: true, - }) - : "-"} - - ), - }, - { - title: "Destination", - dataIndex: "destination_name", - key: "destination_name", - render: (destination_name: string, record: any) => ( -
- {record.destination_type} - {destination_name || "N/A"} -
- ), - }, - ]} - dataSource={selectedSource?.jobs} - pagination={false} - rowKey="key" - scroll={{ y: 300 }} - /> - - - - {/* Success Modal */} - -
- -
- Changes are saved successfully -
-
-
- - ) -} - -export default EditSourceModal diff --git a/ui/src/modules/common/Modals/EntityEditModal.tsx b/ui/src/modules/common/Modals/EntityEditModal.tsx index 3b37f7f5..ab01fdc5 100644 --- a/ui/src/modules/common/Modals/EntityEditModal.tsx +++ b/ui/src/modules/common/Modals/EntityEditModal.tsx @@ -8,6 +8,7 @@ import { sourceService } from "../../../api" import { destinationService } from "../../../api/services/destinationService" import { EntityEditModalProps } from "../../../types" import { getConnectorImage } from "../../../utils/utils" +import { TEST_CONNECTION_STATUS } from "../../../utils/constants" const EntityEditModal = ({ entityType }: EntityEditModalProps) => { const navigate = useNavigate() @@ -69,7 +70,10 @@ const EntityEditModal = ({ entityType }: EntityEditModalProps) => { ? await sourceService.testSourceConnection(getEntityData()) : await destinationService.testDestinationConnection(getEntityData()) - if (testResult.data?.connection_result.status === "SUCCEEDED") { + if ( + testResult.data?.connection_result.status === + TEST_CONNECTION_STATUS.SUCCEEDED + ) { setTimeout(() => { setShowTestingModal(false) setShowSuccessModal(true) diff --git a/ui/src/modules/common/Modals/TestConnectionFailureModal.tsx b/ui/src/modules/common/Modals/TestConnectionFailureModal.tsx index 3f55735f..5056cc10 100644 --- a/ui/src/modules/common/Modals/TestConnectionFailureModal.tsx +++ b/ui/src/modules/common/Modals/TestConnectionFailureModal.tsx @@ -1,13 +1,12 @@ +import { useState } from "react" import { useNavigate } from "react-router-dom" import { message, Modal } from "antd" import { CopySimpleIcon } from "@phosphor-icons/react" +import clsx from "clsx" import { useAppStore } from "../../../store" import ErrorIcon from "../../../assets/ErrorIcon.svg" -import { useState } from "react" -import clsx from "clsx" -import { getLogTextColor } from "../../../utils/utils" -import { getLogLevelClass } from "../../../utils/utils" +import { getLogTextColor, getLogLevelClass } 
from "../../../utils/utils" const TestConnectionFailureModal = ({ fromSources, diff --git a/ui/src/modules/destinations/pages/CreateDestination.tsx b/ui/src/modules/destinations/pages/CreateDestination.tsx index c6c66983..a5438e57 100644 --- a/ui/src/modules/destinations/pages/CreateDestination.tsx +++ b/ui/src/modules/destinations/pages/CreateDestination.tsx @@ -28,6 +28,7 @@ import { CONNECTOR_TYPES, DESTINATION_INTERNAL_TYPES, SETUP_TYPES, + TEST_CONNECTION_STATUS, transformErrors, } from "../../../utils/constants" import EndpointTitle from "../../../utils/EndpointTitle" @@ -362,7 +363,10 @@ const CreateDestination = forwardRef< await destinationService.testDestinationConnection(newDestinationData) setShowTestingModal(false) - if (testResult.data?.connection_result.status === "SUCCEEDED") { + if ( + testResult.data?.connection_result.status === + TEST_CONNECTION_STATUS.SUCCEEDED + ) { setShowSuccessModal(true) setTimeout(() => { setShowSuccessModal(false) diff --git a/ui/src/modules/destinations/pages/DestinationEdit.tsx b/ui/src/modules/destinations/pages/DestinationEdit.tsx index 84926c9a..6b2e51a3 100644 --- a/ui/src/modules/destinations/pages/DestinationEdit.tsx +++ b/ui/src/modules/destinations/pages/DestinationEdit.tsx @@ -32,6 +32,7 @@ import { ENTITY_TYPES, DISPLAYED_JOBS_COUNT, transformErrors, + TEST_CONNECTION_STATUS, } from "../../../utils/constants" import DocumentationPanel from "../../common/components/DocumentationPanel" import StepTitle from "../../common/components/StepTitle" @@ -298,7 +299,10 @@ const DestinationEdit: React.FC = ({ setShowTestingModal(true) const testResult = await destinationService.testDestinationConnection(getDestinationData()) - if (testResult.data?.connection_result.status === "SUCCEEDED") { + if ( + testResult.data?.connection_result.status === + TEST_CONNECTION_STATUS.SUCCEEDED + ) { setTimeout(() => { setShowTestingModal(false) setShowSuccessModal(true) diff --git a/ui/src/modules/jobs/pages/JobCreation.tsx 
b/ui/src/modules/jobs/pages/JobCreation.tsx index 6ada1b69..a37c50a8 100644 --- a/ui/src/modules/jobs/pages/JobCreation.tsx +++ b/ui/src/modules/jobs/pages/JobCreation.tsx @@ -16,6 +16,7 @@ import { DESTINATION_INTERNAL_TYPES, JOB_CREATION_STEPS, JOB_STEP_NUMBERS, + TEST_CONNECTION_STATUS, } from "../../../utils/constants" // Internal imports from components @@ -155,7 +156,10 @@ const JobCreation: React.FC = () => { setTimeout(() => { setShowTestingModal(false) - if (testResult.data?.connection_result.status === "SUCCEEDED") { + if ( + testResult.data?.connection_result.status === + TEST_CONNECTION_STATUS.SUCCEEDED + ) { setShowSuccessModal(true) setTimeout(() => { setShowSuccessModal(false) diff --git a/ui/src/modules/sources/pages/CreateSource.tsx b/ui/src/modules/sources/pages/CreateSource.tsx index f53ad029..918414da 100644 --- a/ui/src/modules/sources/pages/CreateSource.tsx +++ b/ui/src/modules/sources/pages/CreateSource.tsx @@ -19,7 +19,11 @@ import { handleSpecResponse, withAbortController, } from "../../../utils/utils" -import { CONNECTOR_TYPES, transformErrors } from "../../../utils/constants" +import { + CONNECTOR_TYPES, + TEST_CONNECTION_STATUS, + transformErrors, +} from "../../../utils/constants" import EndpointTitle from "../../../utils/EndpointTitle" import FormField from "../../../utils/FormField" import DocumentationPanel from "../../common/components/DocumentationPanel" @@ -272,7 +276,10 @@ const CreateSource = forwardRef( const testResult = await sourceService.testSourceConnection(newSourceData) setShowTestingModal(false) - if (testResult.data?.connection_result.status === "SUCCEEDED") { + if ( + testResult.data?.connection_result.status === + TEST_CONNECTION_STATUS.SUCCEEDED + ) { setShowSuccessModal(true) setTimeout(() => { setShowSuccessModal(false) diff --git a/ui/src/modules/sources/pages/SourceEdit.tsx b/ui/src/modules/sources/pages/SourceEdit.tsx index de6bb7d1..6a15337f 100644 --- a/ui/src/modules/sources/pages/SourceEdit.tsx +++ 
b/ui/src/modules/sources/pages/SourceEdit.tsx @@ -37,6 +37,7 @@ import { connectorTypeMap, DISPLAYED_JOBS_COUNT, transformErrors, + TEST_CONNECTION_STATUS, } from "../../../utils/constants" import ObjectFieldTemplate from "../../common/components/Form/ObjectFieldTemplate" import CustomFieldTemplate from "../../common/components/Form/CustomFieldTemplate" @@ -273,7 +274,10 @@ const SourceEdit: React.FC = ({ setShowTestingModal(true) const testResult = await sourceService.testSourceConnection(getSourceData()) - if (testResult.data?.connection_result.status === "SUCCEEDED") { + if ( + testResult.data?.connection_result.status === + TEST_CONNECTION_STATUS.SUCCEEDED + ) { setTimeout(() => { setShowTestingModal(false) }, 1000) diff --git a/ui/src/types/entityTypes.ts b/ui/src/types/entityTypes.ts index 043182ad..0865cc29 100644 --- a/ui/src/types/entityTypes.ts +++ b/ui/src/types/entityTypes.ts @@ -38,11 +38,13 @@ export interface EntityTestRequest { export interface EntityTestResponse { connection_result: { message: string - status: "FAILED" | "SUCCEEDED" + status: TestConnectionStatus } logs: LogEntry[] } +export type TestConnectionStatus = "FAILED" | "SUCCEEDED" + export type EntityType = "source" | "destination" export interface EntityEditModalProps { diff --git a/ui/src/utils/constants.ts b/ui/src/utils/constants.ts index cfa92981..ee8d88c2 100644 --- a/ui/src/utils/constants.ts +++ b/ui/src/utils/constants.ts @@ -1,5 +1,5 @@ import { GitCommit, LinktreeLogo, Path } from "@phosphor-icons/react" -import { JobCreationSteps, NavItem } from "../types" +import { JobCreationSteps, NavItem, TestConnectionStatus } from "../types" import { getResponsivePageSize } from "./utils" export const PARTITIONING_COLUMNS = [ @@ -250,3 +250,8 @@ export const LABELS = { folderType: "Iceberg DB", }, } as const + +export const TEST_CONNECTION_STATUS: Record = { + SUCCEEDED: "SUCCEEDED", + FAILED: "FAILED", +} as const From 194a8b65275e4d640982792f0d8dfeebcbd2a925 Mon Sep 17 00:00:00 2001 
From: Sarthak Kumar Shailendra <69191344+sarthak-kumar-shailendra@users.noreply.github.com> Date: Sun, 5 Oct 2025 12:40:55 +0000 Subject: [PATCH 04/11] fix: using save icon instead of download icon --- ui/src/modules/jobs/pages/JobCreation.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ui/src/modules/jobs/pages/JobCreation.tsx b/ui/src/modules/jobs/pages/JobCreation.tsx index b68dab90..5bbf6592 100644 --- a/ui/src/modules/jobs/pages/JobCreation.tsx +++ b/ui/src/modules/jobs/pages/JobCreation.tsx @@ -1,7 +1,7 @@ import { useState, useRef } from "react" import { useNavigate, Link, useLocation } from "react-router-dom" import { message } from "antd" -import { ArrowLeft, ArrowRight, DownloadSimple } from "@phosphor-icons/react" +import { ArrowLeft, ArrowRight, FloppyDisk } from "@phosphor-icons/react" import { v4 as uuidv4 } from "uuid" import { useAppStore } from "../../../store" @@ -509,7 +509,7 @@ const JobCreation: React.FC = () => { onClick={handleSaveJob} className="flex items-center justify-center gap-2 rounded-md border border-gray-400 px-4 py-1 font-light hover:bg-[#ebebeb]" > - + Save Job From 89034f57277242f7e2ae2d8bb4f710ac0d4a88fe Mon Sep 17 00:00:00 2001 From: Taraka Swathi Date: Fri, 10 Oct 2025 11:38:37 +0530 Subject: [PATCH 05/11] chore: add comments (#224) * chore: add comments * chore: :rotating_light: lint fix --- ui/public/vite.svg | 1 - ui/src/api/services/analyticsService.ts | 7 +++++++ ui/src/api/services/destinationService.ts | 3 +++ ui/src/api/services/jobService.ts | 3 ++- ui/src/api/services/sourceService.ts | 7 ++++--- ui/src/modules/auth/pages/Login.tsx | 1 + ui/src/modules/common/Modals/DeleteModal.tsx | 1 + ui/src/modules/common/components/DocumentationPanel.tsx | 9 +++++---- .../common/components/Form/ArrayFieldTemplate.tsx | 4 ++++ .../common/components/Form/BooleanSwitchWidget.tsx | 3 +++ .../common/components/Form/CustomFieldTemplate.tsx | 6 +++++- .../modules/common/components/Form/CustomRadioWidget.tsx | 
4 ++++ .../common/components/Form/ObjectFieldTemplate.tsx | 2 +- ui/src/modules/common/components/Layout.tsx | 1 + ui/src/modules/destinations/pages/CreateDestination.tsx | 3 +++ ui/src/modules/destinations/pages/DestinationEdit.tsx | 1 + ui/src/modules/destinations/pages/Destinations.tsx | 1 + ui/src/modules/jobs/pages/JobCreation.tsx | 7 +++++++ ui/src/modules/jobs/pages/JobHistory.tsx | 1 + ui/src/modules/jobs/pages/Jobs.tsx | 3 ++- .../modules/jobs/pages/streams/StreamConfiguration.tsx | 6 +++++- ui/src/modules/jobs/pages/streams/StreamHeader.tsx | 2 ++ ui/src/modules/sources/pages/CreateSource.tsx | 4 ++-- ui/src/routes/index.tsx | 2 ++ ui/src/utils/constants.ts | 1 + ui/src/utils/utils.ts | 7 +++++++ 26 files changed, 75 insertions(+), 15 deletions(-) delete mode 100644 ui/public/vite.svg diff --git a/ui/public/vite.svg b/ui/public/vite.svg deleted file mode 100644 index e7b8dfb1..00000000 --- a/ui/public/vite.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/ui/src/api/services/analyticsService.ts b/ui/src/api/services/analyticsService.ts index e3212c94..1c555514 100644 --- a/ui/src/api/services/analyticsService.ts +++ b/ui/src/api/services/analyticsService.ts @@ -1,6 +1,11 @@ +/** + * AnalyticsService handles sending analytics events + */ + import api from "../axios" import axios from "axios" +// endpoint which handles rate limiting and forwards the events to mixpanel const ANALYTICS_ENDPOINT = "https://analytics.olake.io/mp/track" const sendAnalyticsEvent = async ( @@ -57,6 +62,7 @@ const getSystemInfo = async () => { } } +// returns a unique user id for the user to track them across sessions const getTelemetryID = async (): Promise => { try { const response = await api.get("/telemetry-id") @@ -77,6 +83,7 @@ export const trackEvent = async ( return } + // if user is already logged in we'll get the username from local storage const username = localStorage.getItem("username") const systemInfo = await getSystemInfo() diff --git 
a/ui/src/api/services/destinationService.ts b/ui/src/api/services/destinationService.ts index 09afd4f9..e8345177 100644 --- a/ui/src/api/services/destinationService.ts +++ b/ui/src/api/services/destinationService.ts @@ -11,6 +11,7 @@ import { getConnectorInLowerCase } from "../../utils/utils" // TODO: Make it parquet on all places const normalizeDestinationType = (type: string): string => { + //destination connector typemap const typeMap: Record = { "amazon s3": "s3", "apache iceberg": "iceberg", @@ -93,6 +94,7 @@ export const destinationService = { source_type: source_type, source_version: source_version, }, + //timeout is 0 as test connection takes more time as it needs to connect to the destination { timeout: 0 }, ) return { @@ -136,6 +138,7 @@ export const destinationService = { source_type: source_type, source_version: source_version, }, + //timeout is 300000 as spec takes more time as it needs to fetch the spec from the destination { timeout: 300000, signal }, ) return response.data diff --git a/ui/src/api/services/jobService.ts b/ui/src/api/services/jobService.ts index 7d392727..754077ff 100644 --- a/ui/src/api/services/jobService.ts +++ b/ui/src/api/services/jobService.ts @@ -101,7 +101,7 @@ export const jobService = { const response = await api.post>( `${API_CONFIG.ENDPOINTS.JOBS(API_CONFIG.PROJECT_ID)}/${jobId}/tasks/${taskId}/logs`, { file_path: filePath }, - { timeout: 0 }, + { timeout: 0 }, // Disable timeout for this request since it can take longer ) return response.data } catch (error) { @@ -110,6 +110,7 @@ export const jobService = { } }, + //This either pauses or resumes the job activateJob: async ( jobId: string, activate: boolean, diff --git a/ui/src/api/services/sourceService.ts b/ui/src/api/services/sourceService.ts index 3f4d3a10..d36c9d64 100644 --- a/ui/src/api/services/sourceService.ts +++ b/ui/src/api/services/sourceService.ts @@ -79,7 +79,7 @@ export const sourceService = { version: source.version, config: source.config, }, - { timeout: 
0 }, + { timeout: 0 }, // Disable timeout for this request since it can take longer ) return { success: response.data.success, @@ -101,7 +101,7 @@ export const sourceService = { const response = await api.get>( `${API_CONFIG.ENDPOINTS.SOURCES(API_CONFIG.PROJECT_ID)}/versions/?type=${type}`, { - timeout: 0, + timeout: 0, // Disable timeout for this request since it can take longer }, ) return response.data @@ -123,7 +123,7 @@ export const sourceService = { type: type.toLowerCase(), version, }, - { timeout: 300000, signal }, + { timeout: 300000, signal }, //timeout is 300000 as spec takes more time as it needs to fetch the spec from olake ) return response.data } catch (error) { @@ -132,6 +132,7 @@ export const sourceService = { } }, + //fetches source specific streams getSourceStreams: async ( name: string, type: string, diff --git a/ui/src/modules/auth/pages/Login.tsx b/ui/src/modules/auth/pages/Login.tsx index 7682fb55..8950834e 100644 --- a/ui/src/modules/auth/pages/Login.tsx +++ b/ui/src/modules/auth/pages/Login.tsx @@ -25,6 +25,7 @@ const Login: React.FC = () => { duration: 3, className: "font-medium", }) + // clear form in the case of error form.resetFields() } setLoading(false) diff --git a/ui/src/modules/common/Modals/DeleteModal.tsx b/ui/src/modules/common/Modals/DeleteModal.tsx index 696a51c3..8763136c 100644 --- a/ui/src/modules/common/Modals/DeleteModal.tsx +++ b/ui/src/modules/common/Modals/DeleteModal.tsx @@ -7,6 +7,7 @@ import { Entity } from "../../../types" import { DeleteModalProps } from "../../../types/modalTypes" import { getConnectorImage } from "../../../utils/utils" +//Entity Delete Modal const DeleteModal = ({ fromSource }: DeleteModalProps) => { const { showDeleteModal, diff --git a/ui/src/modules/common/components/DocumentationPanel.tsx b/ui/src/modules/common/components/DocumentationPanel.tsx index b4c008f1..bf181ac8 100644 --- a/ui/src/modules/common/components/DocumentationPanel.tsx +++ 
b/ui/src/modules/common/components/DocumentationPanel.tsx @@ -5,7 +5,7 @@ import { CornersOut, CaretRight, Info, - ArrowSquareOut, + ArrowSquareOutIcon, } from "@phosphor-icons/react" import { DocumentationPanelProps } from "../../../types" @@ -44,6 +44,7 @@ const DocumentationPanel: React.FC = ({ if (!iframe) return const handleLoad = () => { + // as the theme for ui is light themed we need to show only light theme in docs website as the default theme is dark // Post message to iframe for theming iframe.contentWindow?.postMessage({ theme: "light" }, "https://olake.io") @@ -78,7 +79,7 @@ const DocumentationPanel: React.FC = ({ className="flex items-center" onClick={openInNewTab} icon={ - @@ -144,7 +145,7 @@ const DocumentationPanel: React.FC = ({ @@ -174,7 +174,7 @@ const DocumentationPanel: React.FC = ({
- { const { items, canAdd, onAddClick } = props @@ -25,7 +25,7 @@ const ArrayFieldTemplate = (props: ArrayFieldTemplateProps) => { type="text" danger onClick={item.onDropIndexClick(item.index)} - icon={} + icon={} /> )}
@@ -40,7 +40,7 @@ const ArrayFieldTemplate = (props: ArrayFieldTemplateProps) => { size="middle" className="px-3" onClick={onAddClick} - icon={} + icon={} > Add diff --git a/ui/src/modules/common/components/Form/CustomFieldTemplate.tsx b/ui/src/modules/common/components/Form/CustomFieldTemplate.tsx index 25a2833c..0348f1fe 100644 --- a/ui/src/modules/common/components/Form/CustomFieldTemplate.tsx +++ b/ui/src/modules/common/components/Form/CustomFieldTemplate.tsx @@ -1,5 +1,5 @@ import { FieldTemplateProps } from "@rjsf/utils" -import { Info, Plus, Trash } from "@phosphor-icons/react" +import { InfoIcon, PlusIcon, TrashIcon } from "@phosphor-icons/react" import { Tooltip, Button } from "antd" import { useState, useEffect } from "react" @@ -42,7 +42,7 @@ function KeyValueRow({
) @@ -81,7 +81,7 @@ function NewKeyValueRow({
) @@ -225,7 +225,7 @@ export default function CustomFieldTemplate(props: FieldTemplateProps) { title={description || rawDescription} placement="right" > - + )} diff --git a/ui/src/modules/common/components/Layout.tsx b/ui/src/modules/common/components/Layout.tsx index bfc6f2aa..98733778 100644 --- a/ui/src/modules/common/components/Layout.tsx +++ b/ui/src/modules/common/components/Layout.tsx @@ -2,7 +2,12 @@ import { useState } from "react" import clsx from "clsx" import { NavLink, Link, useNavigate } from "react-router-dom" import { LayoutProps } from "antd" -import { CaretLeft, Info, X, SignOut } from "@phosphor-icons/react" +import { + CaretLeftIcon, + InfoIcon, + SignOutIcon, + XIcon, +} from "@phosphor-icons/react" import { useAppStore } from "../../../store" import { NAV_ITEMS } from "../../../utils/constants" @@ -17,13 +22,13 @@ const UpdateNotification: React.FC<{ onClose: () => void }> = ({ onClose }) => ( onClick={onClose} className="absolute right-2 top-2 rounded-full p-1 hover:bg-gray-200" > -
- - @@ -124,7 +129,7 @@ const Sidebar: React.FC<{ collapsed ? "rotate-180" : "rotate-0", )} > - +
diff --git a/ui/src/modules/destinations/components/DestinationEmptyState.tsx b/ui/src/modules/destinations/components/DestinationEmptyState.tsx index 3af708eb..f2fa1d10 100644 --- a/ui/src/modules/destinations/components/DestinationEmptyState.tsx +++ b/ui/src/modules/destinations/components/DestinationEmptyState.tsx @@ -1,5 +1,5 @@ import { Button } from "antd" -import { PlayCircle, Plus } from "@phosphor-icons/react" +import { PlayCircleIcon, PlusIcon } from "@phosphor-icons/react" import { DestinationTutorialYTLink } from "../../../utils/constants" import FirstDestination from "../../../assets/FirstDestination.svg" @@ -29,7 +29,7 @@ const DestinationEmptyState = ({ className="border-1 mb-12 border-[1px] border-[#D9D9D9] bg-white px-6 py-4 text-black" onClick={handleCreateDestination} > - + New Destination
@@ -48,7 +48,7 @@ const DestinationEmptyState = ({
- + OLake/ Tutorial
diff --git a/ui/src/modules/destinations/components/DestinationTable.tsx b/ui/src/modules/destinations/components/DestinationTable.tsx index f2b25d8e..b8d0e97e 100644 --- a/ui/src/modules/destinations/components/DestinationTable.tsx +++ b/ui/src/modules/destinations/components/DestinationTable.tsx @@ -1,6 +1,10 @@ import { useState } from "react" import { Table, Input, Button, Dropdown, Pagination } from "antd" -import { DotsThree, PencilSimpleLine, Trash } from "@phosphor-icons/react" +import { + DotsThreeIcon, + PencilSimpleLineIcon, + TrashIcon, +} from "@phosphor-icons/react" import { DestinationTableProps, Entity } from "../../../types" import { getConnectorImage } from "../../../utils/utils" @@ -31,13 +35,13 @@ const DestinationTable: React.FC = ({ items: [ { key: "edit", - icon: , + icon: , label: "Edit", onClick: () => onEdit(String(record.id)), }, { key: "delete", - icon: , + icon: , label: "Delete", danger: true, onClick: () => onDelete(record), @@ -49,7 +53,7 @@ const DestinationTable: React.FC = ({ >
diff --git a/ui/src/modules/jobs/components/JobConfiguration.tsx b/ui/src/modules/jobs/components/JobConfiguration.tsx index a9aeafe6..5e7d2af8 100644 --- a/ui/src/modules/jobs/components/JobConfiguration.tsx +++ b/ui/src/modules/jobs/components/JobConfiguration.tsx @@ -1,6 +1,6 @@ import { useEffect, useState } from "react" import { Input, Select, Radio, Tooltip } from "antd" -import { Info } from "@phosphor-icons/react" +import { InfoIcon } from "@phosphor-icons/react" import parser from "cron-parser" import { useLocation } from "react-router-dom" @@ -215,7 +215,7 @@ const JobConfiguration: React.FC = ({
- diff --git a/ui/src/modules/jobs/components/JobEmptyState.tsx b/ui/src/modules/jobs/components/JobEmptyState.tsx index 173a7430..0ff2ee94 100644 --- a/ui/src/modules/jobs/components/JobEmptyState.tsx +++ b/ui/src/modules/jobs/components/JobEmptyState.tsx @@ -1,5 +1,5 @@ import { Button } from "antd" -import { GitCommit, PlayCircle } from "@phosphor-icons/react" +import { GitCommitIcon, PlayCircleIcon } from "@phosphor-icons/react" import { JobTutorialYTLink } from "../../../utils/constants" import FirstJob from "../../../assets/FirstJob.svg" @@ -27,7 +27,7 @@ const JobEmptyState = ({ className="mb-12 bg-brand-blue text-sm" onClick={handleCreateJob} > - + Create your first Job
@@ -46,7 +46,7 @@ const JobEmptyState = ({
- + OLake/ Tutorial
diff --git a/ui/src/modules/jobs/components/JobTable.tsx b/ui/src/modules/jobs/components/JobTable.tsx index 1d17b835..498bae6a 100644 --- a/ui/src/modules/jobs/components/JobTable.tsx +++ b/ui/src/modules/jobs/components/JobTable.tsx @@ -3,14 +3,14 @@ import { useNavigate } from "react-router-dom" import { formatDistanceToNow } from "date-fns" import { Table, Input, Button, Dropdown, Pagination } from "antd" import { - ArrowsClockwise, - ClockCounterClockwise, - DotsThree, - Gear, - Pause, - PencilSimple, - Play, - Trash, + ArrowsClockwiseIcon, + ClockCounterClockwiseIcon, + DotsThreeIcon, + GearIcon, + PauseIcon, + PencilSimpleIcon, + PlayIcon, + TrashIcon, XIcon, } from "@phosphor-icons/react" @@ -67,13 +67,13 @@ const JobTable: React.FC = ({ ? [ { key: "edit", - icon: , + icon: , label: "Edit", onClick: () => onEdit(record.id.toString()), }, { key: "delete", - icon: , + icon: , label: "Delete", danger: true, onClick: () => onDelete(record.id.toString()), @@ -82,22 +82,22 @@ const JobTable: React.FC = ({ : [ { key: "sync", - icon: , + icon: , label: "Sync now", onClick: () => onSync(record.id.toString()), }, { key: "edit", - icon: , + icon: , label: "Edit Streams", onClick: () => onEdit(record.id.toString()), }, { key: "pause", icon: record.activate ? ( - + ) : ( - + ), label: record.activate ? "Pause job" : "Resume job", onClick: () => onPause(record.id.toString(), record.activate), @@ -111,19 +111,19 @@ const JobTable: React.FC = ({ }, { key: "history", - icon: , + icon: , label: "Job Logs & History", onClick: () => handleViewHistory(record.id.toString()), }, { key: "settings", - icon: , + icon: , label: "Job settings", onClick: () => handleViewSettings(record.id.toString()), }, { key: "delete", - icon: , + icon: , label: "Delete", danger: true, onClick: () => onDelete(record.id.toString()), @@ -139,7 +139,7 @@ const JobTable: React.FC = ({
@@ -547,7 +551,7 @@ const JobCreation: React.FC = () => { onClick={handleNext} > {currentStep === JOB_CREATION_STEPS.STREAMS ? "Create Job" : "Next"} - + diff --git a/ui/src/modules/jobs/pages/JobEdit.tsx b/ui/src/modules/jobs/pages/JobEdit.tsx index 0398b490..25eaec1d 100644 --- a/ui/src/modules/jobs/pages/JobEdit.tsx +++ b/ui/src/modules/jobs/pages/JobEdit.tsx @@ -2,7 +2,7 @@ import { useState, useEffect } from "react" import clsx from "clsx" import { useNavigate, Link, useParams } from "react-router-dom" import { message } from "antd" -import { ArrowLeft, ArrowRight } from "@phosphor-icons/react" +import { ArrowLeftIcon, ArrowRightIcon } from "@phosphor-icons/react" import { useAppStore } from "../../../store" import { jobService } from "../../../api" @@ -449,7 +449,7 @@ const JobEdit: React.FC = () => { to="/jobs" className="flex items-center gap-2 p-1.5 hover:rounded-md hover:bg-gray-100 hover:text-black" > - +
{jobName ? (jobName === "-" ? " " : jobName) : "New Job"} @@ -577,7 +577,7 @@ const JobEdit: React.FC = () => { : "Finish" : "Next"} {currentStep !== JOB_CREATION_STEPS.STREAMS && ( - + )}
diff --git a/ui/src/modules/jobs/pages/JobHistory.tsx b/ui/src/modules/jobs/pages/JobHistory.tsx index 2cb834d8..ea061c62 100644 --- a/ui/src/modules/jobs/pages/JobHistory.tsx +++ b/ui/src/modules/jobs/pages/JobHistory.tsx @@ -3,10 +3,10 @@ import clsx from "clsx" import { useParams, useNavigate, Link } from "react-router-dom" import { Table, Button, Input, Spin, message, Pagination, Tooltip } from "antd" import { - ArrowLeft, - ArrowRight, - ArrowsClockwise, - Eye, + ArrowLeftIcon, + ArrowRightIcon, + ArrowsClockwiseIcon, + EyeIcon, } from "@phosphor-icons/react" import { useAppStore } from "../../../store" @@ -126,7 +126,7 @@ const JobHistory: React.FC = () => { render: (_: any, record: any) => (
diff --git a/ui/src/modules/jobs/pages/JobLogs.tsx b/ui/src/modules/jobs/pages/JobLogs.tsx index 6e61eb0f..ed839051 100644 --- a/ui/src/modules/jobs/pages/JobLogs.tsx +++ b/ui/src/modules/jobs/pages/JobLogs.tsx @@ -2,7 +2,11 @@ import { useEffect, useState } from "react" import clsx from "clsx" import { useParams, useNavigate, Link, useSearchParams } from "react-router-dom" import { Input, Spin, message, Button, Tooltip } from "antd" -import { ArrowLeft, ArrowRight, ArrowsClockwise } from "@phosphor-icons/react" +import { + ArrowLeftIcon, + ArrowRightIcon, + ArrowsClockwiseIcon, +} from "@phosphor-icons/react" import { useAppStore } from "../../../store" import { @@ -122,7 +126,7 @@ const JobLogs: React.FC = () => { to={`/jobs/${jobId}/history`} className="flex items-center gap-2 p-1.5 hover:rounded-md hover:bg-gray-100 hover:text-black" > - +
@@ -166,7 +170,7 @@ const JobLogs: React.FC = () => { />
diff --git a/ui/src/modules/jobs/pages/JobSettings.tsx b/ui/src/modules/jobs/pages/JobSettings.tsx index 8d7cb23d..f3990ac1 100644 --- a/ui/src/modules/jobs/pages/JobSettings.tsx +++ b/ui/src/modules/jobs/pages/JobSettings.tsx @@ -1,7 +1,7 @@ import { useState, useEffect } from "react" import { useParams, Link, useNavigate } from "react-router-dom" import { Input, Button, Switch, message, Select, Radio, Tooltip } from "antd" -import { Info, ArrowLeft } from "@phosphor-icons/react" +import { InfoIcon, ArrowLeftIcon } from "@phosphor-icons/react" import parser from "cron-parser" import { useAppStore } from "../../../store" @@ -273,7 +273,7 @@ const JobSettings: React.FC = () => { to="/jobs" className="flex items-center gap-2 p-1.5 hover:rounded-md hover:bg-gray-100 hover:text-black" > - +
{job?.name}
@@ -341,7 +341,7 @@ const JobSettings: React.FC = () => { Cron Expression - diff --git a/ui/src/modules/jobs/pages/Jobs.tsx b/ui/src/modules/jobs/pages/Jobs.tsx index a81bd1d6..6811b970 100644 --- a/ui/src/modules/jobs/pages/Jobs.tsx +++ b/ui/src/modules/jobs/pages/Jobs.tsx @@ -1,7 +1,7 @@ import { useState, useEffect } from "react" import { useNavigate } from "react-router-dom" import { Button, Tabs, Empty, message, Spin } from "antd" -import { GitCommit, Plus } from "@phosphor-icons/react" +import { GitCommitIcon, PlusIcon } from "@phosphor-icons/react" import { useAppStore } from "../../../store" import { jobService } from "../../../api" @@ -179,14 +179,14 @@ const Jobs: React.FC = () => {
- +

Jobs

diff --git a/ui/src/modules/jobs/pages/SchemaConfiguration.tsx b/ui/src/modules/jobs/pages/SchemaConfiguration.tsx index ba136e1f..6fec26f3 100644 --- a/ui/src/modules/jobs/pages/SchemaConfiguration.tsx +++ b/ui/src/modules/jobs/pages/SchemaConfiguration.tsx @@ -14,7 +14,11 @@ import FilterButton from "../components/FilterButton" import StepTitle from "../../common/components/StepTitle" import StreamsCollapsibleList from "./streams/StreamsCollapsibleList" import StreamConfiguration from "./streams/StreamConfiguration" -import { ArrowSquareOut, Info, PencilSimple } from "@phosphor-icons/react" +import { + ArrowSquareOutIcon, + InfoIcon, + PencilSimpleIcon, +} from "@phosphor-icons/react" import { DESTINATION_INTERNAL_TYPES, DESTINATATION_DATABASE_TOOLTIP_TEXT, @@ -602,7 +606,7 @@ const SchemaConfiguration: React.FC = ({
- +
@@ -624,7 +628,7 @@ const SchemaConfiguration: React.FC = ({ title="Edit" placement="top" > - setShowDestinationDatabaseModal(true)} /> @@ -637,7 +641,7 @@ const SchemaConfiguration: React.FC = ({ rel="noopener noreferrer" className="flex items-center text-gray-600 transition-colors hover:text-primary" > - +
diff --git a/ui/src/modules/jobs/pages/streams/StreamConfiguration.tsx b/ui/src/modules/jobs/pages/streams/StreamConfiguration.tsx index 79e5cffd..4a0e821e 100644 --- a/ui/src/modules/jobs/pages/streams/StreamConfiguration.tsx +++ b/ui/src/modules/jobs/pages/streams/StreamConfiguration.tsx @@ -12,14 +12,14 @@ import { Tooltip, } from "antd" import { - ColumnsPlusRight, - GridFour, - Info, - Lightning, - Plus, - SlidersHorizontal, - X, - ArrowSquareOut, + ColumnsPlusRightIcon, + GridFourIcon, + InfoIcon, + LightningIcon, + PlusIcon, + SlidersHorizontalIcon, + XIcon, + ArrowSquareOutIcon, } from "@phosphor-icons/react" import { @@ -624,7 +624,7 @@ const StreamConfiguration = ({ {!isSelected && (
- + Select the stream to configure Normalization
)} @@ -777,7 +777,7 @@ const StreamConfiguration = ({
{!isSelected && (
- + Select the stream to configure Data Filter
)} @@ -797,7 +797,7 @@ const StreamConfiguration = ({
Partitioning regex:
- + - + {isSelected ? ( @@ -852,7 +852,7 @@ const StreamConfiguration = ({ ) : (
- + Select the stream to configure Partitioning
)} @@ -896,7 +896,7 @@ const StreamConfiguration = ({
@@ -48,7 +48,7 @@ const SourceEmptyState = ({
- + OLake/ Tutorial
diff --git a/ui/src/modules/sources/components/SourceTable.tsx b/ui/src/modules/sources/components/SourceTable.tsx index f3f6ce7b..74645908 100644 --- a/ui/src/modules/sources/components/SourceTable.tsx +++ b/ui/src/modules/sources/components/SourceTable.tsx @@ -1,6 +1,10 @@ import React, { useState } from "react" import { Table, Input, Button, Dropdown, Pagination } from "antd" -import { DotsThree, PencilSimpleLine, Trash } from "@phosphor-icons/react" +import { + DotsThreeIcon, + PencilSimpleLineIcon, + TrashIcon, +} from "@phosphor-icons/react" import { Entity, SourceTableProps } from "../../../types" import { getConnectorImage, getConnectorLabel } from "../../../utils/utils" @@ -45,13 +49,13 @@ const SourceTable: React.FC = ({ items: [ { key: "edit", - icon: , + icon: , label: "Edit", onClick: () => onEdit(record.id.toString()), }, { key: "delete", - icon: , + icon: , label: "Delete", danger: true, onClick: () => onDelete(record), @@ -63,7 +67,7 @@ const SourceTable: React.FC = ({ >
@@ -484,7 +484,7 @@ const SourceEdit: React.FC = ({ } className="flex items-center gap-2 rounded-md bg-primary px-4 py-2 text-white hover:bg-primary-600" > - + Edit Source
@@ -523,7 +523,7 @@ const SourceEdit: React.FC = ({
- + Capture information
@@ -575,7 +575,7 @@ const SourceEdit: React.FC = ({ OLake Version: * - @@ -586,7 +586,7 @@ const SourceEdit: React.FC = ({ rel="noopener noreferrer" className="flex items-center text-primary hover:text-primary/80" > - + {loadingVersions ? ( @@ -609,7 +609,7 @@ const SourceEdit: React.FC = ({ /> ) : (
- + No versions available
)} @@ -619,7 +619,7 @@ const SourceEdit: React.FC = ({
- +
Endpoint config
{loading ? ( diff --git a/ui/src/modules/sources/pages/Sources.tsx b/ui/src/modules/sources/pages/Sources.tsx index 0d617974..21f090b7 100644 --- a/ui/src/modules/sources/pages/Sources.tsx +++ b/ui/src/modules/sources/pages/Sources.tsx @@ -1,7 +1,7 @@ import { useState, useEffect } from "react" import { useNavigate } from "react-router-dom" import { Button, Tabs, Empty, message, Spin } from "antd" -import { LinktreeLogo, Plus } from "@phosphor-icons/react" +import { LinktreeLogoIcon, PlusIcon } from "@phosphor-icons/react" import { useAppStore } from "../../../store" import analyticsService from "../../../api/services/analyticsService" @@ -101,14 +101,14 @@ const Sources: React.FC = () => {
- +

Sources

diff --git a/ui/src/utils/EndpointTitle.tsx b/ui/src/utils/EndpointTitle.tsx index 07f2011b..15339b5a 100644 --- a/ui/src/utils/EndpointTitle.tsx +++ b/ui/src/utils/EndpointTitle.tsx @@ -1,11 +1,11 @@ -import { GenderNeuter } from "@phosphor-icons/react" +import { GenderNeuterIcon } from "@phosphor-icons/react" import { EndpointTitleProps } from "../types" const EndpointTitle = ({ title = "Endpoint config" }: EndpointTitleProps) => (
- +
{title}
diff --git a/ui/src/utils/constants.ts b/ui/src/utils/constants.ts index 1e99975e..391f36a6 100644 --- a/ui/src/utils/constants.ts +++ b/ui/src/utils/constants.ts @@ -1,4 +1,8 @@ -import { GitCommit, LinktreeLogo, Path } from "@phosphor-icons/react" +import { + GitCommitIcon, + LinktreeLogoIcon, + PathIcon, +} from "@phosphor-icons/react" import { JobCreationSteps, NavItem, TestConnectionStatus } from "../types" import { getResponsivePageSize } from "./utils" @@ -121,9 +125,9 @@ export const LOCALSTORAGE_TOKEN_KEY = "token" export const LOCALSTORAGE_USERNAME_KEY = "username" export const NAV_ITEMS: NavItem[] = [ - { path: "/jobs", label: "Jobs", icon: GitCommit }, - { path: "/sources", label: "Sources", icon: LinktreeLogo }, - { path: "/destinations", label: "Destinations", icon: Path }, + { path: "/jobs", label: "Jobs", icon: GitCommitIcon }, + { path: "/sources", label: "Sources", icon: LinktreeLogoIcon }, + { path: "/destinations", label: "Destinations", icon: PathIcon }, ] export const sourceTabs = [ From 4cf993871e1d8d001fc115f68934676ed34cdebb Mon Sep 17 00:00:00 2001 From: deepanshupal09-datazip Date: Wed, 29 Oct 2025 13:09:16 +0530 Subject: [PATCH 10/11] feat: append only mode (#232) * feat: add append only mode in ui * fix: data filter bug * fix: fix color of append only mode switch * fix: add custom option in all streams ingestion mode change * fix: use clsx for conditional styling * fix: fix icons --- .../Modals/IngestionModeChangeModal.tsx | 56 +++++ .../jobs/pages/SchemaConfiguration.tsx | 64 +++++- .../pages/streams/StreamConfiguration.tsx | 66 +++++- .../jobs/pages/streams/StreamHeader.tsx | 2 +- .../pages/streams/StreamsCollapsibleList.tsx | 200 ++++++++++++------ ui/src/store/modalStore.ts | 11 +- ui/src/types/commonTypes.ts | 6 + ui/src/types/modalTypes.ts | 6 + ui/src/types/streamTypes.ts | 9 +- ui/src/utils/utils.ts | 25 ++- 10 files changed, 373 insertions(+), 72 deletions(-) create mode 100644 
ui/src/modules/common/Modals/IngestionModeChangeModal.tsx diff --git a/ui/src/modules/common/Modals/IngestionModeChangeModal.tsx b/ui/src/modules/common/Modals/IngestionModeChangeModal.tsx new file mode 100644 index 00000000..4a3ebab1 --- /dev/null +++ b/ui/src/modules/common/Modals/IngestionModeChangeModal.tsx @@ -0,0 +1,56 @@ +import { Button, Modal } from "antd" +import { useAppStore } from "../../../store" +import { IngestionModeChangeModalProps } from "../../../types/modalTypes" + +const IngestionModeChangeModal = ({ + onConfirm, + ingestionMode, +}: IngestionModeChangeModalProps) => { + const { showIngestionModeChangeModal, setShowIngestionModeChangeModal } = + useAppStore() + + return ( + +
+
+ Switch to {ingestionMode} for all tables ? +
+ +
+
+ All tables will be switched to {ingestionMode} mode, +
+
+ You can change mode for individual tables +
+
+ +
+ + +
+
+
+ ) +} + +export default IngestionModeChangeModal diff --git a/ui/src/modules/jobs/pages/SchemaConfiguration.tsx b/ui/src/modules/jobs/pages/SchemaConfiguration.tsx index 6fec26f3..88ed096b 100644 --- a/ui/src/modules/jobs/pages/SchemaConfiguration.tsx +++ b/ui/src/modules/jobs/pages/SchemaConfiguration.tsx @@ -5,6 +5,7 @@ import { sourceService } from "../../../api" import { useAppStore } from "../../../store" import { CombinedStreamsData, + IngestionMode, SchemaConfigurationProps, SelectedStream, StreamData, @@ -45,7 +46,8 @@ const SchemaConfiguration: React.FC = ({ onLoadingChange, }) => { const prevSourceConfig = useRef(sourceConfig) - const { setShowDestinationDatabaseModal } = useAppStore() + const { setShowDestinationDatabaseModal, ingestionMode, setIngestionMode } = + useAppStore() const [searchText, setSearchText] = useState("") const [selectedFilters, setSelectedFilters] = useState([ "All tables", @@ -358,6 +360,7 @@ const SchemaConfiguration: React.FC = ({ normalization: false, filter: "", disabled: false, + append_mode: ingestionMode === IngestionMode.APPEND, }, ] changed = true @@ -435,6 +438,63 @@ const SchemaConfiguration: React.FC = ({ }) } + const handleIngestionModeChange = ( + streamName: string, + namespace: string, + appendMode: boolean, + ) => { + setApiResponse(prev => { + if (!prev) return prev + + const streamExistsInSelected = prev.selected_streams[namespace]?.some( + s => s.stream_name === streamName, + ) + + if (!streamExistsInSelected) return prev + + const updatedSelectedStreams = { + ...prev.selected_streams, + [namespace]: prev.selected_streams[namespace].map(s => + s.stream_name === streamName ? 
{ ...s, append_mode: appendMode } : s, + ), + } + + const updated = { + ...prev, + selected_streams: updatedSelectedStreams, + } + + setSelectedStreams(updated) + return updated + }) + } + + const handleAllIngestionModeChange = (ingestionMode: IngestionMode) => { + const appendMode = ingestionMode === IngestionMode.APPEND + setIngestionMode(ingestionMode) + setApiResponse(prev => { + if (!prev) return prev + + // Update all streams with the same append mode + const updateSelectedStreams = Object.fromEntries( + Object.entries(prev.selected_streams).map(([namespace, streams]) => [ + namespace, + streams.map(stream => ({ + ...stream, + append_mode: appendMode, + })), + ]), + ) + + const updated = { + ...prev, + selected_streams: updateSelectedStreams, + } + setSelectedStreams(updated) + return updated + }) + } + const filteredStreams = useMemo(() => { if (!apiResponse?.streams) return [] let tempFilteredStreams = [...apiResponse.streams] @@ -687,6 +747,7 @@ const SchemaConfiguration: React.FC = ({ // Pass it to the parent component setSelectedStreams(fullData as CombinedStreamsData) }} + onIngestionModeChange={handleAllIngestionModeChange} /> ) : loading ? (
@@ -740,6 +801,7 @@ const SchemaConfiguration: React.FC = ({ fromJobEditFlow={fromJobEditFlow} initialSelectedStreams={apiResponse || undefined} destinationType={destinationType} + onIngestionModeChange={handleIngestionModeChange} /> ) : null}
diff --git a/ui/src/modules/jobs/pages/streams/StreamConfiguration.tsx b/ui/src/modules/jobs/pages/streams/StreamConfiguration.tsx index 4a0e821e..d95ad5eb 100644 --- a/ui/src/modules/jobs/pages/streams/StreamConfiguration.tsx +++ b/ui/src/modules/jobs/pages/streams/StreamConfiguration.tsx @@ -14,12 +14,12 @@ import { import { ColumnsPlusRightIcon, GridFourIcon, - InfoIcon, LightningIcon, PlusIcon, SlidersHorizontalIcon, XIcon, ArrowSquareOutIcon, + InfoIcon, } from "@phosphor-icons/react" import { @@ -30,6 +30,7 @@ import { MultiFilterCondition, CombinedStreamsData, SyncMode, + IngestionMode, } from "../../../../types" import { @@ -57,9 +58,11 @@ const StreamConfiguration = ({ fromJobEditFlow = false, initialSelectedStreams, destinationType = DESTINATION_INTERNAL_TYPES.S3, + onIngestionModeChange, }: ExtendedStreamConfigurationProps) => { const [activeTab, setActiveTab] = useState("config") const [syncMode, setSyncMode] = useState(stream.stream.sync_mode) + const [appendMode, setAppendMode] = useState(false) const [normalization, setNormalization] = useState(initialNormalization) const [fullLoadFilter, setFullLoadFilter] = useState(false) @@ -141,7 +144,13 @@ const StreamConfiguration = ({ partition_regex: initialPartitionRegex || "", fullLoadFilter: formData.fullLoadFilter || false, })) - }, [stream, initialNormalization]) + + setAppendMode( + initialSelectedStreams?.selected_streams?.[ + stream.stream.namespace || "" + ]?.find(s => s.stream_name === stream.stream.name)?.append_mode || false, + ) + }, [stream, initialNormalization, initialSelectedStreams]) useEffect(() => { // Skip when change originated from local user action @@ -249,6 +258,15 @@ const StreamConfiguration = ({ }) } + const handleIngestionModeChange = (ingestionMode: IngestionMode) => { + setAppendMode(ingestionMode === IngestionMode.APPEND) + onIngestionModeChange( + stream.stream.name, + stream.stream.namespace || "", + ingestionMode === IngestionMode.APPEND, + ) + } + const 
handleNormalizationChange = (checked: boolean) => { setNormalization(checked) onNormalizationChange( @@ -507,7 +525,13 @@ const StreamConfiguration = ({ ? columnType.find(t => t !== "null") || columnType[0] : columnType - if (primaryType === "string" || primaryType === "timestamp") { + if ( + primaryType === "string" || + primaryType === "timestamp" || + primaryType === "timestamp_micro" || + primaryType === "timestamp_nano" || + primaryType === "timestamp_milli" + ) { // Check if value is already wrapped in quotes if (!value.startsWith('"') && !value.endsWith('"')) { return `"${value}"` @@ -729,6 +753,42 @@ const StreamConfiguration = ({
)}
+ +
+
+ +
+ Specify how the data will be ingested in the destination +
+
+ handleIngestionModeChange(e.target.value)} + > + Upsert + Append + + {!isSelected && ( +
+ + Select the stream to configure ingestion mode +
+ )} +
= ({ "flex w-full items-center justify-between border-b border-solid border-[#e5e7eb] py-3 pl-6", isActiveStream ? "bg-primary-100" - : "bg-white hover:bg-background-primary", + : "border-l border-r bg-white hover:bg-background-primary", )} >
{ + const { setShowIngestionModeChangeModal, ingestionMode, setIngestionMode } = + useAppStore() const [openNamespaces, setOpenNamespaces] = useState<{ [ns: string]: boolean }>({}) @@ -24,6 +32,13 @@ const StreamsCollapsibleList = ({ namespaces: {}, streams: {}, }) + const [targetIngestionMode, setTargetIngestionMode] = useState( + IngestionMode.APPEND, + ) + + useEffect(() => { + setIngestionMode(getIngestionMode(selectedStreams)) + }, [selectedStreams]) useEffect(() => { if (Object.keys(openNamespaces).length === 0) { @@ -223,75 +238,132 @@ const StreamsCollapsibleList = ({ } return ( -
- {Object.keys(groupedStreams).length === 0 ? ( - - ) : ( - <> -
- handleGlobalSyncAll(e.target.checked)} - > - Sync all - -
- {Object.entries(groupedStreams).map(([ns, streams]) => { - return ( -
+
+ {Object.keys(groupedStreams).length === 0 ? ( + + ) : ( + <> +
+ handleGlobalSyncAll(e.target.checked)} > + Sync all + + +
+ {/* Sliding background */}
handleToggleNamespace(ns)} + className={clsx( + "absolute inset-y-0.5 w-[calc(34%)] rounded-sm bg-primary-100 shadow-sm transition-transform duration-300 ease-in-out", + { + "translate-x-0.5": ingestionMode === IngestionMode.UPSERT, + "translate-x-[calc(100%+0px)]": + ingestionMode === IngestionMode.APPEND, + "translate-x-[calc(200%-2px)]": + ingestionMode === IngestionMode.CUSTOM, + }, + )} + /> +
{ + if (ingestionMode !== IngestionMode.UPSERT) { + setTargetIngestionMode(IngestionMode.UPSERT) + setShowIngestionModeChangeModal(true) + } + }} + className={`relative z-10 flex cursor-pointer items-center justify-center rounded-sm p-1 px-4 text-center transition-colors duration-300`} > - handleNamespaceSyncAll(ns, e.target.checked)} - onClick={e => e.stopPropagation()} - className="mr-2" - /> - {ns} - - {openNamespaces[ns] ? ( - - ) : ( - - )} - + All Upsert +
+
{ + if (ingestionMode !== IngestionMode.APPEND) { + setTargetIngestionMode(IngestionMode.APPEND) + setShowIngestionModeChangeModal(true) + } + }} + className={`relative z-10 flex cursor-pointer items-center justify-center rounded-sm p-1 px-4 text-center transition-colors duration-300`} + > + All Append +
+
+ Custom
- {openNamespaces[ns] && ( -
- {streams.map(streamData => ( - - handleStreamSelect(streamName, checked, ns) - } - isSelected={ - checkedStatus.streams[ns]?.[streamData.stream.name] || - false - } - /> - ))} -
- )}
- ) - })} - - )} -
+
+ {Object.entries(groupedStreams).map(([ns, streams]) => { + return ( +
+
handleToggleNamespace(ns)} + > + + handleNamespaceSyncAll(ns, e.target.checked) + } + onClick={e => e.stopPropagation()} + className="mr-2" + /> + {ns} + + {openNamespaces[ns] ? ( + + ) : ( + + )} + +
+ {openNamespaces[ns] && ( +
+ {streams.map(streamData => ( + + handleStreamSelect(streamName, checked, ns) + } + isSelected={ + checkedStatus.streams[ns]?.[ + streamData.stream.name + ] || false + } + /> + ))} +
+ )} +
+ ) + })} + + )} +
+ + ) } diff --git a/ui/src/store/modalStore.ts b/ui/src/store/modalStore.ts index fa3f1edd..46e1e151 100644 --- a/ui/src/store/modalStore.ts +++ b/ui/src/store/modalStore.ts @@ -1,4 +1,5 @@ import { StateCreator } from "zustand" +import { IngestionMode } from "../types/commonTypes" export interface ModalSlice { showTestingModal: boolean @@ -14,6 +15,8 @@ export interface ModalSlice { showEditDestinationModal: boolean showDestinationDatabaseModal: boolean showResetStreamsModal: boolean + showIngestionModeChangeModal: boolean + ingestionMode: IngestionMode setShowTestingModal: (show: boolean) => void setShowSuccessModal: (show: boolean) => void setShowFailureModal: (show: boolean) => void @@ -27,6 +30,8 @@ export interface ModalSlice { setShowEditDestinationModal: (show: boolean) => void setShowDestinationDatabaseModal: (show: boolean) => void setShowResetStreamsModal: (show: boolean) => void + setShowIngestionModeChangeModal: (show: boolean) => void + setIngestionMode: (mode: IngestionMode) => void } export const createModalSlice: StateCreator = set => ({ @@ -43,7 +48,8 @@ export const createModalSlice: StateCreator = set => ({ showEditDestinationModal: false, showDestinationDatabaseModal: false, showResetStreamsModal: false, - + showIngestionModeChangeModal: false, + ingestionMode: IngestionMode.UPSERT, setShowTestingModal: show => set({ showTestingModal: show }), setShowSuccessModal: show => set({ showSuccessModal: show }), setShowFailureModal: show => set({ showFailureModal: show }), @@ -59,4 +65,7 @@ export const createModalSlice: StateCreator = set => ({ setShowDestinationDatabaseModal: show => set({ showDestinationDatabaseModal: show }), setShowResetStreamsModal: show => set({ showResetStreamsModal: show }), + setShowIngestionModeChangeModal: show => + set({ showIngestionModeChangeModal: show }), + setIngestionMode: mode => set({ ingestionMode: mode }), }) diff --git a/ui/src/types/commonTypes.ts b/ui/src/types/commonTypes.ts index 1a4e2d68..a33c1ba6 100644 
--- a/ui/src/types/commonTypes.ts +++ b/ui/src/types/commonTypes.ts @@ -79,3 +79,9 @@ export interface CronParseResult { selectedDay?: string customCronExpression?: string } + +export enum IngestionMode { + UPSERT = "Upsert", + APPEND = "Append", + CUSTOM = "Custom", +} diff --git a/ui/src/types/modalTypes.ts b/ui/src/types/modalTypes.ts index 61436c13..7c4a1da8 100644 --- a/ui/src/types/modalTypes.ts +++ b/ui/src/types/modalTypes.ts @@ -1,3 +1,4 @@ +import { IngestionMode } from "./commonTypes" import { StreamsDataStructure } from "./streamTypes" export interface DeleteModalProps { @@ -16,3 +17,8 @@ export interface DestinationDatabaseModalProps { export interface ResetStreamsModalProps { onConfirm: () => void } + +export interface IngestionModeChangeModalProps { + onConfirm: (ingestionMode: IngestionMode) => void + ingestionMode: IngestionMode +} diff --git a/ui/src/types/streamTypes.ts b/ui/src/types/streamTypes.ts index af75b35e..ac7044ee 100644 --- a/ui/src/types/streamTypes.ts +++ b/ui/src/types/streamTypes.ts @@ -1,5 +1,5 @@ import type { CheckboxChangeEvent } from "antd/es/checkbox" -import type { UnknownObject } from "./index" +import type { IngestionMode, UnknownObject } from "./index" export enum SyncMode { FULL_REFRESH = "full_refresh", @@ -94,6 +94,7 @@ export interface SelectedStream { normalization: boolean filter?: string disabled?: boolean + append_mode?: boolean } export interface StreamsDataStructure { @@ -166,6 +167,11 @@ export interface ExtendedStreamConfigurationProps namespace: string, filterValue: string, ) => void + onIngestionModeChange: ( + streamName: string, + namespace: string, + appendMode: boolean, + ) => void } export interface GroupedStreamsCollapsibleListProps { @@ -194,6 +200,7 @@ export interface GroupedStreamsCollapsibleListProps { } > > + onIngestionModeChange: (ingestionMode: IngestionMode) => void } export interface StreamSchemaProps { diff --git a/ui/src/utils/utils.ts b/ui/src/utils/utils.ts index 9959f1a7..769dec86 
100644 --- a/ui/src/utils/utils.ts +++ b/ui/src/utils/utils.ts @@ -1,7 +1,7 @@ import { message } from "antd" import parser from "cron-parser" -import { CronParseResult, SelectedStream } from "../types" +import { CronParseResult, IngestionMode, SelectedStream } from "../types" import { DAYS_MAP, DESTINATION_INTERNAL_TYPES, @@ -541,3 +541,26 @@ export const validateStreams = (selections: { streams.some(sel => sel.filter && !validateFilter(sel.filter)), ) } + +export const getIngestionMode = (selectedStreams: { + [key: string]: SelectedStream[] +}): IngestionMode => { + const selectedStreamsObj = getSelectedStreams(selectedStreams) + const allSelectedStreams: SelectedStream[] = [] + + // Flatten all streams from all namespaces + Object.values(selectedStreamsObj).forEach((streams: SelectedStream[]) => { + allSelectedStreams.push(...streams) + }) + + if (allSelectedStreams.length === 0) return IngestionMode.UPSERT + + const appendCount = allSelectedStreams.filter( + s => s.append_mode === true, + ).length + const upsertCount = allSelectedStreams.filter(s => !s.append_mode).length + + if (appendCount === allSelectedStreams.length) return IngestionMode.APPEND + if (upsertCount === allSelectedStreams.length) return IngestionMode.UPSERT + return IngestionMode.CUSTOM +} From ace41214640bb8cd1786e61a960388b3ea237134 Mon Sep 17 00:00:00 2001 From: vikash choudhary Date: Fri, 7 Nov 2025 18:09:30 +0530 Subject: [PATCH 11/11] refactor: backend and removal of temporal worker (#111) * fix(api): refactor raw queries and add constants * chore(api): Adding service layer * chore(api): adding service layer * fix: resolve conflict errors * feat: improve Worker design by passing existing Client * feat: improving error handling a * feat: add info logging * fix: changes after testing * feat(docker): use Docker Go SDK instead of shell commands for container operations * feat: updated GoSec security ci * feat: delete config files * chore: update project name to olake-ui * chore: addressed 
review comments * chore: creatign dto packaghe * chore: using GetIDFromPath function for id * feat: implement dto layer and validation for request * merge staging * chore: refactor based on self review * chore: refactor * docs: updated api contract * refactor: initialize services once at startup and standardize error logging * fix: remove unnecessary checks * fix: ci issues * fix: ci issues * fix: ci issues * fix: lint issues * fix: refactor unmarshal and validation for req body * chore: refactor dto and nil checks * chore: use errorf * fix: dto and db queries * chore: resovle review comments * chore: remove nil checks and added request uri in error logs * fix: lint issue * chore: refactor bff (#237) * chore: refactored refactor pr * chore: some more refactor related changes * chore: refactor temporal service * chore: remove comments * chore: refactor error throwing * chore: updating handler * chore: rename service and db thing * chore: adding back vikash commit of package name change * chore: adding back vikash commit of package name change * refactor: merge k8s and docker workers (#205) * fix: commands -> check, discover * fix: sync worklfow * refactor: remove worker from olake-ui * chore: remove worker files & add timeout utility * chore: update branch name in worklfow to release an image * fix: remove worker from release.sh * fix: add pre and post sync worker callback * fix: add telemetry event for sync workflow * fix: latest changes * chore: remove stale code * fix: update env var - container_persistence_dir and host_persistence_dir * chore: remove worker job api * chore: cleanup files * fix: fix refactor bff changes * chore: move encryptionkey to app.conf * chore: add unmarshal-and-validate util * chore: add robust error logging (resolve comments) * chore: remove container_persistent_path env var * fix: update sync workflow to match the old worker * chore: add wrong workflow name * chore: add correct workflow name * fix: resolve comments * chore: add 
constants for app.conf keys * fix: call pause and unpause in activate job service * chore: api success response update * fix: lint issue * fix: source and job dataitems * fix: api according to bff refactor * fix: build errors * chore: consistant logger accorss files * fix: security ci * fix: security ci * fix: panic issue and add unique name check in job creation * feat: added job cancel in job updatea and return error from test connection * chore: move temporal and etl into services folder * chore: use same name in sevice and handlers * fix: source test connection * fix: move result nil check before error in source and destiantion test connection * fix: fix job history retry and destination spec api (#244) * fix: fix job history retry * fix: fix destination spec api * fix: remove hardcoding branch name in build-ui workflow * fix: remove k8s and docker task queue * fix: lint issue * fix: error in login and test destiantion * fix: add retry logic for temporal client init * fix: send id in job edit (#245) * fix: send id in job edit * fix: add modal for spec error, fix double error messages * fix: integration test fix * fix: minor fix * chore: update worker tag in docker-compose for int. 
test * chore: remove temporal address from constant (#246) * chore: remove temporalAddr constant and make retry simple * chore: remove logs * fix: add cleanup on cancelling fetchSpec API * chore: resolve review comments * chore: remove unused messages * fix: integration test - attempt 1 * chore: added a todo --------- Co-authored-by: Ankit Sharma <111491139+hash-data@users.noreply.github.com> Co-authored-by: hashcode-ankit Co-authored-by: vishal-datazip Co-authored-by: deepanshupal09-datazip --- .github/workflows/build-and-release.yml | 37 +- .github/workflows/security-ci.yaml | 10 +- Dockerfile | 3 + Makefile | 5 +- api-contract.md | 215 +++--- docker-compose.yml | 3 +- release.sh | 54 -- server/README.md | 2 +- server/cmd/temporal-worker/main.go | 68 -- server/conf/app.conf | 1 + server/go.mod | 55 +- server/go.sum | 96 ++- server/internal/constants/constants.go | 40 +- server/internal/constants/messages.go | 20 + .../database/{postgres.go => database.go} | 42 +- server/internal/database/destination.go | 94 +-- server/internal/database/job.go | 202 ++--- server/internal/database/source.go | 82 +-- server/internal/database/user.go | 48 +- server/internal/docker/runner.go | 497 ------------- server/internal/handlers/auth.go | 133 ++-- server/internal/handlers/destination.go | 362 +++------ server/internal/handlers/frontend_handlers.go | 22 - server/internal/handlers/handler.go | 25 + server/internal/handlers/handlers_utils.go | 120 --- server/internal/handlers/job.go | 692 +++++------------- .../auth.go} | 8 +- server/internal/handlers/source.go | 402 ++++------ server/internal/handlers/ui.go | 19 + server/internal/handlers/user.go | 95 ++- server/internal/handlers/utils.go | 49 ++ server/internal/logger/logger.go | 56 -- server/internal/models/db.go | 2 +- server/internal/models/dto/requests.go | 98 +++ server/internal/models/{ => dto}/response.go | 40 +- server/internal/models/dto/validate.go | 47 ++ server/internal/models/requests.go | 83 --- 
server/internal/services/etl/auth.go | 64 ++ server/internal/services/etl/destination.go | 231 ++++++ server/internal/services/etl/job.go | 399 ++++++++++ server/internal/services/etl/services.go | 26 + server/internal/services/etl/source.go | 249 +++++++ server/internal/services/etl/user.go | 51 ++ server/internal/services/etl/utils.go | 110 +++ server/internal/services/temporal/client.go | 144 ++++ server/internal/services/temporal/execute.go | 210 ++++++ server/internal/services/temporal/utils.go | 102 +++ server/internal/telemetry/job.go | 45 -- server/internal/temporal/README.md | 144 ---- server/internal/temporal/activities.go | 118 --- server/internal/temporal/client.go | 303 -------- server/internal/temporal/types.go | 41 -- server/internal/temporal/worker.go | 56 -- server/internal/temporal/workflows.go | 129 ---- server/main.go | 52 +- server/routes/router.go | 81 +- server/tests/test_utils.go | 12 +- server/utils/docker_utils.go | 9 +- server/utils/encryption.go | 10 +- server/utils/logger/logger.go | 102 +++ server/{internal => utils}/telemetry/auth.go | 6 +- .../telemetry/constants.go | 0 .../telemetry/destination.go | 23 +- server/utils/telemetry/job.go | 53 ++ .../{internal => utils}/telemetry/source.go | 20 +- server/{internal => utils}/telemetry/sync.go | 24 +- .../telemetry/telemetry.go | 19 +- server/utils/utils.go | 129 +--- ui/src/api/axios.ts | 41 ++ ui/src/api/services/analyticsService.ts | 2 +- ui/src/api/services/authService.ts | 26 +- ui/src/api/services/destinationService.ts | 27 +- ui/src/api/services/jobService.ts | 41 +- ui/src/api/services/notificationService.ts | 19 + ui/src/api/services/sourceService.ts | 29 +- .../modules/common/Modals/DeleteJobModal.tsx | 7 +- ui/src/modules/common/Modals/DeleteModal.tsx | 12 +- .../modules/common/Modals/EntityEditModal.tsx | 2 - .../modules/common/Modals/SpecFailedModal.tsx | 95 +++ .../destinations/pages/CreateDestination.tsx | 36 +- .../destinations/pages/DestinationEdit.tsx | 68 +- 
.../destinations/pages/Destinations.tsx | 5 +- ui/src/modules/jobs/pages/JobCreation.tsx | 13 +- ui/src/modules/jobs/pages/JobEdit.tsx | 18 +- ui/src/modules/jobs/pages/JobHistory.tsx | 74 +- ui/src/modules/jobs/pages/JobLogs.tsx | 16 +- ui/src/modules/jobs/pages/JobSettings.tsx | 6 - ui/src/modules/jobs/pages/Jobs.tsx | 22 +- .../jobs/pages/SchemaConfiguration.tsx | 2 +- ui/src/modules/sources/pages/CreateSource.tsx | 40 +- ui/src/modules/sources/pages/SourceEdit.tsx | 44 +- ui/src/modules/sources/pages/Sources.tsx | 10 +- ui/src/store/destinationStore.ts | 13 +- ui/src/store/modalStore.ts | 4 + ui/src/store/sourceStore.ts | 15 +- ui/src/store/taskStore.ts | 4 +- ui/src/types/destinationTypes.ts | 3 +- ui/src/types/jobTypes.ts | 4 + ui/src/types/sourceTypes.ts | 3 +- ui/src/utils/constants.ts | 2 +- ui/src/utils/utils.ts | 6 +- worker.Dockerfile | 26 - 102 files changed, 3606 insertions(+), 3918 deletions(-) delete mode 100644 server/cmd/temporal-worker/main.go create mode 100644 server/internal/constants/messages.go rename server/internal/database/{postgres.go => database.go} (64%) delete mode 100644 server/internal/docker/runner.go delete mode 100644 server/internal/handlers/frontend_handlers.go create mode 100644 server/internal/handlers/handler.go delete mode 100644 server/internal/handlers/handlers_utils.go rename server/internal/handlers/{auth_middleware.go => middleware/auth.go} (71%) create mode 100644 server/internal/handlers/ui.go create mode 100644 server/internal/handlers/utils.go delete mode 100644 server/internal/logger/logger.go create mode 100644 server/internal/models/dto/requests.go rename server/internal/models/{ => dto}/response.go (70%) create mode 100644 server/internal/models/dto/validate.go delete mode 100644 server/internal/models/requests.go create mode 100644 server/internal/services/etl/auth.go create mode 100644 server/internal/services/etl/destination.go create mode 100644 server/internal/services/etl/job.go create mode 100644 
server/internal/services/etl/services.go create mode 100644 server/internal/services/etl/source.go create mode 100644 server/internal/services/etl/user.go create mode 100644 server/internal/services/etl/utils.go create mode 100644 server/internal/services/temporal/client.go create mode 100644 server/internal/services/temporal/execute.go create mode 100644 server/internal/services/temporal/utils.go delete mode 100644 server/internal/telemetry/job.go delete mode 100644 server/internal/temporal/README.md delete mode 100644 server/internal/temporal/activities.go delete mode 100644 server/internal/temporal/client.go delete mode 100644 server/internal/temporal/types.go delete mode 100644 server/internal/temporal/worker.go delete mode 100644 server/internal/temporal/workflows.go create mode 100644 server/utils/logger/logger.go rename server/{internal => utils}/telemetry/auth.go (71%) rename server/{internal => utils}/telemetry/constants.go (100%) rename server/{internal => utils}/telemetry/destination.go (73%) create mode 100644 server/utils/telemetry/job.go rename server/{internal => utils}/telemetry/source.go (69%) rename server/{internal => utils}/telemetry/sync.go (85%) rename server/{internal => utils}/telemetry/telemetry.go (86%) create mode 100644 ui/src/api/services/notificationService.ts create mode 100644 ui/src/modules/common/Modals/SpecFailedModal.tsx delete mode 100644 worker.Dockerfile diff --git a/.github/workflows/build-and-release.yml b/.github/workflows/build-and-release.yml index 6fd1749b..f10da0f7 100644 --- a/.github/workflows/build-and-release.yml +++ b/.github/workflows/build-and-release.yml @@ -1,52 +1,34 @@ name: Olake UI Build And Release + on: - workflow_call: + workflow_dispatch: inputs: - environment: - description: "Environment to build (master, staging, dev)" - required: true - default: "" - type: string version: description: "Version to release" required: true - default: "" - type: string - workflow_dispatch: + workflow_call: inputs: - 
environment: - description: "Environment to build (master, staging, dev)" - required: true - default: "dev" - type: choice - options: - - master - - staging - - dev version: description: "Version to release" required: true - default: "v0.0.0.dev" type: string + default: "" jobs: build_and_publish_frontend: - name: Build and publish frontend image for ${{ inputs.environment }} - environment: ${{ inputs.environment }} + name: Build and publish frontend image runs-on: ubuntu-latest + environment: Build UI + env: DOCKER_LOGIN: ${{ secrets.DOCKER_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} DOCKER_REPO: ${{ secrets.DOCKER_REPO || 'olakego' }} - DOCKER_REPO_WORKER: ${{ secrets.DOCKER_REPO_WORKER || 'olakego' }} - ENVIRONMENT: ${{ inputs.environment }} - VERSION: ${{ inputs.version }} + VERSION: ${{ inputs.version || github.event.inputs.version || 'v0.0.0.dev' }} steps: - name: Checkout code uses: actions/checkout@v3 - with: - ref: ${{ inputs.environment == 'master' && 'master' || (inputs.environment == 'staging' && 'staging' || inputs.environment == 'dev' && 'ci/workerReleaseIssues' || 'develop') }} - name: Set up Node.js uses: actions/setup-node@v3 @@ -66,9 +48,8 @@ jobs: - name: Setup environment variables run: | - echo "ENVIRONMENT=${{ env.ENVIRONMENT }}" >> $GITHUB_ENV echo "VERSION=${{ env.VERSION }}" >> $GITHUB_ENV - echo "Building frontend application for $ENVIRONMENT with version $VERSION" + echo "Building frontend for branch $GITHUB_REF_NAME with version $VERSION" - name: Run Release tool run: | diff --git a/.github/workflows/security-ci.yaml b/.github/workflows/security-ci.yaml index cd6d09f6..7217d6f5 100644 --- a/.github/workflows/security-ci.yaml +++ b/.github/workflows/security-ci.yaml @@ -3,11 +3,9 @@ on: push: branches: - "master" - - "feat/bff-api" pull_request: branches: - "*" - - "feat/bff-api" workflow_dispatch: inputs: logLevel: @@ -15,6 +13,7 @@ on: required: true default: "warning" + jobs: govulncheck: name: govulncheck @@ -23,13 
+22,13 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-go@v3 with: - check-latest: "true" - go-version: "1.24.x" + go-version: "1.24.x" # Exact version for stability - name: Install govulncheck run: go install golang.org/x/vuln/cmd/govulncheck@latest - name: Run vulnerability checks working-directory: ./server run: govulncheck ./... + gosec: name: GoSec Security Scanner runs-on: ubuntu-latest @@ -37,8 +36,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-go@v3 with: - check-latest: "true" - go-version: "1.24.x" + go-version: "1.24.x" # Removed check-latest to prevent upgrade to 1.25.x - name: install gosec run: curl -sfL https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s -- -b $(go env GOPATH)/bin - name: Run Gosec Security Scanner diff --git a/Dockerfile b/Dockerfile index b1d1931e..4d8fcfd9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,7 @@ # Stage 1: Go Builder (Backend) FROM golang:1.24.2-alpine AS go-builder + # Install git, as it might be needed by go mod download or go build RUN apk add --no-cache git @@ -19,6 +20,8 @@ RUN cd server && go build -ldflags="-w -s" -o /app/olake-server . 
# Stage 2: Frontend Builder FROM node:20-alpine AS node-builder + +# Reuse build-time arguments during UI build if needed WORKDIR /app/ui # Install pnpm globally diff --git a/Makefile b/Makefile index dd076e99..78863004 100644 --- a/Makefile +++ b/Makefile @@ -51,10 +51,11 @@ BACKEND_ENV_VARS = \ OLAKE_POSTGRES_PORT=5432 \ OLAKE_POSTGRES_DBNAME=postgres \ OLAKE_POSTGRES_SSLMODE=disable \ - LOGS_DIR=./logger/logs \ + LOGS_DIR=$(PWD)/logger/logs \ SESSION_ON=true \ TEMPORAL_ADDRESS=localhost:7233 \ - CONTAINER_REGISTRY_BASE=registry-1.docker.io + CONTAINER_REGISTRY_BASE=registry-1.docker.io \ + PERSISTENT_DIR=$(PWD)/olake-config # Frontend environment variables FRONTEND_ENV_VARS = \ diff --git a/api-contract.md b/api-contract.md index 339a0abe..a9a0fbd3 100644 --- a/api-contract.md +++ b/api-contract.md @@ -3,9 +3,8 @@ ### For now use olake as project id, later on it can be used to make multitenant system ## Base URL - ``` -http://localhost:8000 +http://localhost:8080 ``` ## Authentication @@ -41,7 +40,7 @@ http://localhost:8000 - **Request Body**: ```json { - "email": "string", + "email":"string", "username": "string", "password": "string" } @@ -53,21 +52,20 @@ http://localhost:8000 "success": "boolean", "message": "string", "data": { - "email": "string", - "username": "string" + "email":"string", + "username": "string", } } ``` ### Check Authentication - - **Endpoint**: `/auth` - **Method**: GET - **Description**: Verify if user is authenticated - **Headers**: `Authorization: Bearer ` // we are using cookie currently so frontend take care accordingly - **Response**: ```json - { + { "success": "boolean", "message": "string", "data": { @@ -77,7 +75,28 @@ http://localhost:8000 ``` ## Sources +### Get All Version Of Source +- **Endpoint**: `/api/v1/project/:projectid/sources/versions` +- **Method**: GET +- **Description**: Give spec based on source type +- **Headers**: `Authorization: Bearer ` +- **Request Body**: + ```json + { + "type":"string", + } + ``` +- 
**Response**: + ```json + { + "success": "boolean", + "message": "string", + "data": { + "version":["string","string"] + } + } + ``` ### Get Spec Of Source - **Endpoint**: `/api/v1/project/:projectid/sources/spec` @@ -87,8 +106,8 @@ http://localhost:8000 - **Request Body**: ```json { - "type": "string", - "version": "string" + "type":"string", + "version": "string", } ``` - **Response**: @@ -118,8 +137,8 @@ http://localhost:8000 ```json { - "type": "string", - "version": "string", + "type":"string", + "version":"string", "config": "json" } ``` @@ -149,9 +168,9 @@ http://localhost:8000 - **Request Body**: ```json { - "name": "string", // we have to make sure in database that it must also unique according to project id (for doubt let us discuss) - "type": "string", - "version": "string", // this field need to be shown on frontend as well, we discussed at time of design as well + "name": "string", // we have to make sure in database that it must also unique according to project id (for doubt let us discuss) + "type": "string", + "version":"string", // this field need to be shown on frontend as well, we discussed at time of design as well "config": "json" } ``` @@ -160,13 +179,12 @@ http://localhost:8000 { "success": "boolean", "message": "string", - "data": { - // whatever received send back - "name": "string", - "type": "string", - "version": "string", - "config": "json" - } + "data": { // whatever received send back + "name": "string", + "type": "string", + "version":"string", + "config": "json" + } } ``` @@ -205,7 +223,6 @@ http://localhost:8000 } ] } - ``` ### Update Source @@ -216,9 +233,9 @@ http://localhost:8000 - **Request Body**: ```json { - "name": "string", - "type": "string", - "version": "string", + "name": "string", + "type": "string", + "version":"string", "config": "json" } ``` @@ -227,39 +244,57 @@ http://localhost:8000 { "success": "boolean", "message": "string", - "data": { - // send same back - "name": "string", - "type": "string", - "version": 
"string", + "data": { // send same back + "name": "string", + "type": "string", + "version":"string", "config": "json" } } ``` -### Delete Source +### Delete Source - **Endpoint**: `/api/v1/project/:projectid/sources/:id` - **Method**: DELETE - **Description**: Delete a source - **Headers**: `Authorization: Bearer ` - **Response**: - ```json -{ - // Note: it is soft delete not hard delete - "success": "boolean", - "message": "string", - "data": { - "name": "string" // name of source deleted + { // Note: it is soft delete not hard delete + "success": "boolean", + "message": "string", + "data": { + "name" :"string", // name of source deleted + } } -} ``` + ## Destinations +### Get All Version Of Destinations +- **Endpoint**: `/api/v1/project/:projectid/destinations/versions` +- **Method**: GET +- **Description**: Give spec based on source type +- **Headers**: `Authorization: Bearer ` +- **Request Body**: + ```json + { + "type":"string", + } + ``` +- **Response**: + ```json + { + "success": "boolean", + "message": "string", + "data": { + "version":["string","string"] + } + } + ``` ### Destination Spec - - **Endpoint**: `/api/v1/project/:projectid/destinations/spec` - **Method**: GET - **Description**: Give spec based on destination type @@ -267,13 +302,13 @@ http://localhost:8000 - **Request Body**: ```json { - "type": "string", - "version": "string" + "type":"string", + "version": "string", } ``` - **Response**: ```json - { + { "success": "boolean", "message": "string", "data": { @@ -289,7 +324,7 @@ http://localhost:8000 // currently this is not avaialable in olake will build this -### Test Destination +### Test Destination - **Endpoint**: `/api/v1/project/:projectid/destinations/test` - **Method**: POST @@ -332,7 +367,7 @@ http://localhost:8000 "name": "string", "type": "string", "config": "json", - "version": "string" + "version":"string", } ``` - **Response**: @@ -345,7 +380,7 @@ http://localhost:8000 "name": "string", "type": "string", "config": "json", - 
"version": "string" // to create a job same version of destination and same version of source required + "version":"string", // to create a job same version of destination and same version of source required } } ``` @@ -358,7 +393,7 @@ http://localhost:8000 - **Headers**: `Authorization: Bearer ` - **Response**: ```json - { +{ "success": "boolean", "message": "string", "data": [ @@ -399,7 +434,7 @@ http://localhost:8000 "name": "string", "type": "string", "config": "json", - "version": "string" + "version":"string", } ``` - **Response**: @@ -410,7 +445,7 @@ http://localhost:8000 "data": { "name": "string", "type": "string", - "version": "string", + "version":"string", "config": "json" } } @@ -425,14 +460,14 @@ http://localhost:8000 - **Response**: ```json -{ - // NOTE: this is only soft delete not hard - "success": "boolean", - "message": "string", - "data": { - "name": "string" + + { // NOTE: this is only soft delete not hard + "success": "boolean", + "message": "string", + "data": { + "name": "string", + } } -} ``` ## Jobs @@ -458,10 +493,10 @@ http://localhost:8000 "name": "string", "type": "string", "config": "string", - "version": "string" + "version": "string", }, "frequency": "string", - "streams_config": "json" + "streams_config": "json", } ``` @@ -471,14 +506,14 @@ http://localhost:8000 "success": "boolean", "message": "string", "data": { - // request body as it is + // request body as it is } } ``` ### Get All Jobs -- **Endpoint**: `/api/v1/project/:projectid/jobs` // also use endpoint for filter such as /jobs/dest_id="some_id" or /jobs/source_id="some_id" +- **Endpoint**: `/api/v1/project/:projectid/jobs` // also use endpoint for filter such as /jobs/dest_id="some_id" or /jobs/source_id="some_id" - **Method**: GET - **Description**: Retrieve all jobs - **Headers**: `Authorization: Bearer ` @@ -495,23 +530,24 @@ http://localhost:8000 "name": "string", "type": "string", "config": "json", - "version": "string" + "version": "string", }, "destination": { 
"name": "string", "type": "string", "config": "json", - "version": "string" + "version": "string", }, - "streams_config": "json", + "streams_config":"json", "frequency": "string", "last_run_time": "timestamp", "last_run_state": "string", "created_at": "timestamp", "updated_at": "timestamp", - "created_by": "string", // username - "updated_by": "string" // username - // can also send state but if it is required + "activate": "boolean", + "created_by": "string", // username + "updated_by": "string", // username + // can also send state but if it is required } ] } @@ -532,17 +568,17 @@ http://localhost:8000 "name": "string", "type": "string", "config": "json", - "version": "string" + "version": "string", }, "destination": { "name": "string", "type": "string", "config": "json", - "version": "string" + "version": "string", }, "frequency": "string", "streams_config": "json", - "activate": "boolean" // send this to activate or deactivate job + "activate": "boolean", // send this to activate or deactivate job } ``` @@ -557,21 +593,23 @@ http://localhost:8000 "name": "string", "type": "string", "config": "json", - "version": "string" + "version": "string", }, "destination": { "name": "string", "type": "string", "config": "json", - "version": "string" + "version": "string", }, "frequency": "string", "streams_config": "json", - "activate": "boolean" + "activate": "boolean", } } ``` + + ### Delete Job - **Endpoint**: `/api/v1/project/:projectid/jobs/:id` @@ -582,11 +620,11 @@ http://localhost:8000 ```json { - "success": "boolean", - "message": "string", - "data": { - "name": "boolean" - } + "success": "boolean", + "message": "string", + "data": { + "name": "boolean" + } } ``` @@ -677,15 +715,14 @@ http://localhost:8000 "message": "string", "data": [ { - "id": "string", + "id":"string", "start_time": "timestamp", "runtime": "integer", "status": "string" - } + }, ] } ``` - ### cancel Job workflow - **Endpoint**: `/api/v1/project/:projectid/jobs/:jobid/cancel` @@ -705,21 +742,6 
@@ http://localhost:8000 } ``` - ### Job Sync - -- **Endpoint**: `/api/v1/project/:projectid/jobs/:id/sync` -- **Method**: POST -- **Description**: Sync the job -- **Headers**: `Authorization: Bearer ` -- **Response**: - - ```json - { - "success": "boolean", - "message": "string", - "data": null - } - ``` ###Activate/Inactivate Job @@ -747,6 +769,7 @@ http://localhost:8000 } ``` + - **Endpoint**: `/api/v1/project/:projectid/jobs/:jobid/task/:id/logs` - **Method**: GET - **Description**: Give the Logs of that particular Job @@ -759,7 +782,7 @@ http://localhost:8000 "success": "boolean", "message": "string", "data": { - "task_logs": "json" + "task_logs":"json" } } ``` diff --git a/docker-compose.yml b/docker-compose.yml index a184df8b..4adf42fd 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -90,7 +90,7 @@ services: restart: "no" temporal-worker: - image: ${CONTAINER_REGISTRY_BASE:-registry-1.docker.io}/olakego/ui-worker:latest + image: ${CONTAINER_REGISTRY_BASE:-registry-1.docker.io}/olakego/ui-worker:stag-latest pull_policy: always container_name: olake-temporal-worker networks: @@ -100,6 +100,7 @@ services: - <<: *workerConfigVolumeDetails environment: <<: *sharedEnvs + OLAKE_CALLBACK_URL: "http://olake-ui:8000/internal/worker/callback" depends_on: temporal: condition: service_started # Or service_healthy if temporal has a healthcheck diff --git a/release.sh b/release.sh index b82ea85a..39cd6400 100755 --- a/release.sh +++ b/release.sh @@ -92,59 +92,6 @@ function release_frontend() { echo "$(chalk green "Frontend release successful for $image_name version $tag_version")" } -function release_worker() { - local version=$1 - local platform=$2 - local environment=$3 # Could be 'dev', 'staging', 'master', etc. 
- local image_name="$DOCKER_REPO_WORKER" # Use a specific repo name for the worker, e.g., yourdockerhubuser/olake-worker - - # Set tag based on environment - local tag_version="" - local latest_tag="" - - case "$environment" in - "master") - tag_version="${version}" - latest_tag="latest" - ;; - "staging") - tag_version="stag-${version}" - latest_tag="stag-latest" - ;; - "dev"|*) # Default to dev prefix if not master or staging - tag_version="dev-${version}" - latest_tag="dev-latest" - ;; - esac - - # # It's good practice to ensure DOCKER_REPO_WORKER is set - # if [ -z "$DOCKER_REPO_WORKER" ]; then - # echo "$(chalk red "Error: DOCKER_REPO_WORKER environment variable is not set.")" - # return 1 # Or use fail "DOCKER_REPO_WORKER not set" if 'fail' is a global helper - # fi - - echo "Logging into Docker (if not already logged in by a previous function call)..." - # Assuming DOCKER_LOGIN and DOCKER_PASSWORD are set globally or passed - # If login is handled globally at the start of the script, this might be redundant - # but doesn't hurt to ensure. - docker login -u="$DOCKER_LOGIN" -p="$DOCKER_PASSWORD" || fail "Docker login failed for $DOCKER_LOGIN" - - echo "**** Releasing worker image $image_name for platforms [$platform] with version [$tag_version] ****" - - echo "Building and pushing worker Docker image..." - - # Assuming worker.Dockerfile is in the project root (context '.') - # If worker.Dockerfile or its context (e.g., server files) are elsewhere, adjust paths. - docker buildx build --platform "$platform" --push \ - -t "${image_name}:${tag_version}" \ - -t "${image_name}:${latest_tag}" \ - --build-arg ENVIRONMENT="$environment" \ - --build-arg APP_VERSION="$version" \ - -f worker.Dockerfile . || fail "Worker build failed. Exiting..." 
- - echo "$(chalk green "Worker release successful for $image_name version $tag_version")" -} - SEMVER_EXPRESSION='v([0-9]+\.[0-9]+\.[0-9]+)$' STAGING_VERSION_EXPRESSION='v([0-9]+\.[0-9]+\.[0-9]+)-[a-zA-Z0-9_.-]+' @@ -193,6 +140,5 @@ chalk green "=== Release version: $VERSION ===" # Call the frontend-only release function release_frontend "$VERSION" "$platform" "$ENVIRONMENT" -release_worker "$VERSION" "$platform" "$ENVIRONMENT" echo "$(chalk green "✅ Frontend release process completed successfully")" \ No newline at end of file diff --git a/server/README.md b/server/README.md index d781c470..1e67f05e 100644 --- a/server/README.md +++ b/server/README.md @@ -13,7 +13,7 @@ Olake Server is a RESTful API service built with the Beego framework that manage ### 1. Clone the Repository ```bash - git clone https://github.com/datazip-inc/olake-frontend.git + git clone https://github.com/datazip-inc/olake-ui.git ``` ### 2. Configure Application Settings (Auth only works when session enabled) diff --git a/server/cmd/temporal-worker/main.go b/server/cmd/temporal-worker/main.go deleted file mode 100644 index f5b8c253..00000000 --- a/server/cmd/temporal-worker/main.go +++ /dev/null @@ -1,68 +0,0 @@ -package main - -import ( - "os" - "os/signal" - "syscall" - - "github.com/beego/beego/v2/core/config" - "github.com/beego/beego/v2/core/logs" - "github.com/datazip/olake-frontend/server/internal/constants" - "github.com/datazip/olake-frontend/server/internal/database" - "github.com/datazip/olake-frontend/server/internal/docker" - "github.com/datazip/olake-frontend/server/internal/logger" - "github.com/datazip/olake-frontend/server/internal/telemetry" - "github.com/datazip/olake-frontend/server/internal/temporal" - "github.com/datazip/olake-frontend/server/utils" -) - -func main() { - // Initialize telemetry - telemetry.InitTelemetry() - // check constants - constants.Init() - - // init logger - logsdir, _ := config.String("logsdir") - logger.InitLogger(logsdir) - - // init log cleaner 
- utils.InitLogCleaner(docker.GetDefaultConfigDir(), utils.GetLogRetentionPeriod()) - - // init database - err := database.Init() - if err != nil { - logs.Critical("Failed to initialize database: %s", err) - os.Exit(1) - } - - logs.Info("Starting Olake Temporal worker...") - - // Create a new worker - worker, err := temporal.NewWorker() - if err != nil { - logs.Critical("Failed to create worker: %v", err) - os.Exit(1) - } - - // Start the worker in a goroutine - go func() { - err := worker.Start() - if err != nil { - logs.Critical("Failed to start worker: %v", err) - os.Exit(1) - } - }() - - // Setup signal handling for graceful shutdown - signalChan := make(chan os.Signal, 1) - signal.Notify(signalChan, os.Interrupt, syscall.SIGTERM) - - // Wait for termination signal - sig := <-signalChan - logs.Info("Received signal %v, shutting down worker...", sig) - - // Stop the worker - worker.Stop() - logs.Info("Worker stopped. Goodbye!") -} diff --git a/server/conf/app.conf b/server/conf/app.conf index 812c93ab..e8601038 100644 --- a/server/conf/app.conf +++ b/server/conf/app.conf @@ -3,6 +3,7 @@ httpport = ${HTTP_PORT||8000} runmode = ${RUN_MODE||dev} copyrequestbody = ${COPY_REQUEST_BODY||true} postgresdb = ${POSTGRES_DB} +encryptionkey = ${OLAKE_SECRET_KEY} OLAKE_POSTGRES_USER = ${OLAKE_POSTGRES_USER||temporal} OLAKE_POSTGRES_PASSWORD = ${OLAKE_POSTGRES_PASSWORD||temporal} OLAKE_POSTGRES_HOST = ${OLAKE_POSTGRES_HOST||postgresql} diff --git a/server/go.mod b/server/go.mod index 57c82d8c..2be44495 100644 --- a/server/go.mod +++ b/server/go.mod @@ -1,4 +1,4 @@ -module github.com/datazip/olake-frontend/server +module github.com/datazip-inc/olake-ui/server go 1.24.2 @@ -6,17 +6,34 @@ require github.com/beego/beego/v2 v2.3.8 require ( github.com/apache/spark-connect-go/v35 v35.0.0-20250317154112-ffd832059443 - github.com/aws/aws-sdk-go-v2/config v1.31.0 - github.com/aws/aws-sdk-go-v2/service/ecr v1.49.0 + github.com/aws/aws-sdk-go-v2/config v1.29.17 + 
github.com/aws/aws-sdk-go-v2/service/ecr v1.50.5 github.com/aws/aws-sdk-go-v2/service/kms v1.41.1 github.com/docker/docker v28.3.3+incompatible + github.com/go-playground/validator/v10 v10.27.0 github.com/lib/pq v1.10.9 github.com/oklog/ulid v1.3.1 + github.com/rs/zerolog v1.34.0 github.com/spf13/viper v1.20.1 github.com/testcontainers/testcontainers-go v0.39.0 go.temporal.io/sdk v1.34.0 golang.org/x/crypto v0.41.0 - golang.org/x/mod v0.27.0 + golang.org/x/mod v0.26.0 +) + +require ( + github.com/aws/aws-sdk-go-v2 v1.39.2 + github.com/aws/aws-sdk-go-v2/credentials v1.17.70 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.32 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.9 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.9 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.4 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.17 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.25.5 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.3 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.34.0 // indirect + github.com/aws/smithy-go v1.23.0 // indirect ) require ( @@ -36,16 +53,22 @@ require ( github.com/docker/go-units v0.5.0 // indirect github.com/ebitengine/purego v0.8.4 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/gabriel-vasile/mimetype v1.4.8 // indirect github.com/go-errors/errors v1.5.1 // indirect github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-ole/go-ole v1.2.6 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect github.com/goccy/go-json v0.10.5 // indirect github.com/google/flatbuffers v25.2.10+incompatible // indirect github.com/klauspost/compress v1.18.0 // indirect 
github.com/klauspost/cpuid/v2 v2.2.10 // indirect + github.com/leodido/go-urn v1.4.0 // indirect github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect github.com/magiconair/properties v1.8.10 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect github.com/moby/docker-image-spec v1.3.1 // indirect github.com/moby/go-archive v0.1.0 // indirect github.com/moby/patternmatcher v0.6.0 // indirect @@ -66,7 +89,7 @@ require ( github.com/yusufpapurcu/wmi v1.2.4 // indirect github.com/zeebo/xxh3 v1.0.2 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect go.opentelemetry.io/otel v1.38.0 // indirect go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 // indirect go.opentelemetry.io/otel/metric v1.38.0 // indirect @@ -79,21 +102,6 @@ require ( golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect ) -require ( - github.com/aws/aws-sdk-go-v2 v1.38.0 - github.com/aws/aws-sdk-go-v2/credentials v1.18.4 // indirect - github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.3 // indirect - github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.3 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.3 // indirect - github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.0 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.3 // indirect - github.com/aws/aws-sdk-go-v2/service/sso v1.28.0 // indirect - github.com/aws/aws-sdk-go-v2/service/ssooidc v1.33.0 // indirect - github.com/aws/aws-sdk-go-v2/service/sts v1.37.0 // indirect - github.com/aws/smithy-go v1.22.5 // indirect -) - require ( github.com/beorn7/perks v1.0.1 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect @@ -116,8 +124,8 @@ 
require ( github.com/prometheus/client_model v0.5.0 // indirect github.com/prometheus/common v0.48.0 // indirect github.com/prometheus/procfs v0.12.0 // indirect - github.com/robfig/cron v1.2.0 - github.com/sagikazarmark/locafero v0.8.0 // indirect + github.com/robfig/cron v1.2.0 // indirect + github.com/sagikazarmark/locafero v0.7.0 // indirect github.com/shiena/ansicolor v0.0.0-20200904210342-c7312218db18 // indirect github.com/sourcegraph/conc v0.3.0 // indirect github.com/spf13/afero v1.14.0 // indirect @@ -128,7 +136,8 @@ require ( github.com/subosito/gotenv v1.6.0 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect go.temporal.io/api v1.46.0 - go.uber.org/multierr v1.11.0 // indirect + go.uber.org/atomic v1.9.0 // indirect + go.uber.org/multierr v1.9.0 // indirect golang.org/x/net v0.43.0 // indirect golang.org/x/sync v0.16.0 // indirect golang.org/x/sys v0.36.0 // indirect diff --git a/server/go.sum b/server/go.sum index 2d36cf4f..4110f958 100644 --- a/server/go.sum +++ b/server/go.sum @@ -20,36 +20,36 @@ github.com/apache/spark-connect-go/v35 v35.0.0-20250317154112-ffd832059443 h1:pA github.com/apache/spark-connect-go/v35 v35.0.0-20250317154112-ffd832059443/go.mod h1:ODlxb8YN0y/JyS7h+vhz+afnQ+beSkYTqDHYtg2T6E8= github.com/apache/thrift v0.21.0 h1:tdPmh/ptjE1IJnhbhrcl2++TauVjy242rkV/UzJChnE= github.com/apache/thrift v0.21.0/go.mod h1:W1H8aR/QRtYNvrPeFXBtobyRkd0/YVhTc6i07XIAgDw= -github.com/aws/aws-sdk-go-v2 v1.38.0 h1:UCRQ5mlqcFk9HJDIqENSLR3wiG1VTWlyUfLDEvY7RxU= -github.com/aws/aws-sdk-go-v2 v1.38.0/go.mod h1:9Q0OoGQoboYIAJyslFyF1f5K1Ryddop8gqMhWx/n4Wg= -github.com/aws/aws-sdk-go-v2/config v1.31.0 h1:9yH0xiY5fUnVNLRWO0AtayqwU1ndriZdN78LlhruJR4= -github.com/aws/aws-sdk-go-v2/config v1.31.0/go.mod h1:VeV3K72nXnhbe4EuxxhzsDc/ByrCSlZwUnWH52Nde/I= -github.com/aws/aws-sdk-go-v2/credentials v1.18.4 h1:IPd0Algf1b+Qy9BcDp0sCUcIWdCQPSzDoMK3a8pcbUM= -github.com/aws/aws-sdk-go-v2/credentials v1.18.4/go.mod h1:nwg78FjH2qvsRM1EVZlX9WuGUJOL5od+0qvm0adEzHk= 
-github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.3 h1:GicIdnekoJsjq9wqnvyi2elW6CGMSYKhdozE7/Svh78= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.3/go.mod h1:R7BIi6WNC5mc1kfRM7XM/VHC3uRWkjc396sfabq4iOo= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.3 h1:o9RnO+YZ4X+kt5Z7Nvcishlz0nksIt2PIzDglLMP0vA= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.3/go.mod h1:+6aLJzOG1fvMOyzIySYjOFjcguGvVRL68R+uoRencN4= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.3 h1:joyyUFhiTQQmVK6ImzNU9TQSNRNeD9kOklqTzyk5v6s= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.3/go.mod h1:+vNIyZQP3b3B1tSLI0lxvrU9cfM7gpdRXMFfm67ZcPc= +github.com/aws/aws-sdk-go-v2 v1.39.2 h1:EJLg8IdbzgeD7xgvZ+I8M1e0fL0ptn/M47lianzth0I= +github.com/aws/aws-sdk-go-v2 v1.39.2/go.mod h1:sDioUELIUO9Znk23YVmIk86/9DOpkbyyVb1i/gUNFXY= +github.com/aws/aws-sdk-go-v2/config v1.29.17 h1:jSuiQ5jEe4SAMH6lLRMY9OVC+TqJLP5655pBGjmnjr0= +github.com/aws/aws-sdk-go-v2/config v1.29.17/go.mod h1:9P4wwACpbeXs9Pm9w1QTh6BwWwJjwYvJ1iCt5QbCXh8= +github.com/aws/aws-sdk-go-v2/credentials v1.17.70 h1:ONnH5CM16RTXRkS8Z1qg7/s2eDOhHhaXVd72mmyv4/0= +github.com/aws/aws-sdk-go-v2/credentials v1.17.70/go.mod h1:M+lWhhmomVGgtuPOhO85u4pEa3SmssPTdcYpP/5J/xc= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.32 h1:KAXP9JSHO1vKGCr5f4O6WmlVKLFFXgWYAGoJosorxzU= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.32/go.mod h1:h4Sg6FQdexC1yYG9RDnOvLbW1a/P986++/Y/a+GyEM8= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.9 h1:se2vOWGD3dWQUtfn4wEjRQJb1HK1XsNIt825gskZ970= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.9/go.mod h1:hijCGH2VfbZQxqCDN7bwz/4dzxV+hkyhjawAtdPWKZA= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.9 h1:6RBnKZLkJM4hQ+kN6E7yWFveOTg8NLPHAkqrs4ZPlTU= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.9/go.mod h1:V9rQKRmK7AWuEsOMnHzKj8WyrIir1yUJbZxDuZLFvXI= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 h1:bIqFDwgGXXN1Kpp99pDOdKMTTb5d2KyU5X/BZxjOkRo= 
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3/go.mod h1:H5O/EsxDWyU+LP/V8i5sm8cxoZgc2fdNR9bxlOFrQTo= -github.com/aws/aws-sdk-go-v2/service/ecr v1.49.0 h1:NgkSYzgM3UhdSrXUKkl49FhbIPpNguZE4EXEGRhDcEU= -github.com/aws/aws-sdk-go-v2/service/ecr v1.49.0/go.mod h1:bi1dAg6vk8KC8nyf6DjQ3dkNJbzTirMSmZHbcRNa2vE= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.0 h1:6+lZi2JeGKtCraAj1rpoZfKqnQ9SptseRZioejfUOLM= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.0/go.mod h1:eb3gfbVIxIoGgJsi9pGne19dhCBpK6opTYpQqAmdy44= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.3 h1:ieRzyHXypu5ByllM7Sp4hC5f/1Fy5wqxqY0yB85hC7s= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.3/go.mod h1:O5ROz8jHiOAKAwx179v+7sHMhfobFVi6nZt8DEyiYoM= +github.com/aws/aws-sdk-go-v2/service/ecr v1.50.5 h1:jzjNyiIrXJHumV1hwofcQLpIZtcDw+vPQL00rLI3s4g= +github.com/aws/aws-sdk-go-v2/service/ecr v1.50.5/go.mod h1:UtPKcYVHY6RrV9EaaM1KZGNaf9dgviFdsT6xoFMLQsM= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.4 h1:CXV68E2dNqhuynZJPB80bhPQwAKqBWVer887figW6Jc= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.4/go.mod h1:/xFi9KtvBXP97ppCz1TAEvU1Uf66qvid89rbem3wCzQ= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.17 h1:t0E6FzREdtCsiLIoLCWsYliNsRBgyGD/MCK571qk4MI= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.17/go.mod h1:ygpklyoaypuyDvOM5ujWGrYWpAK3h7ugnmKCU/76Ys4= github.com/aws/aws-sdk-go-v2/service/kms v1.41.1 h1:dkaX98cOXw4EgqpDXPqrVVLjsPR9T24wA2TcjrQiank= github.com/aws/aws-sdk-go-v2/service/kms v1.41.1/go.mod h1:Pqd9k4TuespkireN206cK2QBsaBTL6X+VPAez5Qcijk= -github.com/aws/aws-sdk-go-v2/service/sso v1.28.0 h1:Mc/MKBf2m4VynyJkABoVEN+QzkfLqGj0aiJuEe7cMeM= -github.com/aws/aws-sdk-go-v2/service/sso v1.28.0/go.mod h1:iS5OmxEcN4QIPXARGhavH7S8kETNL11kym6jhoS7IUQ= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.33.0 h1:6csaS/aJmqZQbKhi1EyEMM7yBW653Wy/B9hnBofW+sw= 
-github.com/aws/aws-sdk-go-v2/service/ssooidc v1.33.0/go.mod h1:59qHWaY5B+Rs7HGTuVGaC32m0rdpQ68N8QCN3khYiqs= -github.com/aws/aws-sdk-go-v2/service/sts v1.37.0 h1:MG9VFW43M4A8BYeAfaJJZWrroinxeTi2r3+SnmLQfSA= -github.com/aws/aws-sdk-go-v2/service/sts v1.37.0/go.mod h1:JdeBDPgpJfuS6rU/hNglmOigKhyEZtBmbraLE4GK1J8= -github.com/aws/smithy-go v1.22.5 h1:P9ATCXPMb2mPjYBgueqJNCA5S9UfktsW0tTxi+a7eqw= -github.com/aws/smithy-go v1.22.5/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI= +github.com/aws/aws-sdk-go-v2/service/sso v1.25.5 h1:AIRJ3lfb2w/1/8wOOSqYb9fUKGwQbtysJ2H1MofRUPg= +github.com/aws/aws-sdk-go-v2/service/sso v1.25.5/go.mod h1:b7SiVprpU+iGazDUqvRSLf5XmCdn+JtT1on7uNL6Ipc= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.3 h1:BpOxT3yhLwSJ77qIY3DoHAQjZsc4HEGfMCE4NGy3uFg= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.3/go.mod h1:vq/GQR1gOFLquZMSrxUK/cpvKCNVYibNyJ1m7JrU88E= +github.com/aws/aws-sdk-go-v2/service/sts v1.34.0 h1:NFOJ/NXEGV4Rq//71Hs1jC/NvPs1ezajK+yQmkwnPV0= +github.com/aws/aws-sdk-go-v2/service/sts v1.34.0/go.mod h1:7ph2tGpfQvwzgistp2+zga9f+bCjlQJPkPUmMgDSD7w= +github.com/aws/smithy-go v1.23.0 h1:8n6I3gXzWJB2DxBDnfxgBaSX6oe0d/t10qGz7OKqMCE= +github.com/aws/smithy-go v1.23.0/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI= github.com/beego/beego/v2 v2.3.8 h1:wplhB1pF4TxR+2SS4PUej8eDoH4xGfxuHfS7wAk9VBc= github.com/beego/beego/v2 v2.3.8/go.mod h1:8vl9+RrXqvodrl9C8yivX1e6le6deCK6RWeq8R7gTTg= github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= @@ -70,6 +70,7 @@ github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A= github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw= +github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= 
github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA= github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= @@ -102,6 +103,8 @@ github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHk github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM= +github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8= github.com/go-errors/errors v1.5.1 h1:ZwEMSLRCapFLflTpT7NKaAc7ukJ8ZPEjzlxt8rPN8bk= github.com/go-errors/errors v1.5.1/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= @@ -113,6 +116,14 @@ github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod 
h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4= +github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo= github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= @@ -120,6 +131,7 @@ github.com/go-viper/mapstructure/v2 v2.3.0 h1:27XbWsHIqhbdR5TIC911OfYvgSaW93HM+d github.com/go-viper/mapstructure/v2 v2.3.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= @@ -165,12 +177,20 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 
h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE= github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/mattn/go-sqlite3 v1.14.27 h1:drZCnuvf37yPfs95E5jd9s3XhdVWLal+6BOK6qrv6IU= github.com/mattn/go-sqlite3 v1.14.27/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= @@ -232,8 +252,11 @@ github.com/robfig/cron v1.2.0 h1:ZjScXvvxeQ63Dbyxy76Fj3AT3Ut0aKsyd2/tl3DTMuQ= github.com/robfig/cron v1.2.0/go.mod h1:JGuDeoQd7Z6yL4zQhZ3OPEVHB7fL6Ka6skscFHfmt2k= github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= -github.com/sagikazarmark/locafero v0.8.0 h1:mXaMVw7IqxNBxfv3LdWt9MDmcWDQ1fagDH918lOdVaQ= -github.com/sagikazarmark/locafero v0.8.0/go.mod h1:UBUyz37V+EdMS3hDF3QWIiVr/2dPrx49OMO0Bn0hJqk= +github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0= +github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY= +github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ= +github.com/sagikazarmark/locafero v0.7.0 
h1:5MqpDsTGNDhY8sGp0Aowyf0qKsPrhewaLSsFaodPcyo= +github.com/sagikazarmark/locafero v0.7.0/go.mod h1:2za3Cg5rMaTMoG/2Ulr9AwtFaIppKXTRYnozin4aB5k= github.com/shiena/ansicolor v0.0.0-20200904210342-c7312218db18 h1:DAYUYH5869yV94zvCES9F51oYtN5oGlwjxJJz7ZCnik= github.com/shiena/ansicolor v0.0.0-20200904210342-c7312218db18/go.mod h1:nkxAfR/5quYxwPZhyDxgasBMnRtBZd0FCEpawpjMUFg= github.com/shirou/gopsutil/v4 v4.25.6 h1:kLysI2JsKorfaFPcYmcJqbzROzsBWEOAtw6A7dIfqXs= @@ -282,8 +305,8 @@ github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0= github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 h1:TT4fX+nBOA/+LUkobKGW1ydGcn+G3vRw9+g5HwCphpk= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0/go.mod h1:L7UH0GbB0p47T4Rri3uHjbpCFYrVrwc1I25QhNPiGK8= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw= go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8= go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM= go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 h1:GqRJVj7UmLjCVyVJ3ZFLdPRmhDUp2zFmQe3RHIOsw24= @@ -305,10 +328,12 @@ go.temporal.io/api v1.46.0/go.mod h1:iaxoP/9OXMJcQkETTECfwYq4cw/bj4nwov8b3ZLVnXM go.temporal.io/sdk v1.34.0 h1:VLg/h6ny7GvLFVoQPqz2NcC93V9yXboQwblkRvZ1cZE= go.temporal.io/sdk v1.34.0/go.mod h1:iE4U5vFrH3asOhqpBBphpj9zNtw8btp8+MSaf5A0D3w= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE= 
+go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= -go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= -go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI= +go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ= go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -325,8 +350,8 @@ golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHl golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.27.0 h1:kb+q2PyFnEADO2IEF935ehFUXlWiNjJWtRNgBLSfbxQ= -golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc= +golang.org/x/mod v0.26.0 h1:EGMPT//Ezu+ylkCijjPc+f4Aih7sZvaAr+O3EHBxvZg= +golang.org/x/mod v0.26.0/go.mod h1:/j6NAhSk8iQ723BGAUyoAcn7SlD7s15Dp9Nd/SfeaFQ= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -362,8 +387,11 @@ golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k= golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= diff --git a/server/internal/constants/constants.go b/server/internal/constants/constants.go index b8ef8738..99f557fe 100644 --- a/server/internal/constants/constants.go +++ b/server/internal/constants/constants.go @@ -23,8 +23,44 @@ var ( DefaultConfigDir = "/tmp/olake-config" DefaultLogRetentionPeriod = 30 DefaultSpecVersion = "v0.2.0" + // logging + EnvLogLevel = "LOG_LEVEL" + EnvLogFormat = "LOG_FORMAT" + OrderByUpdatedAtDesc = "-updated_at" + // Frontend index path key + FrontendIndexPath = "FRONTEND_INDEX_PATH" + TemporalTaskQueue = "OLAKE_DOCKER_TASK_QUEUE" + + // conf keys + ConfEncryptionKey = "encryptionkey" + ConfTemporalAddress = "TEMPORAL_ADDRESS" + ConfDeploymentMode = "DEPLOYMENT_MODE" + ConfRunMode = "runmode" + ConfContainerRegistryBase = "CONTAINER_REGISTRY_BASE" + // database keys + ConfPostgresDB = "postgresdb" + ConfOLakePostgresUser = "OLAKE_POSTGRES_USER" + ConfOLakePostgresPassword = "OLAKE_POSTGRES_PASSWORD" + ConfOLakePostgresHost = "OLAKE_POSTGRES_HOST" + ConfOLakePostgresPort = "OLAKE_POSTGRES_PORT" + 
ConfOLakePostgresDBname = "OLAKE_POSTGRES_DBNAME" + ConfOLakePostgresSslmode = "OLAKE_POSTGRES_SSLMODE" ) +// Supported database/source types +var SupportedSourceTypes = []string{ + "mysql", + "postgres", + "oracle", + "mongodb", +} + +// Supported database/source types +var SupportedDestinationTypes = []string{ + "parquet", + "iceberg", +} + var RequiredConfigVariable = []string{ "OLAKE_POSTGRES_USER", "OLAKE_POSTGRES_PASSWORD", @@ -37,13 +73,15 @@ var RequiredConfigVariable = []string{ func Init() { viper.AutomaticEnv() - + viper.SetDefault(EnvLogFormat, "console") + viper.SetDefault(EnvLogLevel, "info") viper.SetDefault("PORT", defaultPort) viper.SetDefault("BUILD", version) viper.SetDefault("COMMITSHA", commitsha) viper.SetDefault("RELEASE_CHANNEL", releasechannel) viper.SetDefault("BASE_HOST", defaultBaseHost) viper.SetDefault("BASE_URL", fmt.Sprintf("%s:%v", viper.GetString("BASE_HOST"), viper.GetString("PORT"))) + viper.SetDefault(FrontendIndexPath, "/opt/frontend/dist/index.html") checkForRequiredVariables(RequiredConfigVariable) diff --git a/server/internal/constants/messages.go b/server/internal/constants/messages.go new file mode 100644 index 00000000..ec9245fc --- /dev/null +++ b/server/internal/constants/messages.go @@ -0,0 +1,20 @@ +package constants + +import "errors" + +// Common error messages +var ( + // User related errors + ErrUserNotFound = errors.New("user not found") + ErrInvalidCredentials = errors.New("invalid credentials") + ErrUserAlreadyExists = errors.New("user already exists") + ErrPasswordProcessing = errors.New("failed to process password") + + // Source related errors + ErrSourceNotFound = errors.New("source not found") +) + +// Validation messages +const ( + ValidationInvalidRequestFormat = "Invalid request format" +) diff --git a/server/internal/database/postgres.go b/server/internal/database/database.go similarity index 64% rename from server/internal/database/postgres.go rename to server/internal/database/database.go index 
cda24135..c6f1c939 100644 --- a/server/internal/database/postgres.go +++ b/server/internal/database/database.go @@ -6,30 +6,35 @@ import ( "net/url" "github.com/beego/beego/v2/client/orm" - "github.com/beego/beego/v2/core/logs" "github.com/beego/beego/v2/server/web" _ "github.com/beego/beego/v2/server/web/session/postgres" // required for session _ "github.com/lib/pq" // required for registering driver - "github.com/datazip/olake-frontend/server/internal/constants" - "github.com/datazip/olake-frontend/server/internal/models" + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/models" + "github.com/datazip-inc/olake-ui/server/utils/logger" ) -func Init() error { +type Database struct { + ormer orm.Ormer +} + +func Init() (*Database, error) { // register driver uri, err := BuildPostgresURIFromConfig() if err != nil { - return fmt.Errorf("failed to build postgres uri: %s", err) + return nil, fmt.Errorf("failed to build postgres uri: %s", err) } + err = orm.RegisterDriver("postgres", orm.DRPostgres) if err != nil { - return fmt.Errorf("failed to register postgres driver: %s", err) + return nil, fmt.Errorf("failed to register postgres driver: %s", err) } // register database err = orm.RegisterDataBase("default", "postgres", uri) if err != nil { - return fmt.Errorf("failed to register postgres database: %s", err) + return nil, fmt.Errorf("failed to register postgres database: %s", err) } // enable session by default @@ -54,8 +59,9 @@ func Init() error { // Create tables if they do not exist err = orm.RunSyncdb("default", false, true) if err != nil { - return fmt.Errorf("failed to sync database schema: %s", err) + return nil, fmt.Errorf("failed to sync database schema: %s", err) } + // Add session table if sessions are enabled if web.BConfig.WebConfig.Session.SessionOn { _, err = orm.NewOrm().Raw(`CREATE TABLE IF NOT EXISTS session ( @@ -65,28 +71,28 @@ func Init() error { );`).Exec() if err != nil { - return 
fmt.Errorf("failed to create session table: %s", err) + return nil, fmt.Errorf("failed to create session table: %s", err) } } - return nil + return &Database{ormer: orm.NewOrm()}, nil } // BuildPostgresURIFromConfig reads POSTGRES_DB_HOST, POSTGRES_DB_PORT, etc. from app.conf // and constructs the Postgres connection URI. func BuildPostgresURIFromConfig() (string, error) { - logs.Info("Building Postgres URI from config") + logger.Info("Building Postgres URI from config") // First, check if postgresdb is set directly - if dsn, err := web.AppConfig.String("postgresdb"); err == nil && dsn != "" { + if dsn, err := web.AppConfig.String(constants.ConfPostgresDB); err == nil && dsn != "" { return dsn, nil } - user, _ := web.AppConfig.String("OLAKE_POSTGRES_USER") - password, _ := web.AppConfig.String("OLAKE_POSTGRES_PASSWORD") - host, _ := web.AppConfig.String("OLAKE_POSTGRES_HOST") - port, _ := web.AppConfig.String("OLAKE_POSTGRES_PORT") - dbName, _ := web.AppConfig.String("OLAKE_POSTGRES_DBNAME") - sslMode, _ := web.AppConfig.String("OLAKE_POSTGRES_SSLMODE") + user, _ := web.AppConfig.String(constants.ConfOLakePostgresUser) + password, _ := web.AppConfig.String(constants.ConfOLakePostgresPassword) + host, _ := web.AppConfig.String(constants.ConfOLakePostgresHost) + port, _ := web.AppConfig.String(constants.ConfOLakePostgresPort) + dbName, _ := web.AppConfig.String(constants.ConfOLakePostgresDBname) + sslMode, _ := web.AppConfig.String(constants.ConfOLakePostgresSslmode) u := &url.URL{ Scheme: "postgres", diff --git a/server/internal/database/destination.go b/server/internal/database/destination.go index fc6a509a..0316ce87 100644 --- a/server/internal/database/destination.go +++ b/server/internal/database/destination.go @@ -2,132 +2,96 @@ package database import ( "fmt" - "time" - "github.com/beego/beego/v2/client/orm" - - "github.com/datazip/olake-frontend/server/internal/constants" - "github.com/datazip/olake-frontend/server/internal/models" - 
"github.com/datazip/olake-frontend/server/utils" + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/models" + "github.com/datazip-inc/olake-ui/server/utils" ) -// DestinationORM handles database operations for destinations -type DestinationORM struct { - ormer orm.Ormer - TableName string -} - -func NewDestinationORM() *DestinationORM { - return &DestinationORM{ - ormer: orm.NewOrm(), - TableName: constants.TableNameMap[constants.DestinationTable], - } -} - // decryptDestinationSliceConfigs decrypts config fields for a slice of destinations -func (r *DestinationORM) decryptDestinationSliceConfigs(destinations []*models.Destination) error { +func (db *Database) decryptDestinationSliceConfigs(destinations []*models.Destination) error { for _, dest := range destinations { dConfig, err := utils.Decrypt(dest.Config) if err != nil { - return fmt.Errorf("failed to decrypt destination config: %s", err) + return fmt.Errorf("failed to decrypt destination config id[%d]: %s", dest.ID, err) } dest.Config = dConfig } return nil } -func (r *DestinationORM) Create(destination *models.Destination) error { +func (db *Database) CreateDestination(destination *models.Destination) error { // Encrypt config before saving eConfig, err := utils.Encrypt(destination.Config) if err != nil { - return fmt.Errorf("failed to encrypt destination config: %s", err) + return fmt.Errorf("failed to encrypt destination config id[%d]: %s", destination.ID, err) } destination.Config = eConfig - _, err = r.ormer.Insert(destination) + _, err = db.ormer.Insert(destination) return err } -func (r *DestinationORM) GetAll() ([]*models.Destination, error) { +func (db *Database) ListDestinations() ([]*models.Destination, error) { var destinations []*models.Destination - _, err := r.ormer.QueryTable(r.TableName).RelatedSel().All(&destinations) + _, err := 
db.ormer.QueryTable(constants.TableNameMap[constants.DestinationTable]).RelatedSel().OrderBy(constants.OrderByUpdatedAtDesc).All(&destinations) if err != nil { - return nil, fmt.Errorf("failed to get all destinations: %s", err) + return nil, fmt.Errorf("failed to list destinations: %s", err) } // Decrypt config after reading - if err := r.decryptDestinationSliceConfigs(destinations); err != nil { - return nil, fmt.Errorf("failed to decrypt destination config: %s", err) + if err := db.decryptDestinationSliceConfigs(destinations); err != nil { + return nil, err } return destinations, nil } -func (r *DestinationORM) GetAllByProjectID(projectID string) ([]*models.Destination, error) { +func (db *Database) ListDestinationsByProjectID(projectID string) ([]*models.Destination, error) { var destinations []*models.Destination - _, err := r.ormer.QueryTable(r.TableName).Filter("project_id", projectID).RelatedSel().All(&destinations) + _, err := db.ormer.QueryTable(constants.TableNameMap[constants.DestinationTable]).Filter("project_id", projectID).RelatedSel().OrderBy(constants.OrderByUpdatedAtDesc).All(&destinations) if err != nil { - return nil, fmt.Errorf("failed to get all destinations by project_id[%s]: %s", projectID, err) + return nil, fmt.Errorf("failed to list destinations project_id[%s]: %s", projectID, err) } // Decrypt config after reading - if err := r.decryptDestinationSliceConfigs(destinations); err != nil { - return nil, fmt.Errorf("failed to decrypt destination config: %s", err) + if err := db.decryptDestinationSliceConfigs(destinations); err != nil { + return nil, err } return destinations, nil } -func (r *DestinationORM) GetByID(id int) (*models.Destination, error) { +func (db *Database) GetDestinationByID(id int) (*models.Destination, error) { destination := &models.Destination{ID: id} - err := r.ormer.Read(destination) + err := db.ormer.Read(destination) if err != nil { - return nil, fmt.Errorf("failed to get destination by ID: %s", err) + return nil, 
fmt.Errorf("failed to get destination id[%d]: %s", id, err) } // Decrypt config after reading dConfig, err := utils.Decrypt(destination.Config) if err != nil { - return nil, fmt.Errorf("failed to decrypt config for destination[%d]: %s", destination.ID, err) + return nil, fmt.Errorf("failed to decrypt destination config id[%d]: %s", destination.ID, err) } destination.Config = dConfig return destination, nil } -func (r *DestinationORM) Update(destination *models.Destination) error { - destination.UpdatedAt = time.Now() - +func (db *Database) UpdateDestination(destination *models.Destination) error { // Encrypt config before saving eConfig, err := utils.Encrypt(destination.Config) if err != nil { - return fmt.Errorf("failed to encrypt destination config: %s", err) + return fmt.Errorf("failed to encrypt destination[%d] config: %s", destination.ID, err) } destination.Config = eConfig - _, err = r.ormer.Update(destination) + _, err = db.ormer.Update(destination) return err } -func (r *DestinationORM) Delete(id int) error { +func (db *Database) DeleteDestination(id int) error { destination := &models.Destination{ID: id} - _, err := r.ormer.Delete(destination) + // Use ORM's Delete method which will automatically handle the soft delete + // by setting the DeletedAt field due to the ORM tags in BaseModel + _, err := db.ormer.Delete(destination) return err } - -// GetByNameAndType retrieves destinations by name, destType, and project ID -func (r *DestinationORM) GetByNameAndType(name, destType, projectID string) ([]*models.Destination, error) { - var destinations []*models.Destination - _, err := r.ormer.QueryTable(r.TableName). - Filter("name", name). - Filter("dest_type", destType). - Filter("project_id", projectID). 
- All(&destinations) - if err != nil { - return nil, fmt.Errorf("failed to get destination in project[%s] by name[%s] and type[%s]: %s", projectID, name, destType, err) - } - - // Decrypt config after reading - if err := r.decryptDestinationSliceConfigs(destinations); err != nil { - return nil, fmt.Errorf("failed to decrypt destination config: %s", err) - } - - return destinations, nil -} diff --git a/server/internal/database/job.go b/server/internal/database/job.go index 69132434..4f7559ed 100644 --- a/server/internal/database/job.go +++ b/server/internal/database/job.go @@ -2,37 +2,22 @@ package database import ( "fmt" - "time" "github.com/beego/beego/v2/client/orm" - "github.com/datazip/olake-frontend/server/internal/constants" - "github.com/datazip/olake-frontend/server/internal/models" - "github.com/datazip/olake-frontend/server/utils" + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/models" + "github.com/datazip-inc/olake-ui/server/utils" ) -// JobORM handles database operations for jobs -type JobORM struct { - ormer orm.Ormer - TableName string -} - -// NewJobORM creates a new instance of JobORM -func NewJobORM() *JobORM { - return &JobORM{ - ormer: orm.NewOrm(), - TableName: constants.TableNameMap[constants.JobTable], - } -} - // decryptJobConfig decrypts Config fields in related Source and Destination -func (r *JobORM) decryptJobConfig(job *models.Job) error { +func (db *Database) decryptJobConfig(job *models.Job) error { // Decrypt Source Config if loaded // TODO: verify why source_id and dest_id coming nil, it must not nil if job.SourceID != nil { decryptedConfig, err := utils.Decrypt(job.SourceID.Config) if err != nil { - return fmt.Errorf("failed to decrypt source config: %s", err) + return fmt.Errorf("failed to decrypt source config job_id[%d] source_id[%d]: %s", job.ID, job.SourceID.ID, err) } job.SourceID.Config = decryptedConfig } @@ -41,7 +26,7 @@ func (r *JobORM) decryptJobConfig(job 
*models.Job) error { if job.DestID != nil { decryptedConfig, err := utils.Decrypt(job.DestID.Config) if err != nil { - return fmt.Errorf("failed to decrypt destination config: %s", err) + return fmt.Errorf("failed to decrypt destination config job_id[%d] dest_id[%d]: %s", job.ID, job.DestID.ID, err) } job.DestID.Config = decryptedConfig } @@ -50,179 +35,152 @@ func (r *JobORM) decryptJobConfig(job *models.Job) error { } // decryptJobSliceConfig decrypts related entities for a slice of jobs -func (r *JobORM) decryptJobSliceConfig(jobs []*models.Job) error { +func (db *Database) decryptJobSliceConfig(jobs []*models.Job) error { for _, job := range jobs { - if err := r.decryptJobConfig(job); err != nil { - return fmt.Errorf("failed to decrypt job config: %s", err) + if err := db.decryptJobConfig(job); err != nil { + return fmt.Errorf("failed to decrypt job config job_id[%d]: %s", job.ID, err) } } return nil } // Create a new job -func (r *JobORM) Create(job *models.Job) error { - _, err := r.ormer.Insert(job) +func (db *Database) CreateJob(job *models.Job) error { + _, err := db.ormer.Insert(job) return err } // GetAll retrieves all jobs -func (r *JobORM) GetAll() ([]*models.Job, error) { +func (db *Database) ListJobs() ([]*models.Job, error) { var jobs []*models.Job - _, err := r.ormer.QueryTable(r.TableName).RelatedSel().All(&jobs) + _, err := db.ormer.QueryTable(constants.TableNameMap[constants.JobTable]).RelatedSel().OrderBy(constants.OrderByUpdatedAtDesc).All(&jobs) if err != nil { - return nil, fmt.Errorf("failed to get all jobs: %s", err) + return nil, fmt.Errorf("failed to list jobs: %s", err) } // Decrypt related Source and Destination configs - if err := r.decryptJobSliceConfig(jobs); err != nil { - return nil, fmt.Errorf("failed to decrypt job config: %s", err) + if err := db.decryptJobSliceConfig(jobs); err != nil { + return nil, err } return jobs, nil } -// GetAllByProjectID retrieves all jobs for a specific project -func (r *JobORM) 
GetAllByProjectID(projectID string) ([]*models.Job, error) { +// GetAllJobsByProjectID retrieves all jobs belonging to a specific project, +// including related Source and Destination, sorted by latest update time. +func (db *Database) ListJobsByProjectID(projectID string) ([]*models.Job, error) { var jobs []*models.Job - // Query sources in the project - sourceTable := constants.TableNameMap[constants.SourceTable] - sources := []int{} - _, err := r.ormer.Raw(fmt.Sprintf(`SELECT id FROM %q WHERE project_id = ?`, sourceTable), projectID).QueryRows(&sources) - if err != nil { - return nil, fmt.Errorf("failed to get sources for project ID %s: %s", projectID, err) - } - - // Query destinations in the project - destTable := constants.TableNameMap[constants.DestinationTable] - destinations := []int{} - _, err = r.ormer.Raw(fmt.Sprintf(`SELECT id FROM %q WHERE project_id = ?`, destTable), projectID).QueryRows(&destinations) + // Directly query jobs filtered by project_id — since each job already stores project_id + _, err := db.ormer.QueryTable(constants.TableNameMap[constants.JobTable]). + Filter("project_id", projectID). + RelatedSel(). + OrderBy(constants.OrderByUpdatedAtDesc). 
+ All(&jobs) if err != nil { - return nil, fmt.Errorf("failed to get destinations for project ID %s: %s", projectID, err) - } - - // If no sources or destinations in the project, return empty array - if len(sources) == 0 && len(destinations) == 0 { - return jobs, nil + return nil, fmt.Errorf("failed to list jobs project_id[%s]: %s", projectID, err) } - // Build query - qs := r.ormer.QueryTable(r.TableName) - // Filter by sources or destinations from the project - if len(sources) > 0 { - qs = qs.Filter("source_id__in", sources) - } - - if len(destinations) > 0 { - qs = qs.Filter("dest_id__in", destinations) - } - - // Add RelatedSel to load the related Source and Destination objects - _, err = qs.RelatedSel().All(&jobs) - if err != nil { - return nil, fmt.Errorf("failed to get jobs with related data for project ID %s: %s", projectID, err) + // If project has no jobs, return empty slice (not nil) + if len(jobs) == 0 { + return []*models.Job{}, nil } // Decrypt related Source and Destination configs - if err := r.decryptJobSliceConfig(jobs); err != nil { - return nil, fmt.Errorf("failed to decrypt job config: %s", err) + if err := db.decryptJobSliceConfig(jobs); err != nil { + return nil, err } return jobs, nil } // GetByID retrieves a job by ID -func (r *JobORM) GetByID(id int, decrypt bool) (*models.Job, error) { +func (db *Database) GetJobByID(id int, decrypt bool) (*models.Job, error) { job := &models.Job{ID: id} - err := r.ormer.Read(job) + err := db.ormer.Read(job) if err != nil { - return nil, fmt.Errorf("failed to get job by ID: %s", err) + return nil, fmt.Errorf("failed to get job id[%d]: %s", id, err) } // Load related entities (Source, Destination, etc.) 
- _, err = r.ormer.LoadRelated(job, "SourceID") + _, err = db.ormer.LoadRelated(job, "SourceID") if err != nil { - return nil, fmt.Errorf("failed to get job by ID: %s", err) + return nil, fmt.Errorf("failed to load source entities job_id[%d]: %s", id, err) } - _, err = r.ormer.LoadRelated(job, "DestID") + + _, err = db.ormer.LoadRelated(job, "DestID") if err != nil { - return nil, fmt.Errorf("failed to get job by ID: %s", err) + return nil, fmt.Errorf("failed to load destination entities job_id[%d]: %s", id, err) } // Decrypt related Source and Destination configs if decrypt { - if err := r.decryptJobConfig(job); err != nil { - return nil, fmt.Errorf("failed to decrypt job config: %s", err) + if err := db.decryptJobConfig(job); err != nil { + return nil, err } } return job, nil } -// Update a job -func (r *JobORM) Update(job *models.Job) error { - job.UpdatedAt = time.Now() - _, err := r.ormer.Update(job) - return err -} - -// Delete a job -func (r *JobORM) Delete(id int) error { - job := &models.Job{ID: id} - _, err := r.ormer.Delete(job) - return err -} - -// GetBySourceID retrieves all jobs associated with a source ID -func (r *JobORM) GetBySourceID(sourceID int) ([]*models.Job, error) { +func (db *Database) GetJobsBySourceID(sourceIDs []int) ([]*models.Job, error) { var jobs []*models.Job - source := &models.Source{ID: sourceID} - - _, err := r.ormer.QueryTable(r.TableName). - Filter("source_id", source). - RelatedSel(). 
- All(&jobs) - if err != nil { - return nil, fmt.Errorf("failed to get jobs by source ID: %s", err) + if len(sourceIDs) == 0 { + return jobs, nil } - - // Decrypt related Source and Destination configs - if err := r.decryptJobSliceConfig(jobs); err != nil { - return nil, fmt.Errorf("failed to decrypt job config: %s", err) + _, err := db.ormer.QueryTable(constants.TableNameMap[constants.JobTable]).Filter("source_id__in", sourceIDs).RelatedSel().All(&jobs) + if err != nil { + return nil, err } - return jobs, nil } -// GetByDestinationID retrieves all jobs associated with a destination ID -func (r *JobORM) GetByDestinationID(destID int) ([]*models.Job, error) { +func (db *Database) GetJobsByDestinationID(destIDs []int) ([]*models.Job, error) { var jobs []*models.Job - dest := &models.Destination{ID: destID} - - _, err := r.ormer.QueryTable(r.TableName). - Filter("dest_id", dest). - RelatedSel(). - All(&jobs) + if len(destIDs) == 0 { + return jobs, nil + } + _, err := db.ormer.QueryTable(constants.TableNameMap[constants.JobTable]).Filter("dest_id__in", destIDs).RelatedSel().All(&jobs) if err != nil { - return nil, fmt.Errorf("failed to get jobs by destination ID: %s", err) + return nil, err } + return jobs, nil +} - // Decrypt related Source and Destination configs - if err := r.decryptJobSliceConfig(jobs); err != nil { - return nil, fmt.Errorf("failed to decrypt job config: %s", err) +// Update a job +func (db *Database) UpdateJob(job *models.Job) error { + _, err := db.ormer.Update(job) + return err +} + +// BulkDeactivate deactivates multiple jobs by their IDs in a single query +func (db *Database) DeactivateJobs(ids []int) error { + if len(ids) == 0 { + return nil } - return jobs, nil + _, err := db.ormer.QueryTable(constants.TableNameMap[constants.JobTable]). + Filter("id__in", ids). 
+ Update(orm.Params{ + "active": false, + }) + return err +} + +// Delete a job +func (db *Database) DeleteJob(id int) error { + _, err := db.ormer.Delete(&models.Job{ID: id}) + return err } // IsJobNameUnique checks if a job name is unique within a project in the jobs table. -func (r *JobORM) IsJobNameUnique(projectID, jobName string) (bool, error) { - count, err := r.ormer.QueryTable(r.TableName). +func (db *Database) IsJobNameUniqueInProject(projectID, jobName string) (bool, error) { + count, err := db.ormer.QueryTable(constants.TableNameMap[constants.JobTable]). Filter("name", jobName). Filter("project_id", projectID). Count() if err != nil { - return false, fmt.Errorf("failed to check job name uniqueness: %w", err) + return false, fmt.Errorf("failed to check job name uniqueness project_id[%s] job_name[%s]: %s", projectID, jobName, err) } return count == 0, nil } diff --git a/server/internal/database/source.go b/server/internal/database/source.go index 97932b24..273765ad 100644 --- a/server/internal/database/source.go +++ b/server/internal/database/source.go @@ -2,117 +2,79 @@ package database import ( "fmt" - "time" - "github.com/beego/beego/v2/client/orm" - - "github.com/datazip/olake-frontend/server/internal/constants" - "github.com/datazip/olake-frontend/server/internal/models" - "github.com/datazip/olake-frontend/server/utils" + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/models" + "github.com/datazip-inc/olake-ui/server/utils" ) -// SourceORM handles database operations for sources -type SourceORM struct { - ormer orm.Ormer - TableName string -} - -func NewSourceORM() *SourceORM { - return &SourceORM{ - ormer: orm.NewOrm(), - TableName: constants.TableNameMap[constants.SourceTable], - } -} - // decryptSourceSliceConfigs decrypts config fields for a slice of sources -func (r *SourceORM) decryptSourceSliceConfigs(sources []*models.Source) error { +func (db *Database) 
decryptSourceSliceConfigs(sources []*models.Source) error { for _, source := range sources { dConfig, err := utils.Decrypt(source.Config) if err != nil { - return fmt.Errorf("failed to decrypt source config: %s", err) + return fmt.Errorf("failed to decrypt source config id[%d]: %s", source.ID, err) } source.Config = dConfig } return nil } -func (r *SourceORM) Create(source *models.Source) error { +func (db *Database) CreateSource(source *models.Source) error { // Encrypt config before saving eConfig, err := utils.Encrypt(source.Config) if err != nil { - return fmt.Errorf("failed to encrypt source config: %s", err) + return fmt.Errorf("failed to encrypt source config id[%d]: %s", source.ID, err) } source.Config = eConfig - _, err = r.ormer.Insert(source) + _, err = db.ormer.Insert(source) return err } -func (r *SourceORM) GetAll() ([]*models.Source, error) { +func (db *Database) ListSources() ([]*models.Source, error) { var sources []*models.Source - _, err := r.ormer.QueryTable(r.TableName).RelatedSel().All(&sources) + _, err := db.ormer.QueryTable(constants.TableNameMap[constants.SourceTable]).RelatedSel().OrderBy(constants.OrderByUpdatedAtDesc).All(&sources) if err != nil { - return nil, fmt.Errorf("failed to get all sources: %s", err) + return nil, fmt.Errorf("failed to list sources: %s", err) } // Decrypt config after reading - if err := r.decryptSourceSliceConfigs(sources); err != nil { - return nil, fmt.Errorf("failed to decrypt source config: %s", err) + if err := db.decryptSourceSliceConfigs(sources); err != nil { + return nil, err } return sources, nil } -func (r *SourceORM) GetByID(id int) (*models.Source, error) { +func (db *Database) GetSourceByID(id int) (*models.Source, error) { source := &models.Source{ID: id} - err := r.ormer.Read(source) + err := db.ormer.Read(source) if err != nil { - return nil, fmt.Errorf("failed to get source by id[%d]: %s", id, err) + return nil, fmt.Errorf("failed to get source id[%d]: %s", id, err) } // Decrypt config after 
reading dConfig, err := utils.Decrypt(source.Config) if err != nil { - return nil, fmt.Errorf("failed to decrypt source config by id[%d]: %s", source.ID, err) + return nil, fmt.Errorf("failed to decrypt source config id[%d]: %s", source.ID, err) } source.Config = dConfig return source, nil } -func (r *SourceORM) Update(source *models.Source) error { - // TODO: remove all code managed db timestamps - source.UpdatedAt = time.Now() +func (db *Database) UpdateSource(source *models.Source) error { // Encrypt config before saving eConfig, err := utils.Encrypt(source.Config) if err != nil { - return fmt.Errorf("failed to encrypt source config: %s", err) + return fmt.Errorf("failed to encrypt source config id[%d]: %s", source.ID, err) } source.Config = eConfig - _, err = r.ormer.Update(source) + _, err = db.ormer.Update(source) return err } -func (r *SourceORM) Delete(id int) error { +func (db *Database) DeleteSource(id int) error { source := &models.Source{ID: id} - _, err := r.ormer.Delete(source) + _, err := db.ormer.Delete(source) return err } - -// GetByNameAndType retrieves sources by name, type, and project ID -func (r *SourceORM) GetByNameAndType(name, sourceType, projectIDStr string) ([]*models.Source, error) { - var sources []*models.Source - _, err := r.ormer.QueryTable(r.TableName). - Filter("name", name). - Filter("type", sourceType). - Filter("project_id", projectIDStr). 
- All(&sources) - if err != nil { - return nil, fmt.Errorf("failed to get source by name: %s and type: %s and project_id: %s: %s", name, sourceType, projectIDStr, err) - } - - // Decrypt config after reading - if err := r.decryptSourceSliceConfigs(sources); err != nil { - return nil, fmt.Errorf("failed to decrypt source config: %s", err) - } - - return sources, nil -} diff --git a/server/internal/database/user.go b/server/internal/database/user.go index baac82aa..9cdf70fb 100644 --- a/server/internal/database/user.go +++ b/server/internal/database/user.go @@ -2,68 +2,52 @@ package database import ( "fmt" - "time" - "github.com/beego/beego/v2/client/orm" "golang.org/x/crypto/bcrypt" - "github.com/datazip/olake-frontend/server/internal/constants" - "github.com/datazip/olake-frontend/server/internal/models" + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/models" ) -// UserORM handles database operations -type UserORM struct { - ormer orm.Ormer - TableName string -} - -func NewUserORM() *UserORM { - return &UserORM{ - ormer: orm.NewOrm(), - TableName: constants.TableNameMap[constants.UserTable], - } -} - -func (r *UserORM) FindByUsername(username string) (*models.User, error) { +func (db *Database) GetUserByUsername(username string) (*models.User, error) { var user models.User - err := r.ormer.QueryTable(r.TableName).Filter("username", username).One(&user) + err := db.ormer.QueryTable(constants.TableNameMap[constants.UserTable]).Filter("username", username).One(&user) return &user, err } -func (r *UserORM) ComparePassword(hashedPassword, plainPassword string) error { +func (db *Database) CompareUserPassword(hashedPassword, plainPassword string) error { return bcrypt.CompareHashAndPassword([]byte(hashedPassword), []byte(plainPassword)) } -func (r *UserORM) Create(user *models.User) error { - exists := r.ormer.QueryTable(r.TableName).Filter("username", user.Username).Exist() +func (db *Database) 
CreateUser(user *models.User) error { + exists := db.ormer.QueryTable(constants.TableNameMap[constants.UserTable]).Filter("username", user.Username).Exist() if exists { return fmt.Errorf("username already exists") } - _, err := r.ormer.Insert(user) + _, err := db.ormer.Insert(user) return err } -func (r *UserORM) GetAll() ([]*models.User, error) { +func (db *Database) ListUsers() ([]*models.User, error) { var users []*models.User - _, err := r.ormer.QueryTable(r.TableName).All(&users) + _, err := db.ormer.QueryTable(constants.TableNameMap[constants.UserTable]).All(&users) return users, err } -func (r *UserORM) GetByID(id int) (*models.User, error) { +func (db *Database) GetUserByID(id int) (*models.User, error) { user := &models.User{ID: id} - err := r.ormer.Read(user) + err := db.ormer.Read(user) return user, err } -func (r *UserORM) Update(user *models.User) error { - user.UpdatedAt = time.Now() - _, err := r.ormer.Update(user) +func (db *Database) UpdateUser(user *models.User) error { + _, err := db.ormer.Update(user) return err } -func (r *UserORM) Delete(id int) error { +func (db *Database) DeleteUser(id int) error { user := &models.User{ID: id} - _, err := r.ormer.Delete(user) + _, err := db.ormer.Delete(user) return err } diff --git a/server/internal/docker/runner.go b/server/internal/docker/runner.go deleted file mode 100644 index d123e8df..00000000 --- a/server/internal/docker/runner.go +++ /dev/null @@ -1,497 +0,0 @@ -package docker - -import ( - "context" - "crypto/sha256" - "encoding/json" - "fmt" - "os" - "os/exec" - "path/filepath" - "strconv" - "strings" - - "github.com/beego/beego/v2/core/logs" - "github.com/beego/beego/v2/server/web" - "github.com/datazip/olake-frontend/server/internal/constants" - "github.com/datazip/olake-frontend/server/internal/database" - "github.com/datazip/olake-frontend/server/internal/models" - "github.com/datazip/olake-frontend/server/internal/telemetry" - "github.com/datazip/olake-frontend/server/utils" - 
"golang.org/x/mod/semver" -) - -// Constants -const ( - DefaultDirPermissions = 0755 - DefaultFilePermissions = 0644 -) - -// Command represents a Docker command type -type Command string - -const ( - Discover Command = "discover" - Spec Command = "spec" - Check Command = "check" - Sync Command = "sync" -) - -// File configuration for different operations -type FileConfig struct { - Name string - Data string -} - -// Runner is responsible for executing Docker commands -type Runner struct { - WorkingDir string - anonymousID string -} - -// NewRunner creates a new Docker runner -func NewRunner(workingDir string) *Runner { - if err := utils.CreateDirectory(workingDir, DefaultDirPermissions); err != nil { - logs.Critical("Failed to create working directory %s: %v", workingDir, err) - } - - return &Runner{ - WorkingDir: workingDir, - anonymousID: telemetry.GetTelemetryUserID(), - } -} - -// GetDefaultConfigDir returns the default directory for storing config files -func GetDefaultConfigDir() string { - return constants.DefaultConfigDir -} - -// setupWorkDirectory creates a working directory and returns the full path -func (r *Runner) setupWorkDirectory(subDir string) (string, error) { - workDir := filepath.Join(r.WorkingDir, subDir) - if err := utils.CreateDirectory(workDir, DefaultDirPermissions); err != nil { - return "", fmt.Errorf("failed to create work directory: %v", err) - } - return workDir, nil -} - -// writeConfigFiles writes multiple configuration files to the specified directory -func (r *Runner) writeConfigFiles(workDir string, configs []FileConfig) error { - for _, config := range configs { - filePath := filepath.Join(workDir, config.Name) - if err := utils.WriteFile(filePath, []byte(config.Data), DefaultFilePermissions); err != nil { - return fmt.Errorf("failed to write %s: %v", config.Name, err) - } - } - return nil -} - -// GetDockerImageName constructs a Docker image name based on source type and version -func (r *Runner) GetDockerImageName(sourceType, 
version string) string { - return fmt.Sprintf("olakego/source-%s:%s", sourceType, version) -} - -// ExecuteDockerCommand executes a Docker command with the given parameters -func (r *Runner) ExecuteDockerCommand(ctx context.Context, containerName, flag string, command Command, sourceType, version, configPath string, additionalArgs ...string) ([]byte, error) { - outputDir := filepath.Dir(configPath) - if err := utils.CreateDirectory(outputDir, DefaultDirPermissions); err != nil { - return nil, err - } - - dockerArgs := r.buildDockerArgs(ctx, containerName, flag, command, sourceType, version, configPath, outputDir, additionalArgs...) - if len(dockerArgs) == 0 { - return nil, fmt.Errorf("failed to build docker args") - } - - logs.Info("Running Docker command: docker %s\n", strings.Join(dockerArgs, " ")) - - dockerCmd := exec.CommandContext(ctx, "docker", dockerArgs...) - output, err := dockerCmd.CombinedOutput() - - logs.Info("Docker command output: %s\n", string(output)) - - if err != nil { - if exitErr, ok := err.(*exec.ExitError); ok { - return nil, fmt.Errorf("docker command failed with exit status %d", exitErr.ExitCode()) - } - return nil, err - } - - return output, nil -} - -// buildDockerArgs constructs Docker command arguments -func (r *Runner) buildDockerArgs(ctx context.Context, containerName, flag string, command Command, sourceType, version, configPath, outputDir string, additionalArgs ...string) []string { - hostOutputDir := r.getHostOutputDir(outputDir) - - repositoryBase, err := web.AppConfig.String("CONTAINER_REGISTRY_BASE") - if err != nil { - logs.Critical("failed to get CONTAINER_REGISTRY_BASE: %s", err) - return nil - } - imageName := r.GetDockerImageName(sourceType, version) - - // If using ECR, ensure login before run - if strings.Contains(repositoryBase, "ecr") { - imageName = fmt.Sprintf("%s/%s", repositoryBase, imageName) - accountID, region, _, err := utils.ParseECRDetails(imageName) - if err != nil { - logs.Critical("failed to parse ECR 
details: %s", err) - return nil - } - if err := utils.DockerLoginECR(ctx, region, accountID); err != nil { - logs.Critical("failed to login to ECR: %s", err) - return nil - } - } - - // base docker args - dockerArgs := []string{"run", "--name", containerName} - - if hostOutputDir != "" { - dockerArgs = append(dockerArgs, "-v", fmt.Sprintf("%s:/mnt/config", hostOutputDir)) - } - - for key, value := range utils.GetWorkerEnvVars() { - dockerArgs = append(dockerArgs, "-e", fmt.Sprintf("%s=%s", key, value)) - } - - dockerArgs = append(dockerArgs, imageName, string(command)) - - if flag != "" { - dockerArgs = append(dockerArgs, fmt.Sprintf("--%s", flag)) - } - - if configPath != "" { - dockerArgs = append(dockerArgs, fmt.Sprintf("/mnt/config/%s", filepath.Base(configPath))) - } - - if encryptionKey := os.Getenv(constants.EncryptionKey); encryptionKey != "" { - dockerArgs = append(dockerArgs, "--encryption-key", encryptionKey) - } - - return append(dockerArgs, additionalArgs...) -} - -// getHostOutputDir determines the host output directory path -func (r *Runner) getHostOutputDir(outputDir string) string { - if persistentDir := os.Getenv("PERSISTENT_DIR"); persistentDir != "" { - hostOutputDir := strings.Replace(outputDir, constants.DefaultConfigDir, persistentDir, 1) - logs.Info("hostOutputDir %s\n", hostOutputDir) - return hostOutputDir - } - return outputDir -} - -func (r *Runner) FetchSpec(ctx context.Context, destinationType, sourceType, version, workflowID string) (models.SpecOutput, error) { - flag := utils.Ternary(destinationType != "", "destination-type", "").(string) - dockerArgs := r.buildDockerArgs(ctx, workflowID, flag, Spec, sourceType, version, "", "", destinationType) - - cmd := exec.CommandContext(ctx, "docker", dockerArgs...) 
- logs.Info("Running Docker command: docker %s\n", strings.Join(dockerArgs, " ")) - output, err := cmd.CombinedOutput() - if err != nil { - return models.SpecOutput{}, fmt.Errorf("docker command failed: %v\nOutput: %s", err, string(output)) - } - spec, err := utils.ExtractJSON(string(output)) - if err != nil { - return models.SpecOutput{}, fmt.Errorf("failed to parse spec: %s", string(output)) - } - return models.SpecOutput{Spec: spec}, nil -} - -// TestConnection runs the check command and returns connection status -func (r *Runner) TestConnection(ctx context.Context, flag, sourceType, version, config, workflowID string) (map[string]interface{}, error) { - workDir, err := r.setupWorkDirectory(workflowID) - if err != nil { - return nil, err - } - - configs := []FileConfig{ - {Name: "config.json", Data: config}, - {Name: "user_id.txt", Data: r.anonymousID}, - } - - if err := r.writeConfigFiles(workDir, configs); err != nil { - return nil, err - } - - configPath := filepath.Join(workDir, "config.json") - output, err := r.ExecuteDockerCommand(ctx, workflowID, flag, Check, sourceType, version, configPath) - if err != nil { - return nil, err - } - - logs.Info("check command output: %s\n", string(output)) - - logMsg, err := utils.ExtractJSON(string(output)) - if err != nil { - return nil, err - } - - connectionStatus, ok := logMsg["connectionStatus"].(map[string]interface{}) - if !ok || connectionStatus == nil { - return nil, fmt.Errorf("connection status not found") - } - - status, statusOk := connectionStatus["status"].(string) - message, _ := connectionStatus["message"].(string) // message is optional - if !statusOk { - return nil, fmt.Errorf("connection status not found") - } - - return map[string]interface{}{ - "message": message, - "status": status, - }, nil -} - -// GetCatalog runs the discover command and returns catalog data -func (r *Runner) GetCatalog(ctx context.Context, sourceType, version, config, workflowID, streamsConfig, jobName string) 
(map[string]interface{}, error) { - workDir, err := r.setupWorkDirectory(workflowID) - if err != nil { - return nil, err - } - configs := []FileConfig{ - {Name: "config.json", Data: config}, - {Name: "streams.json", Data: streamsConfig}, - {Name: "user_id.txt", Data: r.anonymousID}, - } - - if err := r.writeConfigFiles(workDir, configs); err != nil { - return nil, err - } - - configPath := filepath.Join(workDir, "config.json") - catalogPath := filepath.Join(workDir, "streams.json") - var catalogsArgs []string - if streamsConfig != "" { - catalogsArgs = append(catalogsArgs, "--catalog", "/mnt/config/streams.json") - } - if jobName != "" && semver.Compare(version, "v0.2.0") >= 0 { - catalogsArgs = append(catalogsArgs, "--destination-database-prefix", jobName) - } - _, err = r.ExecuteDockerCommand(ctx, workflowID, "config", Discover, sourceType, version, configPath, catalogsArgs...) - if err != nil { - return nil, err - } - - // Simplified JSON parsing - just parse if exists, return error if not - return utils.ParseJSONFile(catalogPath) -} - -// RunSync runs the sync command to transfer data from source to destination -func (r *Runner) RunSync(ctx context.Context, jobID int, workflowID string) (map[string]interface{}, error) { - // Deterministic container name to enable adoption across retries - containerName := WorkflowHash(workflowID) - - // Setup work dir and configs - workDir, err := r.setupWorkDirectory(containerName) - if err != nil { - logs.Error("workflowID %s: failed to setup work directory: %s", workflowID, err) - return nil, err - } - - // Marker to indicate we have launched once; prevents relaunch after retries - launchedMarker := filepath.Join(workDir, "logs") - - // Inspect container state - state := getContainerState(ctx, containerName, workflowID) - - // 1) If container is running, adopt and wait for completion - if state.Exists && state.Running { - logs.Info("workflowID %s: adopting running container %s", workflowID, containerName) - if err := 
waitContainer(ctx, containerName, workflowID); err != nil { - logs.Error("workflowID %s: container wait failed: %s", workflowID, err) - return nil, err - } - state = getContainerState(ctx, containerName, workflowID) - } - - // 2) If container exists and exited, treat as finished: cleanup and return status - if state.Exists && !state.Running && state.ExitCode != nil { - logs.Info("workflowID %s: container %s exited with code %d", workflowID, containerName, *state.ExitCode) - if *state.ExitCode == 0 { - return map[string]interface{}{"status": "completed"}, nil - } - // Return typed error so policy can decide retry vs. fail-fast - return nil, fmt.Errorf("workflowID %s: container %s exit %d", workflowID, containerName, *state.ExitCode) - } - - // 3) First launch path: only if we never launched and nothing is running - if _, err := os.Stat(launchedMarker); os.IsNotExist(err) { - logs.Info("workflowID %s: first launch path, preparing configs", workflowID) - jobORM := database.NewJobORM() - job, err := jobORM.GetByID(jobID, false) - if err != nil { - logs.Error("workflowID %s: failed to fetch job %d: %s", workflowID, jobID, err) - return nil, err - } - configs := []FileConfig{ - {Name: "config.json", Data: job.SourceID.Config}, - {Name: "streams.json", Data: job.StreamsConfig}, - {Name: "writer.json", Data: job.DestID.Config}, - {Name: "state.json", Data: job.State}, - {Name: "user_id.txt", Data: r.anonymousID}, - } - if err := r.writeConfigFiles(workDir, configs); err != nil { - logs.Error("workflowID %s: failed to write config files: %s", workflowID, err) - return nil, err - } - - configPath := filepath.Join(workDir, "config.json") - logs.Info("workflowID %s: executing docker container %s", workflowID, containerName) - - if _, err = r.ExecuteDockerCommand( - ctx, - containerName, - "config", - Sync, - job.SourceID.Type, - job.SourceID.Version, - configPath, - "--catalog", "/mnt/config/streams.json", - "--destination", "/mnt/config/writer.json", - "--state", 
"/mnt/config/state.json", - ); err != nil { - logs.Error("workflowID %s: docker execution failed: %s", workflowID, err) - return nil, err - } - - logs.Info("workflowID %s: container %s completed successfully", workflowID, containerName) - return map[string]interface{}{"status": "completed"}, nil - } - // Skip if container is not running, was already launched (logs exist), and no new run is needed. - logs.Info("workflowID %s: container %s already handled, skipping launch", workflowID, containerName) - return map[string]interface{}{"status": "skipped"}, nil -} - -type ContainerState struct { - Exists bool - Running bool - ExitCode *int -} - -func getContainerState(ctx context.Context, name, workflowID string) ContainerState { - // docker inspect returns fields if exists - cmd := exec.CommandContext(ctx, "docker", "inspect", "-f", "{{.State.Status}} {{.State.Running}} {{.State.ExitCode}}", name) - out, err := cmd.CombinedOutput() - if err != nil { - // treat not found as non-existent - logs.Warn("workflowID %s: docker inspect failed for %s: %s, output: %s", workflowID, name, err, string(out)) - return ContainerState{Exists: false} - } - // Split Docker inspect output into fields: status, running flag, and exit code - // Example: "exited false 137" → parts[0]="exited", parts[1]="false", parts[2]="137" - parts := strings.Fields(strings.TrimSpace(string(out))) - if len(parts) < 3 { - return ContainerState{Exists: false} - } - // Docker .State.Status can be "created", "running", "paused", "restarting", "removing", "exited", or "dead"; we only handle running vs exited/dead. 
- status := parts[0] - running := parts[1] == "true" - var ec *int - if !running && (status == "exited" || status == "dead") { - if code, convErr := strconv.Atoi(parts[2]); convErr == nil { - ec = &code - } - } - return ContainerState{Exists: true, Running: running, ExitCode: ec} -} - -func waitContainer(ctx context.Context, name, workflowID string) error { - // docker wait prints exit code; validate non-zero as error - cmd := exec.CommandContext(ctx, "docker", "wait", name) - out, err := cmd.CombinedOutput() - if err != nil { - logs.Error("workflowID %s: docker wait failed for %s: %s, output: %s", workflowID, name, err, string(out)) - return fmt.Errorf("docker wait failed: %s", err) - } - strOut := strings.TrimSpace(string(out)) - code, convErr := strconv.Atoi(strOut) - if convErr != nil { - logs.Error("workflowID %s: failed to parse exit code from docker wait output %q: %s", workflowID, strOut, convErr) - return fmt.Errorf("failed to parse exit code: %s", convErr) - } - - if code != 0 { - return fmt.Errorf("workflowID %s: container %s exited with code %d", workflowID, name, code) - } - return nil -} - -// StopContainer stops a container by name, falling back to kill if needed. 
-func StopContainer(ctx context.Context, workflowID string) error { - containerName := WorkflowHash(workflowID) - logs.Info("workflowID %s: stop request received for container %s", workflowID, containerName) - - if strings.TrimSpace(containerName) == "" { - logs.Warn("workflowID %s: empty container name", workflowID) - return fmt.Errorf("empty container name") - } - - stopCmd := exec.CommandContext(ctx, "docker", "stop", "-t", "5", containerName) - if out, err := stopCmd.CombinedOutput(); err != nil { - logs.Warn("workflowID %s: docker stop failed for %s: %s, output: %s", workflowID, containerName, err, string(out)) - killCmd := exec.CommandContext(ctx, "docker", "kill", containerName) - if kout, kerr := killCmd.CombinedOutput(); kerr != nil { - logs.Error("workflowID %s: docker kill failed for %s: %s, output: %s", workflowID, containerName, kerr, string(kout)) - return fmt.Errorf("docker kill failed: %s", kerr) - } - } - - rmCmd := exec.CommandContext(ctx, "docker", "rm", "-f", containerName) - if rmOut, rmErr := rmCmd.CombinedOutput(); rmErr != nil { - logs.Warn("workflowID %s: docker rm failed for %s: %s, output: %s", workflowID, containerName, rmErr, string(rmOut)) - } else { - logs.Info("workflowID %s: container %s removed successfully", workflowID, containerName) - } - - return nil -} - -// PersistJobStateFromFile reads the state JSON file and updates the job state -func (r *Runner) PersistJobStateFromFile(jobID int, workflowID string) error { - hashWorkflowID := WorkflowHash(workflowID) - workDir, err := r.setupWorkDirectory(hashWorkflowID) - if err != nil { - logs.Error("workflowID %s: failed to setup work directory: %s", workflowID, err) - return err - } - - statePath := filepath.Join(workDir, "state.json") - state, err := utils.ParseJSONFile(statePath) - if err != nil { - logs.Error("workflowID %s: failed to parse state file %s: %s", workflowID, statePath, err) - return err - } - - jobORM := database.NewJobORM() - job, err := jobORM.GetByID(jobID, false) 
- if err != nil { - logs.Error("workflowID %s: failed to fetch job %d: %s", workflowID, jobID, err) - return err - } - - stateJSON, err := json.Marshal(state) - if err != nil { - logs.Error("workflowID %s: failed to marshal state: %s", workflowID, err) - return err - } - - job.State = string(stateJSON) - job.Active = true - - if err := jobORM.Update(job); err != nil { - logs.Error("workflowID %s: failed to update job %d: %s", workflowID, jobID, err) - return err - } - - logs.Info("workflowID %s: job state persisted successfully for jobID %d", workflowID, jobID) - return nil -} - -// WorkflowHash returns a deterministic hash string for a given workflowID -func WorkflowHash(workflowID string) string { - return fmt.Sprintf("%x", sha256.Sum256([]byte(workflowID))) -} diff --git a/server/internal/handlers/auth.go b/server/internal/handlers/auth.go index ffee8e5e..52bb6612 100644 --- a/server/internal/handlers/auth.go +++ b/server/internal/handlers/auth.go @@ -1,118 +1,119 @@ package handlers import ( - "context" - "encoding/json" + "errors" + "fmt" "net/http" - "strings" "github.com/beego/beego/v2/server/web" - "golang.org/x/crypto/bcrypt" - - "github.com/datazip/olake-frontend/server/internal/constants" - "github.com/datazip/olake-frontend/server/internal/database" - "github.com/datazip/olake-frontend/server/internal/models" - "github.com/datazip/olake-frontend/server/internal/telemetry" - "github.com/datazip/olake-frontend/server/utils" + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/models" + "github.com/datazip-inc/olake-ui/server/internal/models/dto" + "github.com/datazip-inc/olake-ui/server/utils" + "github.com/datazip-inc/olake-ui/server/utils/logger" + "github.com/datazip-inc/olake-ui/server/utils/telemetry" ) -type AuthHandler struct { - web.Controller - userORM *database.UserORM -} - -func (c *AuthHandler) Prepare() { - c.userORM = database.NewUserORM() -} - // @router /login [post] -func (c 
*AuthHandler) Login() { - var req models.LoginRequest - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") +func (h *Handler) Login() { + var req dto.LoginRequest + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, constants.ValidationInvalidRequestFormat, err) return } - user, err := c.userORM.FindByUsername(req.Username) + logger.Debugf("Login initiated username[%s]", req.Username) + + user, err := h.etl.Login(h.Ctx.Request.Context(), req.Username, req.Password) if err != nil { - ErrorResponse := "Invalid credentials" - if strings.Contains(err.Error(), "no row found") { - ErrorResponse = "user not found, sign up first" + switch { + case errors.Is(err, constants.ErrUserNotFound): + utils.ErrorResponse(&h.Controller, http.StatusUnauthorized, fmt.Sprintf("user not found, sign up first: %s", err), err) + case errors.Is(err, constants.ErrInvalidCredentials): + utils.ErrorResponse(&h.Controller, http.StatusUnauthorized, fmt.Sprintf("Invalid credentials: %s", err), err) + default: + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("Login failed: %s", err), err) } - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, ErrorResponse) - return - } - - if err := c.userORM.ComparePassword(user.Password, req.Password); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid credentials") return } // check if session is enabled if web.BConfig.WebConfig.Session.SessionOn { - _ = c.SetSession(constants.SessionUserID, user.ID) + _ = h.SetSession(constants.SessionUserID, user.ID) } - telemetry.TrackUserLogin(context.Background(), user) - - utils.SuccessResponse(&c.Controller, map[string]interface{}{ + utils.SuccessResponse(&h.Controller, "login successful", map[string]interface{}{ "username": user.Username, }) } // @router /checkauth 
[get] -func (c *AuthHandler) CheckAuth() { - if userID := c.GetSession(constants.SessionUserID); userID == nil { - utils.ErrorResponse(&c.Controller, http.StatusUnauthorized, "Not authenticated") +func (h *Handler) CheckAuth() { + userID := h.GetSession(constants.SessionUserID) + if userID == nil { + utils.ErrorResponse(&h.Controller, http.StatusUnauthorized, "Not authenticated", errors.New("not authenticated")) return } - utils.SuccessResponse(&c.Controller, models.LoginResponse{ - Message: "Authenticated", - Success: true, - }) + logger.Debugf("Check auth initiated user_id[%v]", userID) + + // Optional: Validate that the user still exists in the database + if userIDInt, ok := userID.(int); ok { + if err := h.etl.ValidateUser(userIDInt); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusUnauthorized, fmt.Sprintf("Invalid session: %s", err), err) + return + } + } + + utils.SuccessResponse(&h.Controller, "authenticated successfully", nil) } // @router /logout [post] -func (c *AuthHandler) Logout() { - _ = c.DestroySession() - utils.SuccessResponse(&c.Controller, models.LoginResponse{ - Message: "Logged out successfully", - Success: true, - }) -} +func (h *Handler) Logout() { + userID := h.GetSession(constants.SessionUserID) + logger.Debugf("Logout initiated user_id[%v]", userID) -// @router /signup [post] -func (c *AuthHandler) Signup() { - var req models.User - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") + err := h.DestroySession() + if err != nil { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to destroy session: %s", err), err) return } - // Hash password - hashedPassword, err := bcrypt.GenerateFromPassword([]byte(req.Password), bcrypt.DefaultCost) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to process password") + 
utils.SuccessResponse(&h.Controller, "logout successful", nil) +} + +// @router /signup [post] +func (h *Handler) Signup() { + var req models.User + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, constants.ValidationInvalidRequestFormat, err) return } - req.Password = string(hashedPassword) - if err := c.userORM.Create(&req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusConflict, "Username already exists") + if err := h.etl.Signup(h.Ctx.Request.Context(), &req); err != nil { + switch { + case errors.Is(err, constants.ErrUserAlreadyExists): + utils.ErrorResponse(&h.Controller, http.StatusConflict, fmt.Sprintf("Username already exists: %s", err), err) + case errors.Is(err, constants.ErrPasswordProcessing): + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to process password: %s", err), err) + default: + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to create user: %s", err), err) + } return } - utils.SuccessResponse(&c.Controller, map[string]interface{}{ + utils.SuccessResponse(&h.Controller, "user created successfully", map[string]interface{}{ "email": req.Email, "username": req.Username, }) } // @router /telemetry-id [get] -func (c *AuthHandler) GetTelemetryID() { +func (h *Handler) GetTelemetryID() { + logger.Infof("Get telemetry ID initiated") + telemetryID := telemetry.GetTelemetryUserID() - utils.SuccessResponse(&c.Controller, map[string]interface{}{ + utils.SuccessResponse(&h.Controller, "telemetry ID fetched successfully", map[string]interface{}{ telemetry.TelemetryUserIDFile: string(telemetryID), }) } diff --git a/server/internal/handlers/destination.go b/server/internal/handlers/destination.go index 3a2a4ac6..c3012323 100644 --- a/server/internal/handlers/destination.go +++ b/server/internal/handlers/destination.go @@ -1,351 +1,213 @@ package handlers import ( - "context" - 
"encoding/json" + "errors" "fmt" "net/http" - "path/filepath" - "time" - - "github.com/beego/beego/v2/core/logs" - "github.com/beego/beego/v2/server/web" - - "github.com/datazip/olake-frontend/server/internal/constants" - "github.com/datazip/olake-frontend/server/internal/database" - "github.com/datazip/olake-frontend/server/internal/docker" - "github.com/datazip/olake-frontend/server/internal/models" - "github.com/datazip/olake-frontend/server/internal/telemetry" - "github.com/datazip/olake-frontend/server/internal/temporal" - "github.com/datazip/olake-frontend/server/utils" -) -type DestHandler struct { - web.Controller - destORM *database.DestinationORM - jobORM *database.JobORM - userORM *database.UserORM - tempClient *temporal.Client -} + "github.com/datazip-inc/olake-ui/server/internal/models/dto" + "github.com/datazip-inc/olake-ui/server/utils" + "github.com/datazip-inc/olake-ui/server/utils/logger" +) -func (c *DestHandler) Prepare() { - c.destORM = database.NewDestinationORM() - c.jobORM = database.NewJobORM() - c.userORM = database.NewUserORM() - var err error - c.tempClient, err = temporal.NewClient() +// @router /project/:projectid/destinations [get] +func (h *Handler) ListDestinations() { + projectID, err := GetProjectIDFromPath(&h.Controller) if err != nil { - logs.Error("Failed to create Temporal client: %v", err) + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return } -} -// @router /project/:projectid/destinations [get] -func (c *DestHandler) GetAllDestinations() { - projectIDStr := c.Ctx.Input.Param(":projectid") - destinations, err := c.destORM.GetAllByProjectID(projectIDStr) + items, err := h.etl.ListDestinations(h.Ctx.Request.Context(), projectID) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to retrieve destinations") + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to get destinations: 
%s", err), err) return } - destItems := make([]models.DestinationDataItem, 0, len(destinations)) - for _, dest := range destinations { - item := models.DestinationDataItem{ - ID: dest.ID, - Name: dest.Name, - Type: dest.DestType, - Version: dest.Version, - Config: dest.Config, - CreatedAt: dest.CreatedAt.Format(time.RFC3339), - UpdatedAt: dest.UpdatedAt.Format(time.RFC3339), - } - - setUsernames(&item.CreatedBy, &item.UpdatedBy, dest.CreatedBy, dest.UpdatedBy) - - jobs, err := c.jobORM.GetByDestinationID(dest.ID) - var success bool - item.Jobs, success = buildJobDataItems(jobs, err, projectIDStr, "destination", c.tempClient, &c.Controller) - if !success { - return // Error occurred in buildJobDataItems - } - - destItems = append(destItems, item) - } - - utils.SuccessResponse(&c.Controller, destItems) + utils.SuccessResponse(&h.Controller, "Destinations listed successfully", items) } // @router /project/:projectid/destinations [post] -func (c *DestHandler) CreateDestination() { - // Get project ID from path - projectIDStr := c.Ctx.Input.Param(":projectid") +func (h *Handler) CreateDestination() { + userID := GetUserIDFromSession(&h.Controller) + if userID == nil { + utils.ErrorResponse(&h.Controller, http.StatusUnauthorized, "Not authenticated", errors.New("not authenticated")) + return + } - var req models.CreateDestinationRequest - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") + projectID, err := GetProjectIDFromPath(&h.Controller) + if err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Convert request to Destination model - destination := &models.Destination{ - Name: req.Name, - DestType: req.Type, - Version: req.Version, - Config: req.Config, - ProjectID: projectIDStr, + + var req dto.CreateDestinationRequest + if err := 
UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return } - // Set created by if user is logged in - userID := c.GetSession(constants.SessionUserID) - if userID != nil { - user := &models.User{ID: userID.(int)} - destination.CreatedBy = user - destination.UpdatedBy = user + if err := dto.ValidateDestinationType(req.Type); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return } - if err := c.destORM.Create(destination); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to create destination: %s", err)) + logger.Debugf("Create destination initiated project_id[%s] destination_type[%s] destination_name[%s] user_id[%v]", + projectID, req.Type, req.Name, userID) + + if err := h.etl.CreateDestination(h.Ctx.Request.Context(), &req, projectID, userID); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to create destination: %s", err), err) return } - // Track destination creation event - telemetry.TrackDestinationCreation(context.Background(), destination) - utils.SuccessResponse(&c.Controller, req) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("destination %s created successfully", req.Name), req) } // @router /project/:projectid/destinations/:id [put] -func (c *DestHandler) UpdateDestination() { - // Get destination ID from path - id := GetIDFromPath(&c.Controller) - projectID := c.Ctx.Input.Param(":projectid") - var req models.UpdateDestinationRequest - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") +func (h *Handler) UpdateDestination() { + userID := GetUserIDFromSession(&h.Controller) + if userID == nil { + 
utils.ErrorResponse(&h.Controller, http.StatusUnauthorized, "Not authenticated", errors.New("not authenticated")) return } - // Get existing destination - existingDest, err := c.destORM.GetByID(id) + + id, err := GetIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusNotFound, "Destination not found") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Update fields - existingDest.Name = req.Name - existingDest.DestType = req.Type - existingDest.Version = req.Version - existingDest.Config = req.Config - existingDest.UpdatedAt = time.Now() - - // Update user who made changes - userID := c.GetSession(constants.SessionUserID) - if userID != nil { - user := &models.User{ID: userID.(int)} - existingDest.UpdatedBy = user - } - - // Find jobs linked to this source - jobs, err := c.jobORM.GetByDestinationID(existingDest.ID) + projectID, err := GetProjectIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to fetch jobs for destination %s", err)) + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Cancel workflows for those jobs - for _, job := range jobs { - err := cancelJobWorkflow(c.tempClient, job, projectID) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to cancel workflow for job %s", err)) - return - } + var req dto.UpdateDestinationRequest + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return } - // persist update - if err := c.destORM.Update(existingDest); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to update destination %s", err)) + 
if err := dto.ValidateDestinationType(req.Type); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Track destinations status after update - telemetry.TrackDestinationsStatus(context.Background()) + logger.Debugf("Update destination initiated project_id[%s], destination_id[%d], destination_type[%s], user_id[%v]", + projectID, id, req.Type, userID) - utils.SuccessResponse(&c.Controller, req) + if err := h.etl.UpdateDestination(h.Ctx.Request.Context(), id, projectID, &req, userID); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to update destination: %s", err), err) + return + } + utils.SuccessResponse(&h.Controller, fmt.Sprintf("destination %s updated successfully", req.Name), req) } // @router /project/:projectid/destinations/:id [delete] -func (c *DestHandler) DeleteDestination() { - // Get destination ID from path - id := GetIDFromPath(&c.Controller) - // Get the name for the response - dest, err := c.destORM.GetByID(id) +func (h *Handler) DeleteDestination() { + id, err := GetIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusNotFound, "Destination not found") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - jobs, err := c.jobORM.GetByDestinationID(id) + logger.Debugf("Delete destination initiated destination_id[%d]", id) + + resp, err := h.etl.DeleteDestination(h.Ctx.Request.Context(), id) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to get source by id") - } - for _, job := range jobs { - job.Active = false - if err := c.jobORM.Update(job); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to deactivate jobs using this destination") - return - } - } - if err := c.destORM.Delete(id); err != nil { - 
utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to delete destination") + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to delete destination: %s", err), err) return } - // Track destinations status after deletion - telemetry.TrackDestinationsStatus(context.Background()) - - utils.SuccessResponse(&c.Controller, &models.DeleteDestinationResponse{ - Name: dest.Name, - }) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("destination %s deleted successfully", resp.Name), resp) } // @router /project/:projectid/destinations/test [post] -func (c *DestHandler) TestConnection() { - var req models.DestinationTestConnectionRequest - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") - return - } - if req.Type == "" { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "valid destination type is required") +func (h *Handler) TestDestinationConnection() { + // need to remove sourceVersion from request + var req dto.DestinationTestConnectionRequest + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - if req.Version == "" || req.Version == "latest" { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "valid destination version required") - return - } + logger.Infof("Test destination connection initiated destination_type[%s] destination_version[%s]", req.Type, req.Version) - // Determine driver and available tags - version := req.Version - driver := req.Source - if driver == "" { - var err error - _, driver, err = utils.GetDriverImageTags(c.Ctx.Request.Context(), "", true) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to get valid driver image tags: %s", err)) - return - } - } - - 
encryptedConfig, err := utils.Encrypt(req.Config) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to encrypt destination config: "+err.Error()) - return - } - workflowID := fmt.Sprintf("test-connection-%s-%d", req.Type, time.Now().Unix()) - result, err := c.tempClient.TestConnection(c.Ctx.Request.Context(), workflowID, "destination", driver, version, encryptedConfig) - if result == nil { - result = map[string]interface{}{ - "message": err.Error(), - "status": "failed", - } - } - homeDir := docker.GetDefaultConfigDir() - mainLogDir := filepath.Join(homeDir, workflowID) - logs, err := utils.ReadLogs(mainLogDir) + result, logs, err := h.etl.TestDestinationConnection(h.Ctx.Request.Context(), &req) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to read logs: %s", err)) + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to verify driver credentials: %s", err), err) return } - utils.SuccessResponse(&c.Controller, models.TestConnectionResponse{ + + utils.SuccessResponse(&h.Controller, fmt.Sprintf("destination %s connection tested successfully", req.Type), dto.TestConnectionResponse{ ConnectionResult: result, Logs: logs, }) } // @router /destinations/:id/jobs [get] -func (c *DestHandler) GetDestinationJobs() { - id := GetIDFromPath(&c.Controller) - // Check if destination exists - _, err := c.destORM.GetByID(id) +func (h *Handler) GetDestinationJobs() { + id, err := GetIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusNotFound, "Destination not found") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Create a job ORM and get jobs by destination ID - jobORM := database.NewJobORM() - jobs, err := jobORM.GetByDestinationID(id) + logger.Debugf("Get destination jobs initiated destination_id[%d]", id) + + jobs, err := 
h.etl.GetDestinationJobs(h.Ctx.Request.Context(), id) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to retrieve jobs") + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to get jobs related to destination: %s", err), err) return } - - // Format as required by API contract - utils.SuccessResponse(&c.Controller, map[string]interface{}{ - "jobs": jobs, - }) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("destination %d jobs fetched successfully", id), map[string]interface{}{"jobs": jobs}) } // @router /project/:projectid/destinations/versions [get] -func (c *DestHandler) GetDestinationVersions() { - // Get destination type from query parameter - destType := c.GetString("type") - if destType == "" { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Destination type is required") +func (h *Handler) GetDestinationVersions() { + projectID, err := GetProjectIDFromPath(&h.Controller) + if err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // get available driver versions - versions, _, err := utils.GetDriverImageTags(c.Ctx.Request.Context(), "", true) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to fetch driver versions: %s", err)) + destType := h.GetString("type") + if err := dto.ValidateDestinationType(destType); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - utils.SuccessResponse(&c.Controller, map[string]interface{}{ - "version": versions, - }) -} + logger.Debugf("Get destination versions initiated project_id[%s] destination_type[%s]", projectID, destType) -// @router /project/:projectid/destinations/spec [post] -func (c *DestHandler) GetDestinationSpec() { - _ = c.Ctx.Input.Param(":projectid") - - var req models.SpecRequest - if 
err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") + versions, err := h.etl.GetDestinationVersions(h.Ctx.Request.Context(), destType) + if err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to get destination versions: %s", err), err) return } - var specOutput models.SpecOutput - var err error - // TODO: make destinationType consistent. Only use parquet and iceberg. - destinationType := "iceberg" - if req.Type == "s3" { - destinationType = "parquet" - } - version := req.Version + utils.SuccessResponse(&h.Controller, fmt.Sprintf("destination %s versions fetched successfully", destType), versions) +} - // Determine driver and available tags - _, driver, err := utils.GetDriverImageTags(c.Ctx.Request.Context(), "", true) +// @router /project/:projectid/destinations/spec [post] +func (h *Handler) GetDestinationSpec() { + projectID, err := GetProjectIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to get valid driver image tags: %s", err)) + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - if c.tempClient != nil { - specOutput, err = c.tempClient.FetchSpec( - c.Ctx.Request.Context(), - destinationType, - driver, - version, - ) + var req dto.SpecRequest + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return } + + logger.Debugf("Get destination spec initiated project_id[%s] destination_type[%s] destination_version[%s]", + projectID, req.Type, req.Version) + + resp, err := h.etl.GetDestinationSpec(h.Ctx.Request.Context(), &req) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed 
to get spec: %v", err)) + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to get destination spec: %s", err), err) return } - - utils.SuccessResponse(&c.Controller, models.SpecResponse{ - Version: req.Version, - Type: req.Type, - Spec: specOutput.Spec, - }) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("destination %s spec fetched successfully", req.Type), resp) } diff --git a/server/internal/handlers/frontend_handlers.go b/server/internal/handlers/frontend_handlers.go deleted file mode 100644 index 040cd75b..00000000 --- a/server/internal/handlers/frontend_handlers.go +++ /dev/null @@ -1,22 +0,0 @@ -package handlers - -import ( - "net/http" - "path/filepath" - - "github.com/beego/beego/v2/server/web" -) - -type FrontendHandler struct { - web.Controller -} - -func (c *FrontendHandler) Get() { - const indexPath = "/opt/frontend/dist/index.html" - - // Set Content-Type early - c.Ctx.Output.ContentType("text/html") - - // Use built-in file serving for efficiency and proper headers - http.ServeFile(c.Ctx.ResponseWriter, c.Ctx.Request, filepath.Clean(indexPath)) -} diff --git a/server/internal/handlers/handler.go b/server/internal/handlers/handler.go new file mode 100644 index 00000000..1719972a --- /dev/null +++ b/server/internal/handlers/handler.go @@ -0,0 +1,25 @@ +package handlers + +import ( + "github.com/beego/beego/v2/server/web" + services "github.com/datazip-inc/olake-ui/server/internal/services/etl" +) + +type Handler struct { + web.Controller + etl *services.ETLService +} + +// appService holds the singleton service instance injected at startup. +var etl *services.ETLService + +func NewHandler(s *services.ETLService) *Handler { + etl = s + return &Handler{etl: s} +} + +// Prepare runs before each action; Beego constructs a fresh controller per request, +// so we assign the shared AppService here to avoid nil dereferences. 
+func (h *Handler) Prepare() { + h.etl = etl +} diff --git a/server/internal/handlers/handlers_utils.go b/server/internal/handlers/handlers_utils.go deleted file mode 100644 index 9f80e997..00000000 --- a/server/internal/handlers/handlers_utils.go +++ /dev/null @@ -1,120 +0,0 @@ -package handlers - -import ( - "context" - "fmt" - "net/http" - "strconv" - "time" - - "github.com/beego/beego/v2/server/web" - "github.com/datazip/olake-frontend/server/internal/models" - "github.com/datazip/olake-frontend/server/internal/temporal" - "github.com/datazip/olake-frontend/server/utils" - "go.temporal.io/api/workflowservice/v1" -) - -// get id from path -func GetIDFromPath(c *web.Controller) int { - idStr := c.Ctx.Input.Param(":id") - id, err := strconv.Atoi(idStr) - if err != nil { - utils.ErrorResponse(c, http.StatusBadRequest, "Invalid id") - return 0 - } - return id -} - -// setUsernames sets the created and updated usernames if available -func setUsernames(createdBy, updatedBy *string, creator, updater *models.User) { - if creator != nil { - *createdBy = creator.Username - } - if updater != nil { - *updatedBy = updater.Username - } -} - -// buildJobDataItems creates job data items with workflow information -// Returns (jobItems, success). If success is false, an error occurred and the handler should return. 
-func buildJobDataItems(jobs []*models.Job, err error, projectIDStr, contextType string, tempClient *temporal.Client, controller *web.Controller) ([]models.JobDataItem, bool) { - jobItems := make([]models.JobDataItem, 0) - - if err != nil { - return jobItems, true // No jobs is OK, return empty slice - } - - for _, job := range jobs { - jobInfo := models.JobDataItem{ - Name: job.Name, - ID: job.ID, - Activate: job.Active, - } - - // Set source/destination info based on context - if contextType == "source" && job.DestID != nil { - jobInfo.DestinationName = job.DestID.Name - jobInfo.DestinationType = job.DestID.DestType - } else if contextType == "destination" && job.SourceID != nil { - jobInfo.SourceName = job.SourceID.Name - jobInfo.SourceType = job.SourceID.Type - } - - if !setJobWorkflowInfo(&jobInfo, job.ID, projectIDStr, tempClient, controller) { - return nil, false // Error occurred, signal failure - } - jobItems = append(jobItems, jobInfo) - } - - return jobItems, true -} - -// setJobWorkflowInfo fetches and sets workflow execution information for a job -// Returns false if an error occurred that should stop processing -func setJobWorkflowInfo(jobInfo *models.JobDataItem, jobID int, projectIDStr string, tempClient *temporal.Client, controller *web.Controller) bool { - query := fmt.Sprintf("WorkflowId between 'sync-%s-%d' and 'sync-%s-%d-~'", projectIDStr, jobID, projectIDStr, jobID) - - resp, err := tempClient.ListWorkflow(context.Background(), &workflowservice.ListWorkflowExecutionsRequest{ - Query: query, - PageSize: 1, - }) - - if err != nil { - utils.ErrorResponse(controller, http.StatusInternalServerError, fmt.Sprintf("failed to list workflows: %v", err)) - return false - } - - if len(resp.Executions) > 0 { - jobInfo.LastRunTime = resp.Executions[0].StartTime.AsTime().Format(time.RFC3339) - jobInfo.LastRunState = resp.Executions[0].Status.String() - } else { - jobInfo.LastRunTime = "" - jobInfo.LastRunState = "" - } - return true -} - -func 
cancelJobWorkflow(tempClient *temporal.Client, job *models.Job, projectID string) error { - query := fmt.Sprintf( - "WorkflowId BETWEEN 'sync-%s-%d' AND 'sync-%s-%d-~' AND ExecutionStatus = 'Running'", - projectID, job.ID, projectID, job.ID, - ) - - resp, err := tempClient.ListWorkflow(context.Background(), &workflowservice.ListWorkflowExecutionsRequest{ - Query: query, - }) - if err != nil { - return fmt.Errorf("list workflows failed: %s", err) - } - if len(resp.Executions) == 0 { - return nil // no running workflows - } - - for _, wfExec := range resp.Executions { - if err := tempClient.CancelWorkflow(context.Background(), - wfExec.Execution.WorkflowId, wfExec.Execution.RunId); err != nil { - return fmt.Errorf("failed to cancel workflow[%s]: %s", wfExec.Execution.WorkflowId, err) - } - } - return nil -} diff --git a/server/internal/handlers/job.go b/server/internal/handlers/job.go index 16a2caf0..1647750b 100644 --- a/server/internal/handlers/job.go +++ b/server/internal/handlers/job.go @@ -1,659 +1,329 @@ package handlers import ( - "context" - "crypto/sha256" "encoding/json" "fmt" "net/http" - "path/filepath" - "strconv" - "time" - - "github.com/beego/beego/v2/core/logs" - "github.com/beego/beego/v2/server/web" - "github.com/datazip/olake-frontend/server/internal/constants" - "github.com/datazip/olake-frontend/server/internal/database" - "github.com/datazip/olake-frontend/server/internal/docker" - "github.com/datazip/olake-frontend/server/internal/models" - "github.com/datazip/olake-frontend/server/internal/telemetry" - "github.com/datazip/olake-frontend/server/internal/temporal" - "github.com/datazip/olake-frontend/server/utils" - "go.temporal.io/api/workflowservice/v1" -) - -type JobHandler struct { - web.Controller - jobORM *database.JobORM - sourceORM *database.SourceORM - destORM *database.DestinationORM - userORM *database.UserORM - tempClient *temporal.Client -} -// Prepare initializes the ORM instances -func (c *JobHandler) Prepare() { - c.jobORM = 
database.NewJobORM() - c.sourceORM = database.NewSourceORM() - c.destORM = database.NewDestinationORM() - c.userORM = database.NewUserORM() - var err error - c.tempClient, err = temporal.NewClient() - if err != nil { - logs.Error("Failed to create Temporal client: %v", err) - } -} + "github.com/datazip-inc/olake-ui/server/internal/models/dto" + "github.com/datazip-inc/olake-ui/server/utils" + "github.com/datazip-inc/olake-ui/server/utils/logger" +) // @router /project/:projectid/jobs [get] -func (c *JobHandler) GetAllJobs() { - projectIDStr := c.Ctx.Input.Param(":projectid") - // Get jobs with optional filtering - jobs, err := c.jobORM.GetAllByProjectID(projectIDStr) +func (h *Handler) ListJobs() { + projectID, err := GetProjectIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to retrieve jobs by project ID") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Transform to response format - jobResponses := make([]models.JobResponse, 0, len(jobs)) - for _, job := range jobs { - jobResp := models.JobResponse{ - ID: job.ID, - Name: job.Name, - StreamsConfig: job.StreamsConfig, - Frequency: job.Frequency, - CreatedAt: job.CreatedAt.Format(time.RFC3339), - UpdatedAt: job.UpdatedAt.Format(time.RFC3339), - Activate: job.Active, - } - - // Set source and destination details - if job.SourceID != nil { - jobResp.Source = models.JobSourceConfig{ - Name: job.SourceID.Name, - Type: job.SourceID.Type, - Config: job.SourceID.Config, - Version: job.SourceID.Version, - } - } - - if job.DestID != nil { - jobResp.Destination = models.JobDestinationConfig{ - Name: job.DestID.Name, - Type: job.DestID.DestType, - Config: job.DestID.Config, - Version: job.DestID.Version, - } - } - - // Set user details - if job.CreatedBy != nil { - jobResp.CreatedBy = job.CreatedBy.Username - } - if job.UpdatedBy != nil { - jobResp.UpdatedBy = 
job.UpdatedBy.Username - } - - // Get workflow information if Temporal client is available - if c.tempClient != nil { - query := fmt.Sprintf("WorkflowId between 'sync-%s-%d' and 'sync-%s-%d-~'", projectIDStr, job.ID, projectIDStr, job.ID) - if resp, err := c.tempClient.ListWorkflow(context.Background(), &workflowservice.ListWorkflowExecutionsRequest{ - Query: query, - PageSize: 1, - }); err != nil { - logs.Error("Failed to list workflows: %v", err) - } else if len(resp.Executions) > 0 { - jobResp.LastRunTime = resp.Executions[0].StartTime.AsTime().Format(time.RFC3339) - jobResp.LastRunState = resp.Executions[0].Status.String() - } - } + logger.Debugf("Get all jobs initiated project_id[%s]", projectID) - jobResponses = append(jobResponses, jobResp) + jobs, err := h.etl.ListJobs(h.Ctx.Request.Context(), projectID) + if err != nil { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to retrieve jobs by project ID: %s", err), err) + return } - - utils.SuccessResponse(&c.Controller, jobResponses) + utils.SuccessResponse(&h.Controller, "jobs listed successfully", jobs) } // @router /project/:projectid/jobs [post] -func (c *JobHandler) CreateJob() { - // Get project ID from path - projectIDStr := c.Ctx.Input.Param(":projectid") - // Parse request body - var req models.CreateJobRequest - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") - return - } - unique, err := c.jobORM.IsJobNameUnique(projectIDStr, req.Name) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to check job name uniqueness") - return - } - if !unique { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Job name already exists") - return - } - // Find or create source - source, err := c.getOrCreateSource(&req.Source, projectIDStr) - if err != nil { - utils.ErrorResponse(&c.Controller, 
http.StatusInternalServerError, fmt.Sprintf("Failed to process source: %s", err)) +func (h *Handler) CreateJob() { + userID := GetUserIDFromSession(&h.Controller) + if userID == nil { + utils.ErrorResponse(&h.Controller, http.StatusUnauthorized, "Not authenticated", fmt.Errorf("not authenticated")) return } - // Find or create destination - dest, err := c.getOrCreateDestination(&req.Destination, projectIDStr) + projectID, err := GetProjectIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to process destination: %s", err)) + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Create job model - job := &models.Job{ - Name: req.Name, - SourceID: source, - DestID: dest, - Active: true, - Frequency: req.Frequency, - StreamsConfig: req.StreamsConfig, - State: "{}", - ProjectID: projectIDStr, + var req dto.CreateJobRequest + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return } - // Get user information from session - userID := c.GetSession(constants.SessionUserID) - if userID != nil { - user := &models.User{ID: userID.(int)} - job.CreatedBy = user - job.UpdatedBy = user + // Conditional validation + if req.Source.ID == nil { + if err := dto.ValidateSourceType(req.Source.Type); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return + } + if req.Source.Name == "" || req.Source.Version == "" || req.Source.Config == "" { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, "source name, version, and config are required when source id is not provided", err) + return + } } - if err := c.jobORM.Create(job); err != nil { - utils.ErrorResponse(&c.Controller, 
http.StatusInternalServerError, fmt.Sprintf("Failed to create job: %s", err)) - return + if req.Destination.ID == nil { + if err := dto.ValidateDestinationType(req.Destination.Type); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return + } + if req.Destination.Name == "" || req.Destination.Version == "" || req.Destination.Config == "" { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, "destination name, version, and config are required when destination id is not provided", err) + return + } } - // telemetry events - telemetry.TrackJobCreation(context.Background(), job) + logger.Debugf("Create job initiated project_id[%s] job_name[%s] user_id[%v]", projectID, req.Name, userID) - if c.tempClient != nil { - fmt.Println("Using Temporal workflow for sync job") - _, err = c.tempClient.ManageSync( - c.Ctx.Request.Context(), - job.ProjectID, - job.ID, - job.Frequency, - temporal.ActionCreate, - ) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Temporal workflow execution failed for create job schedule: %s", err)) - } + if err := h.etl.CreateJob(h.Ctx.Request.Context(), &req, projectID, userID); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to create job: %s", err), err) + return } - - utils.SuccessResponse(&c.Controller, req) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("job '%s' created successfully", req.Name), nil) } // @router /project/:projectid/jobs/:id [put] -func (c *JobHandler) UpdateJob() { - // Get project ID and job ID from path - projectIDStr := c.Ctx.Input.Param(":projectid") - id := GetIDFromPath(&c.Controller) - - // Parse request body - var req models.UpdateJobRequest - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") +func (h *Handler) 
UpdateJob() { + userID := GetUserIDFromSession(&h.Controller) + if userID == nil { + utils.ErrorResponse(&h.Controller, http.StatusUnauthorized, "Not authenticated", fmt.Errorf("not authenticated")) return } - // Get existing job - existingJob, err := c.jobORM.GetByID(id, true) + projectID, err := GetProjectIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusNotFound, "Job not found") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Find or create source - source, err := c.getOrCreateSource(&req.Source, projectIDStr) + jobID, err := GetIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to process source: %s", err)) + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Find or create destination - dest, err := c.getOrCreateDestination(&req.Destination, projectIDStr) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to process destination: %s", err)) + var req dto.UpdateJobRequest + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Update fields - existingJob.Name = req.Name - existingJob.SourceID = source - existingJob.DestID = dest - existingJob.Active = req.Activate - existingJob.Frequency = req.Frequency - existingJob.StreamsConfig = req.StreamsConfig - existingJob.UpdatedAt = time.Now() - existingJob.ProjectID = projectIDStr - - // Update user information - userID := c.GetSession(constants.SessionUserID) - if userID != nil { - user := &models.User{ID: userID.(int)} - existingJob.UpdatedBy = user - } - - // cancel existing workflow - err = cancelJobWorkflow(c.tempClient, 
existingJob, projectIDStr) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to cancel workflow for job %s", err)) - return - } - // Update job in database - if err := c.jobORM.Update(existingJob); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to update job") - return + if req.Source.ID == nil { + if err := dto.ValidateSourceType(req.Source.Type); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return + } + if req.Source.Name == "" || req.Source.Version == "" || req.Source.Config == "" { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, "source name, version, and config are required when source id is not provided", err) + return + } } - - // Track sources and destinations status after job update - telemetry.TrackJobEntity(context.Background()) - - if c.tempClient != nil { - logs.Info("Using Temporal workflow for sync job") - _, err = c.tempClient.ManageSync( - c.Ctx.Request.Context(), - existingJob.ProjectID, - existingJob.ID, - existingJob.Frequency, - temporal.ActionUpdate, - ) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Temporal workflow execution failed for update job schedule: %s", err)) + if req.Destination.ID == nil { + if err := dto.ValidateDestinationType(req.Destination.Type); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return + } + if req.Destination.Name == "" || req.Destination.Version == "" || req.Destination.Config == "" { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, "destination name, version, and config are required when destination id is not provided", err) return } } - utils.SuccessResponse(&c.Controller, req) -} + logger.Debugf("Update job initiated project_id[%s] job_id[%d] job_name[%s] 
user_id[%v]", projectID, jobID, req.Name, userID) -// @router /project/:projectid/jobs/:id [delete] -func (c *JobHandler) DeleteJob() { - idStr := c.Ctx.Input.Param(":id") - id, err := strconv.Atoi(idStr) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid job ID") + if err := h.etl.UpdateJob(h.Ctx.Request.Context(), &req, projectID, jobID, userID); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to update job: %s", err), err) return } + utils.SuccessResponse(&h.Controller, fmt.Sprintf("job '%s' updated successfully", req.Name), nil) +} - // Get job name for response - job, err := c.jobORM.GetByID(id, true) +// @router /project/:projectid/jobs/:id [delete] +func (h *Handler) DeleteJob() { + id, err := GetIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusNotFound, "Job not found") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // cancel existing workflow - err = cancelJobWorkflow(c.tempClient, job, job.ProjectID) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to cancel workflow for job %s", err)) - return - } - jobName := job.Name - if c.tempClient != nil { - logs.Info("Using Temporal workflow for delete job schedule") - _, err = c.tempClient.ManageSync( - c.Ctx.Request.Context(), - job.ProjectID, - job.ID, - job.Frequency, - temporal.ActionDelete, - ) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Temporal workflow execution failed for delete job schedule: %s", err)) - return - } - } - // Delete job - if err := c.jobORM.Delete(id); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to delete job") + logger.Infof("Delete job initiated job_id[%d]", id) + + jobName, err := h.etl.DeleteJob(h.Ctx.Request.Context(), 
id) + if err != nil { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to delete job: %s", err), err) return } - - // Track sources and destinations status after job deletion - telemetry.TrackJobEntity(context.Background()) - - utils.SuccessResponse(&c.Controller, models.DeleteDestinationResponse{ - Name: jobName, - }) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("job '%s' deleted successfully", jobName), nil) } // @router /project/:projectid/jobs/check-unique [post] -func (c *JobHandler) CheckUniqueJobName() { - projectIDStr := c.Ctx.Input.Param(":projectid") - var req models.CheckUniqueJobNameRequest - - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") +func (h *Handler) CheckUniqueJobName() { + projectID, err := GetProjectIDFromPath(&h.Controller) + if err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - if req.JobName == "" { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Job name is required") + + var req dto.CheckUniqueJobNameRequest + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - unique, err := c.jobORM.IsJobNameUnique(projectIDStr, req.JobName) + + logger.Infof("Check unique job name initiated project_id[%s] job_name[%s]", projectID, req.JobName) + + unique, err := h.etl.CheckUniqueJobName(h.Ctx.Request.Context(), projectID, req) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to check job name uniqueness") + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to check job name uniqueness: %s", err), err) return } - utils.SuccessResponse(&c.Controller, 
models.CheckUniqueJobNameResponse{ - Unique: unique, - }) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("job name '%s' uniqueness checked successfully", req.JobName), dto.CheckUniqueJobNameResponse{Unique: unique}) } // @router /project/:projectid/jobs/:id/sync [post] -func (c *JobHandler) SyncJob() { - idStr := c.Ctx.Input.Param(":id") - id, err := strconv.Atoi(idStr) +func (h *Handler) SyncJob() { + projectID, err := GetProjectIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid job ID") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Check if job exists - job, err := c.jobORM.GetByID(id, true) + + id, err := GetIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusNotFound, "Job not found") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Validate source and destination exist - if job.SourceID == nil || job.DestID == nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Job must have both source and destination configured") - return - } + logger.Infof("Sync trigger initiated for project_id[%s] job_id[%d]", projectID, id) - if c.tempClient != nil { - logs.Info("Using Temporal workflow for sync job") - _, err = c.tempClient.ManageSync( - c.Ctx.Request.Context(), - job.ProjectID, - job.ID, - job.Frequency, - temporal.ActionTrigger, - ) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Temporal workflow execution failed for sync job: %s", err)) - return - } + result, err := h.etl.SyncJob(h.Ctx.Request.Context(), projectID, id) + if err != nil { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to trigger sync: %s", err), err) + return } - utils.SuccessResponse(&c.Controller, nil) + 
utils.SuccessResponse(&h.Controller, fmt.Sprintf("sync triggered successfully for job_id[%d]", id), result) } -// @router /project/:projectid/jobs/:id/activate [post] -func (c *JobHandler) ActivateJob() { - id := GetIDFromPath(&c.Controller) - - // Parse request body - var req models.JobStatus - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") +// @router /project/:projectid/jobs/:id/activate [put] +func (h *Handler) ActivateJob() { + userID := GetUserIDFromSession(&h.Controller) + if userID == nil { + utils.ErrorResponse(&h.Controller, http.StatusUnauthorized, "Not authenticated", fmt.Errorf("not authenticated")) return } - // Get existing job - job, err := c.jobORM.GetByID(id, true) + id, err := GetIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusNotFound, "Job not found") - return - } - action := temporal.ActionUnpause - if !req.Activate { - action = temporal.ActionPause - } - if c.tempClient != nil { - logs.Info("Using Temporal workflow for activate job schedule") - _, err = c.tempClient.ManageSync( - c.Ctx.Request.Context(), - job.ProjectID, - job.ID, - job.Frequency, - action, - ) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Temporal workflow execution failed for activate job schedule: %s", err)) - return - } + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return } - // Update activation status - job.Active = req.Activate - job.UpdatedAt = time.Now() - // Update user information - userID := c.GetSession(constants.SessionUserID) - if userID != nil { - user := &models.User{ID: userID.(int)} - job.UpdatedBy = user + var req dto.JobStatusRequest + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, 
fmt.Sprintf("failed to validate request: %s", err), err) + return } - // Update job in database - if err := c.jobORM.Update(job); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to update job activation status") + logger.Debugf("Activate job initiated job_id[%d] user_id[%v]", id, userID) + + if err := h.etl.ActivateJob(h.Ctx.Request.Context(), id, req, userID); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to activate job: %s", err), err) return } - - utils.SuccessResponse(&c.Controller, req) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("job %d %s successfully", id, utils.Ternary(req.Activate, "resumed", "paused")), nil) } -// @router /project/:projectid/jobs/:id/cancel [get] -func (c *JobHandler) CancelJobRun() { - // Parse inputs - idStr := c.Ctx.Input.Param(":id") - id, err := strconv.Atoi(idStr) +// @router /project/:projectid/jobs/:id/cancel [post] +func (h *Handler) CancelJobRun() { + projectID, err := GetProjectIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid job ID") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - projectID := c.Ctx.Input.Param(":projectid") - // Ensure job exists - job, err := c.jobORM.GetByID(id, true) + id, err := GetIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusNotFound, fmt.Sprintf("Job not found: %v", err)) + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - if err := cancelJobWorkflow(c.tempClient, job, projectID); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("job workflow cancel failed: %v", err)) + logger.Infof("Cancel job run initiated project_id[%s] job_id[%d]", projectID, id) + + if err := 
h.etl.CancelJobRun(h.Ctx.Request.Context(), projectID, id); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to cancel job run: %s", err), err) return } - - utils.SuccessResponse(&c.Controller, map[string]any{ - "message": "Job Cancellation initiated. Completion may take up to a minute", - }) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("job workflow cancel requested successfully for job_id[%d]", id), nil) } // @router /project/:projectid/jobs/:id/tasks [get] -func (c *JobHandler) GetJobTasks() { - idStr := c.Ctx.Input.Param(":id") - id, err := strconv.Atoi(idStr) +func (h *Handler) GetJobTasks() { + projectID, err := GetProjectIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid job ID") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - projectIDStr := c.Ctx.Input.Param(":projectid") - // Get job to verify it exists - job, err := c.jobORM.GetByID(id, true) + id, err := GetIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusNotFound, "Job not found") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - var tasks []models.JobTask - // Construct a query for workflows related to this project and job - query := fmt.Sprintf("WorkflowId between 'sync-%s-%d' and 'sync-%s-%d-~'", projectIDStr, job.ID, projectIDStr, job.ID) - // List workflows using the direct query - resp, err := c.tempClient.ListWorkflow(context.Background(), &workflowservice.ListWorkflowExecutionsRequest{ - Query: query, - }) + + logger.Debugf("Get job tasks initiated project_id[%s] job_id[%d]", projectID, id) + + tasks, err := h.etl.GetJobTasks(h.Ctx.Request.Context(), projectID, id) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to list 
workflows: %v", err)) + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to get job tasks: %s", err), err) return } - for _, execution := range resp.Executions { - startTime := execution.StartTime.AsTime().UTC() - var runTime string - if execution.CloseTime != nil { - runTime = execution.CloseTime.AsTime().UTC().Sub(startTime).Round(time.Second).String() - } else { - runTime = time.Since(startTime).Round(time.Second).String() - } - tasks = append(tasks, models.JobTask{ - Runtime: runTime, - StartTime: startTime.Format(time.RFC3339), - Status: execution.Status.String(), - FilePath: execution.Execution.WorkflowId, - }) - } - - utils.SuccessResponse(&c.Controller, tasks) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("job tasks listed successfully for job_id[%d]", id), tasks) } -// @router /project/:projectid/jobs/:id/tasks/:taskid/logs [post] -func (c *JobHandler) GetTaskLogs() { - idStr := c.Ctx.Input.Param(":id") - id, err := strconv.Atoi(idStr) +// @router /project/:projectid/jobs/:id/logs [get] +func (h *Handler) GetTaskLogs() { + id, err := GetIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid job ID") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Parse request body - var req struct { - FilePath string `json:"file_path"` - } - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") + var req dto.JobTaskRequest + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Verify job exists - _, err = c.jobORM.GetByID(id, true) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusNotFound, "Job not found") - return - } - 
syncFolderName := fmt.Sprintf("%x", sha256.Sum256([]byte(req.FilePath))) - // Read the log file + logger.Debugf("Get task logs initiated job_id[%d] file_path[%s]", id, req.FilePath) - // Get home directory - homeDir := docker.GetDefaultConfigDir() - mainSyncDir := filepath.Join(homeDir, syncFolderName) - logs, err := utils.ReadLogs(mainSyncDir) + logs, err := h.etl.GetTaskLogs(h.Ctx.Request.Context(), id, req.FilePath) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusNotFound, err.Error()) + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to get task logs: %s", err), err) return } - - utils.SuccessResponse(&c.Controller, logs) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("task logs retrieved successfully for job_id[%d]", id), logs) } -// Helper methods - -// getOrCreateSource finds or creates a source based on the provided config -func (c *JobHandler) getOrCreateSource(config *models.JobSourceConfig, projectIDStr string) (*models.Source, error) { - // Try to find an existing source matching the criteria - sources, err := c.sourceORM.GetByNameAndType(config.Name, config.Type, projectIDStr) - if err == nil && len(sources) > 0 { - // Update the existing source if found - source := sources[0] - source.Config = config.Config - source.Version = config.Version - - // Get user info for update - userID := c.GetSession(constants.SessionUserID) - if userID != nil { - user := &models.User{ID: userID.(int)} - source.UpdatedBy = user - } - - if err := c.sourceORM.Update(source); err != nil { - return nil, fmt.Errorf("failed to update source: %s", err) - } - - return source, nil - } - - // Create a new source if not found - source := &models.Source{ - Name: config.Name, - Type: config.Type, - Config: config.Config, - Version: config.Version, - ProjectID: projectIDStr, - } - - // Set user info - userID := c.GetSession(constants.SessionUserID) - if userID != nil { - user := &models.User{ID: userID.(int)} - source.CreatedBy 
= user - source.UpdatedBy = user - } - - if err := c.sourceORM.Create(source); err != nil { - return nil, fmt.Errorf("failed to create source: %s", err) - } - - telemetry.TrackSourceCreation(context.Background(), source) - - return source, nil -} - -// getOrCreateDestination finds or creates a destination based on the provided config -func (c *JobHandler) getOrCreateDestination(config *models.JobDestinationConfig, projectIDStr string) (*models.Destination, error) { - // Try to find an existing destination matching the criteria - destinations, err := c.destORM.GetByNameAndType(config.Name, config.Type, projectIDStr) - if err == nil && len(destinations) > 0 { - // Update the existing destination if found - dest := destinations[0] - dest.Config = config.Config - dest.Version = config.Version - - // Get user info for update - userID := c.GetSession(constants.SessionUserID) - if userID != nil { - user := &models.User{ID: userID.(int)} - dest.UpdatedBy = user - } - - if err := c.destORM.Update(dest); err != nil { - return nil, fmt.Errorf("failed to update destination: %s", err) - } - - return dest, nil +// @router /internal/worker/callback/sync-telemetry [post] +func (h *Handler) UpdateSyncTelemetry() { + var req struct { + JobID int `json:"job_id"` + WorkflowID string `json:"workflow_id"` + Event string `json:"event"` } - // Create a new destination if not found - dest := &models.Destination{ - Name: config.Name, - DestType: config.Type, - Config: config.Config, - Version: config.Version, - ProjectID: projectIDStr, + if err := json.Unmarshal(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, "Invalid request format", err) + return } - // Set user info - userID := c.GetSession(constants.SessionUserID) - if userID != nil { - user := &models.User{ID: userID.(int)} - dest.CreatedBy = user - dest.UpdatedBy = user + if req.JobID == 0 || req.WorkflowID == "" { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, 
"job_id and workflow_id are required", nil) + return } - if err := c.destORM.Create(dest); err != nil { - return nil, fmt.Errorf("failed to create destination: %s", err) + if err := h.etl.UpdateSyncTelemetry(h.Ctx.Request.Context(), req.JobID, req.WorkflowID, req.Event); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, "Failed to update sync telemetry", err) + return } - // Track destination creation event - telemetry.TrackDestinationCreation(context.Background(), dest) - return dest, nil + utils.SuccessResponse(&h.Controller, fmt.Sprintf("sync telemetry updated successfully for job_id[%d] workflow_id[%s] event[%s]", req.JobID, req.WorkflowID, req.Event), nil) } diff --git a/server/internal/handlers/auth_middleware.go b/server/internal/handlers/middleware/auth.go similarity index 71% rename from server/internal/handlers/auth_middleware.go rename to server/internal/handlers/middleware/auth.go index 49f830ce..9adaf8b6 100644 --- a/server/internal/handlers/auth_middleware.go +++ b/server/internal/handlers/middleware/auth.go @@ -1,11 +1,11 @@ -package handlers +package middleware import ( "github.com/beego/beego/v2/server/web" "github.com/beego/beego/v2/server/web/context" - "github.com/datazip/olake-frontend/server/internal/constants" - "github.com/datazip/olake-frontend/server/internal/models" + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/models/dto" ) // middleware only works if session is enabled @@ -14,7 +14,7 @@ func AuthMiddleware(ctx *context.Context) { if userID := ctx.Input.Session(constants.SessionUserID); userID == nil { // Send unauthorized response ctx.Output.SetStatus(401) - _ = ctx.Output.JSON(models.JSONResponse{ + _ = ctx.Output.JSON(dto.JSONResponse{ Message: "Unauthorized, try login again", Success: false, }, false, false) diff --git a/server/internal/handlers/source.go b/server/internal/handlers/source.go index a8b4192d..c96a5510 100644 --- 
a/server/internal/handlers/source.go +++ b/server/internal/handlers/source.go @@ -1,358 +1,260 @@ package handlers import ( - "context" - "encoding/json" + "errors" "fmt" "net/http" - "path/filepath" - "time" - - "github.com/beego/beego/v2/core/logs" - "github.com/beego/beego/v2/server/web" - - "github.com/datazip/olake-frontend/server/internal/constants" - "github.com/datazip/olake-frontend/server/internal/database" - "github.com/datazip/olake-frontend/server/internal/docker" - "github.com/datazip/olake-frontend/server/internal/models" - "github.com/datazip/olake-frontend/server/internal/telemetry" - "github.com/datazip/olake-frontend/server/internal/temporal" - "github.com/datazip/olake-frontend/server/utils" -) - -type SourceHandler struct { - web.Controller - sourceORM *database.SourceORM - userORM *database.UserORM - jobORM *database.JobORM - tempClient *temporal.Client -} -func (c *SourceHandler) Prepare() { - c.sourceORM = database.NewSourceORM() - c.userORM = database.NewUserORM() - c.jobORM = database.NewJobORM() - - // Initialize Temporal client - var err error - c.tempClient, err = temporal.NewClient() - if err != nil { - logs.Error("Failed to create Temporal client: %v", err) - } -} + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/models/dto" + "github.com/datazip-inc/olake-ui/server/utils" + "github.com/datazip-inc/olake-ui/server/utils/logger" +) // @router /project/:projectid/sources [get] -func (c *SourceHandler) GetAllSources() { - sources, err := c.sourceORM.GetAll() +func (h *Handler) ListSources() { + projectID, err := GetProjectIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to retrieve sources") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - projectIDStr := c.Ctx.Input.Param(":projectid") - sourceItems := 
make([]models.SourceDataItem, 0, len(sources)) - - for _, source := range sources { - item := models.SourceDataItem{ - ID: source.ID, - Name: source.Name, - Type: source.Type, - Version: source.Version, - Config: source.Config, - CreatedAt: source.CreatedAt.Format(time.RFC3339), - UpdatedAt: source.UpdatedAt.Format(time.RFC3339), - } - - setUsernames(&item.CreatedBy, &item.UpdatedBy, source.CreatedBy, source.UpdatedBy) + logger.Debugf("Get all sources initiated project_id[%s]", projectID) - jobs, err := c.jobORM.GetBySourceID(source.ID) - var success bool - item.Jobs, success = buildJobDataItems(jobs, err, projectIDStr, "source", c.tempClient, &c.Controller) - if !success { - return // Error occurred in buildJobDataItems - } - - sourceItems = append(sourceItems, item) + sources, err := h.etl.ListSources(h.Ctx.Request.Context(), projectID) + if err != nil { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to retrieve sources: %s", err), err) + return } - - utils.SuccessResponse(&c.Controller, sourceItems) + utils.SuccessResponse(&h.Controller, "sources listed successfully", sources) } // @router /project/:projectid/sources [post] -func (c *SourceHandler) CreateSource() { - var req models.CreateSourceRequest - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") +func (h *Handler) CreateSource() { + userID := GetUserIDFromSession(&h.Controller) + if userID == nil { + utils.ErrorResponse(&h.Controller, http.StatusUnauthorized, "Not authenticated", errors.New("not authenticated")) return } - // Convert request to Source model - source := &models.Source{ - Name: req.Name, - Type: req.Type, - Version: req.Version, - Config: req.Config, + projectID, err := GetProjectIDFromPath(&h.Controller) + if err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + 
return } - // Get project ID if needed - source.ProjectID = c.Ctx.Input.Param(":projectid") - - // Set created by if user is logged in - userID := c.GetSession(constants.SessionUserID) - if userID != nil { - user, err := c.userORM.GetByID(userID.(int)) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to get user") - return - } - source.CreatedBy = user - source.UpdatedBy = user + var req dto.CreateSourceRequest + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return } - if err := c.sourceORM.Create(source); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to create source: %s", err)) + + if err := dto.ValidateSourceType(req.Type); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Track source creation event - telemetry.TrackSourceCreation(context.Background(), source) + logger.Debugf("Create source initiated project_id[%s] source_type[%s] source_name[%s] user_id[%v]", + projectID, req.Type, req.Name, userID) + + if err := h.etl.CreateSource(h.Ctx.Request.Context(), &req, projectID, userID); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to create source: %s", err), err) + return + } - utils.SuccessResponse(&c.Controller, req) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("source %s created successfully", req.Name), req) } // @router /project/:projectid/sources/:id [put] -func (c *SourceHandler) UpdateSource() { - id := GetIDFromPath(&c.Controller) - projectID := c.Ctx.Input.Param(":projectid") - var req models.UpdateSourceRequest - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid 
request format") +func (h *Handler) UpdateSource() { + userID := GetUserIDFromSession(&h.Controller) + if userID == nil { + utils.ErrorResponse(&h.Controller, http.StatusUnauthorized, "Not authenticated", errors.New("not authenticated")) return } - // Get existing source - existingSource, err := c.sourceORM.GetByID(id) + + id, err := GetIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusNotFound, "Source not found") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Update fields - existingSource.Name = req.Name - existingSource.Config = req.Config - existingSource.Type = req.Type - existingSource.Version = req.Version - existingSource.UpdatedAt = time.Now() - - userID := c.GetSession(constants.SessionUserID) - if userID != nil { - user := &models.User{ID: userID.(int)} - existingSource.UpdatedBy = user + projectID, err := GetProjectIDFromPath(&h.Controller) + if err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return } - // Find jobs linked to this source - jobs, err := c.jobORM.GetBySourceID(existingSource.ID) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to fetch jobs for source %s", err)) + var req dto.UpdateSourceRequest + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Cancel workflows for those jobs - for _, job := range jobs { - err := cancelJobWorkflow(c.tempClient, job, projectID) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to cancel workflow for job %s", err)) - return - } + if err := dto.ValidateSourceType(req.Type); err != nil { + utils.ErrorResponse(&h.Controller, 
http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return } - // Persist update - if err := c.sourceORM.Update(existingSource); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to update source %s", err)) + logger.Debugf("Update source initiated project_id[%s] source_id[%d] source_type[%s] user_id[%v]", + projectID, id, req.Type, userID) + + if err := h.etl.UpdateSource(h.Ctx.Request.Context(), projectID, id, &req, userID); err != nil { + status := http.StatusInternalServerError + if errors.Is(err, constants.ErrSourceNotFound) { + status = http.StatusNotFound + } + utils.ErrorResponse(&h.Controller, status, fmt.Sprintf("failed to update source: %s", err), err) return } - // Track sources status after update - telemetry.TrackSourcesStatus(context.Background()) - utils.SuccessResponse(&c.Controller, req) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("source %s updated successfully", req.Name), req) } // @router /project/:projectid/sources/:id [delete] -func (c *SourceHandler) DeleteSource() { - id := GetIDFromPath(&c.Controller) - source, err := c.sourceORM.GetByID(id) +func (h *Handler) DeleteSource() { + id, err := GetIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusNotFound, "Source not found") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Get all jobs using this source - jobs, err := c.jobORM.GetBySourceID(id) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to get jobs for source") - return - } + logger.Debugf("Delete source initiated source_id[%d]", id) - // Deactivate all jobs using this source - for _, job := range jobs { - job.Active = false - if err := c.jobORM.Update(job); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to deactivate jobs using this source") 
- return + resp, err := h.etl.DeleteSource(h.Ctx.Request.Context(), id) + if err != nil { + if errors.Is(err, constants.ErrSourceNotFound) { + utils.ErrorResponse(&h.Controller, http.StatusNotFound, fmt.Sprintf("source not found: %s", err), err) + } else { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to delete source: %s", err), err) } - } - - // Delete the source - if err := c.sourceORM.Delete(id); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to delete source") return } - - telemetry.TrackSourcesStatus(context.Background()) - utils.SuccessResponse(&c.Controller, &models.DeleteSourceResponse{ - Name: source.Name, - }) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("source %s deleted successfully", resp.Name), resp) } // @router /project/:projectid/sources/test [post] -func (c *SourceHandler) TestConnection() { - var req models.SourceTestConnectionRequest - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") +func (h *Handler) TestSourceConnection() { + var req dto.SourceTestConnectionRequest + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - encryptedConfig, err := utils.Encrypt(req.Config) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to encrypt config") + + if err := dto.ValidateSourceType(req.Type); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - workflowID := fmt.Sprintf("test-connection-%s-%d", req.Type, time.Now().Unix()) - result, err := c.tempClient.TestConnection(context.Background(), workflowID, "config", req.Type, req.Version, encryptedConfig) - if result == nil { - 
result = map[string]interface{}{ - "message": err.Error(), - "status": "failed", - } - } - homeDir := docker.GetDefaultConfigDir() - mainLogDir := filepath.Join(homeDir, workflowID) - logs, err := utils.ReadLogs(mainLogDir) + + logger.Infof("Test source connection initiated source_type[%s] source_version[%s]", req.Type, req.Version) + + result, logs, err := h.etl.TestSourceConnection(h.Ctx.Request.Context(), &req) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to read logs: %s", err)) + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to verify credentials: %s", err), err) return } - utils.SuccessResponse(&c.Controller, models.TestConnectionResponse{ + + utils.SuccessResponse(&h.Controller, fmt.Sprintf("source %s connection tested successfully", req.Type), dto.TestConnectionResponse{ ConnectionResult: result, Logs: logs, }) } -// @router /sources/streams[post] -func (c *SourceHandler) GetSourceCatalog() { - var req models.StreamsRequest - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") +// @router /sources/streams [post] +func (h *Handler) GetSourceCatalog() { + var req dto.StreamsRequest + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - oldStreams := "" - // Load job details if JobID is provided - if req.JobID >= 0 { - job, err := c.jobORM.GetByID(req.JobID, true) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusNotFound, "Job not found") - return - } - oldStreams = job.StreamsConfig - } - encryptedConfig, err := utils.Encrypt(req.Config) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to encrypt config") + + if err := 
dto.ValidateSourceType(req.Type); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Use Temporal client to get the catalog - var newStreams map[string]interface{} - if c.tempClient != nil { - newStreams, err = c.tempClient.GetCatalog( - c.Ctx.Request.Context(), - req.Type, - req.Version, - encryptedConfig, - oldStreams, - req.JobName, - ) - } + + logger.Debugf("Get source catalog initiated source_type[%s] source_version[%s] job_id[%d]", + req.Type, req.Version, req.JobID) + + catalog, err := h.etl.GetSourceCatalog(h.Ctx.Request.Context(), &req) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to get catalog: %v", err)) + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to get source streams: %s", err), err) return } - utils.SuccessResponse(&c.Controller, newStreams) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("source %s catalog fetched successfully", req.Type), catalog) } // @router /sources/:id/jobs [get] -func (c *SourceHandler) GetSourceJobs() { - id := GetIDFromPath(&c.Controller) - // Check if source exists - _, err := c.sourceORM.GetByID(id) +func (h *Handler) GetSourceJobs() { + id, err := GetIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusNotFound, "Source not found") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Create a job ORM and get jobs by source ID - jobs, err := c.jobORM.GetBySourceID(id) + logger.Debugf("Get source jobs initiated source_id[%d]", id) + + jobs, err := h.etl.GetSourceJobs(h.Ctx.Request.Context(), id) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to get jobs by source ID") + if errors.Is(err, constants.ErrSourceNotFound) { + utils.ErrorResponse(&h.Controller, 
http.StatusNotFound, fmt.Sprintf("source not found: %s", err), err) + } else { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to get source jobs: %s", err), err) + } return } - // Format as required by API contract - utils.SuccessResponse(&c.Controller, map[string]interface{}{ - "jobs": jobs, - }) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("source %d jobs listed successfully", id), map[string]interface{}{"jobs": jobs}) } // @router /project/:projectid/sources/versions [get] -func (c *SourceHandler) GetSourceVersions() { - sourceType := c.GetString("type") - if sourceType == "" { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "source type is required") +func (h *Handler) GetSourceVersions() { + projectID, err := GetProjectIDFromPath(&h.Controller) + if err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - versions, _, err := utils.GetDriverImageTags(c.Ctx.Request.Context(), fmt.Sprintf("olakego/source-%s", sourceType), true) + sourceType := h.GetString("type") + logger.Debugf("Get source versions initiated project_id[%s] source_type[%s]", projectID, sourceType) + if sourceType == "" { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to get source versions: %s", err), err) + return + } + versions, err := h.etl.GetSourceVersions(h.Ctx.Request.Context(), sourceType) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to fetch driver versions: %s", err)) + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to get source versions: %s", err), err) return } - - utils.SuccessResponse(&c.Controller, map[string]interface{}{ - "version": versions, - }) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("source %s versions fetched successfully", sourceType), versions) } -// @router 
/project/:projectid/sources/spec [get] -func (c *SourceHandler) GetProjectSourceSpec() { - _ = c.Ctx.Input.Param(":projectid") +// @router /project/:projectid/sources/spec [post] +func (h *Handler) GetSourceSpec() { + projectID, err := GetProjectIDFromPath(&h.Controller) + if err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return + } - var req models.SpecRequest - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") + var req dto.SpecRequest + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - var specOutput models.SpecOutput - var err error - - specOutput, err = c.tempClient.FetchSpec( - c.Ctx.Request.Context(), - "", - req.Type, - req.Version, - ) + + if err := dto.ValidateSourceType(req.Type); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return + } + + logger.Debugf("Get source spec initiated project_id[%s] source_type[%s] source_version[%s]", + projectID, req.Type, req.Version) + + resp, err := h.etl.GetSourceSpec(h.Ctx.Request.Context(), &req) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to get spec: %v", err)) + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to get source spec: %s", err), err) return } - utils.SuccessResponse(&c.Controller, models.SpecResponse{ - Version: req.Version, - Type: req.Type, - Spec: specOutput.Spec, - }) + utils.SuccessResponse(&h.Controller, fmt.Sprintf("source %s spec fetched successfully", req.Type), resp) } diff --git a/server/internal/handlers/ui.go b/server/internal/handlers/ui.go new file mode 100644 index 
00000000..8eb2246b --- /dev/null +++ b/server/internal/handlers/ui.go @@ -0,0 +1,19 @@ +package handlers + +import ( + "net/http" + "path/filepath" + + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/spf13/viper" +) + +func (h *Handler) ServeFrontend() { + indexPath := viper.GetString(constants.FrontendIndexPath) + + // Set Content-Type early + h.Ctx.Output.ContentType("text/html") + + // Use built-in file serving for efficiency and proper headers + http.ServeFile(h.Ctx.ResponseWriter, h.Ctx.Request, filepath.Clean(indexPath)) +} diff --git a/server/internal/handlers/user.go b/server/internal/handlers/user.go index b56401da..be9ade83 100644 --- a/server/internal/handlers/user.go +++ b/server/internal/handlers/user.go @@ -1,103 +1,90 @@ package handlers import ( - "encoding/json" + "errors" "fmt" "net/http" - "strconv" - "time" - "github.com/beego/beego/v2/server/web" - - "github.com/datazip/olake-frontend/server/internal/database" - "github.com/datazip/olake-frontend/server/internal/models" - "github.com/datazip/olake-frontend/server/utils" + "github.com/datazip-inc/olake-ui/server/internal/models" + "github.com/datazip-inc/olake-ui/server/utils" + "github.com/datazip-inc/olake-ui/server/utils/logger" ) -type UserHandler struct { - web.Controller - userORM *database.UserORM -} - -func (c *UserHandler) Prepare() { - c.userORM = database.NewUserORM() -} - // @router /users [post] -func (c *UserHandler) CreateUser() { +func (h *Handler) CreateUser() { var req models.User - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) + return + } + + if req.Username == "" || req.Email == "" || req.Password == "" { + utils.ErrorResponse(&h.Controller, 
http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", errors.New("missing required user fields")), errors.New("missing required user fields")) return } - if err := c.userORM.Create(&req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, fmt.Sprintf("Failed to create user: %s", err)) + logger.Infof("Create user initiated username[%s] email[%s]", req.Username, req.Email) + + if err := h.etl.CreateUser(h.Ctx.Request.Context(), &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to create user: %s", err), err) return } - utils.SuccessResponse(&c.Controller, req) + utils.SuccessResponse(&h.Controller, "user created successfully", req) } // @router /users [get] -func (c *UserHandler) GetAllUsers() { - users, err := c.userORM.GetAll() +func (h *Handler) GetAllUsers() { + logger.Info("Get all users initiated") + + users, err := h.etl.GetAllUsers(h.Ctx.Request.Context()) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to retrieve users") + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to get users: %s", err), err) return } - utils.SuccessResponse(&c.Controller, users) + utils.SuccessResponse(&h.Controller, "users listed successfully", users) } // @router /users/:id [put] -func (c *UserHandler) UpdateUser() { - idStr := c.Ctx.Input.Param(":id") - id, err := strconv.Atoi(idStr) +func (h *Handler) UpdateUser() { + id, err := GetIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid user ID") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } var req models.User - if err := json.Unmarshal(c.Ctx.Input.RequestBody, &req); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid request format") - return - } - - // Get existing user - 
existingUser, err := c.userORM.GetByID(id) - if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusNotFound, "User not found") + if err := UnmarshalAndValidate(h.Ctx.Input.RequestBody, &req); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - // Update fields - existingUser.Username = req.Username - existingUser.Email = req.Email - existingUser.UpdatedAt = time.Now() + logger.Infof("Update user initiated user_id[%d] username[%s]", id, req.Username) - if err := c.userORM.Update(existingUser); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to update user") + updatedUser, err := h.etl.UpdateUser(h.Ctx.Request.Context(), id, &req) + if err != nil { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to update user: %s", err), err) return } - utils.SuccessResponse(&c.Controller, existingUser) + utils.SuccessResponse(&h.Controller, "user updated successfully", updatedUser) } // @router /users/:id [delete] -func (c *UserHandler) DeleteUser() { - idStr := c.Ctx.Input.Param(":id") - id, err := strconv.Atoi(idStr) +func (h *Handler) DeleteUser() { + id, err := GetIDFromPath(&h.Controller) if err != nil { - utils.ErrorResponse(&c.Controller, http.StatusBadRequest, "Invalid user ID") + utils.ErrorResponse(&h.Controller, http.StatusBadRequest, fmt.Sprintf("failed to validate request: %s", err), err) return } - if err := c.userORM.Delete(id); err != nil { - utils.ErrorResponse(&c.Controller, http.StatusInternalServerError, "Failed to delete user") + logger.Infof("Delete user initiated user_id[%d]", id) + + if err := h.etl.DeleteUser(h.Ctx.Request.Context(), id); err != nil { + utils.ErrorResponse(&h.Controller, http.StatusInternalServerError, fmt.Sprintf("failed to delete user: %s", err), err) return } - c.Ctx.ResponseWriter.WriteHeader(http.StatusNoContent) + utils.SuccessResponse(&h.Controller, "user 
deleted successfully", nil) } diff --git a/server/internal/handlers/utils.go b/server/internal/handlers/utils.go new file mode 100644 index 00000000..ea4fdb46 --- /dev/null +++ b/server/internal/handlers/utils.go @@ -0,0 +1,49 @@ +package handlers + +import ( + "encoding/json" + "fmt" + "strconv" + + "github.com/beego/beego/v2/server/web" + + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/models/dto" +) + +// get id from path +func GetIDFromPath(c *web.Controller) (int, error) { + idStr := c.Ctx.Input.Param(":id") + id, err := strconv.Atoi(idStr) + if err != nil { + return 0, fmt.Errorf("invalid id: %s", err) + } + return id, nil +} + +// get id from path +func GetProjectIDFromPath(c *web.Controller) (string, error) { + projectID := c.Ctx.Input.Param(":projectid") + if projectID == "" { + return "", fmt.Errorf("project id is required") + } + return projectID, nil +} + +// Helper to extract user ID from session +func GetUserIDFromSession(c *web.Controller) *int { + if sessionUserID := c.GetSession(constants.SessionUserID); sessionUserID != nil { + if uid, ok := sessionUserID.(int); ok { + return &uid + } + } + return nil +} + +// UnmarshalAndValidate unmarshals JSON from request body into the provided struct +func UnmarshalAndValidate(requestBody []byte, target interface{}) error { + if err := json.Unmarshal(requestBody, target); err != nil { + return err + } + return dto.Validate(target) +} diff --git a/server/internal/logger/logger.go b/server/internal/logger/logger.go deleted file mode 100644 index b45fb39a..00000000 --- a/server/internal/logger/logger.go +++ /dev/null @@ -1,56 +0,0 @@ -package logger - -import ( - "os" - "path" - "sync" - - "github.com/beego/beego/v2/core/logs" -) - -var ( - loggerInitOnce sync.Once -) - -func InitLogger(logdir string) { - loggerInitOnce.Do(func() { - // Clear existing loggers first - logs.Reset() - - // Create logs directory - if err := os.MkdirAll(logdir, 0755); 
err != nil { - panic("Failed to create log directory: " + err.Error()) - } - - // Console configuration - consoleConfig := `{ - "level": 7, - "color": true - }` - - // File configuration - fileConfig := `{ - "filename": "` + path.Join(logdir, "olake-server.log") + `", - "level": 7, - "maxlines": 1000, - "maxdays": 7, - "daily": false, - "rotate": true, - "perm": "0644" - }` - - // Initialize loggers - if err := logs.SetLogger(logs.AdapterConsole, consoleConfig); err != nil { - panic("Console logger init failed: " + err.Error()) - } - - if err := logs.SetLogger(logs.AdapterFile, fileConfig); err != nil { - panic("File logger init failed: " + err.Error()) - } - - // Configure logger behavior - logs.SetLogFuncCallDepth(3) - logs.EnableFuncCallDepth(true) - logs.SetLevel(logs.LevelDebug) - }) -} diff --git a/server/internal/models/db.go b/server/internal/models/db.go index 65aee7ed..67642bfe 100644 --- a/server/internal/models/db.go +++ b/server/internal/models/db.go @@ -3,7 +3,7 @@ package models import ( "time" - "github.com/datazip/olake-frontend/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/constants" ) // BaseModel with common fields diff --git a/server/internal/models/dto/requests.go b/server/internal/models/dto/requests.go new file mode 100644 index 00000000..ade7571a --- /dev/null +++ b/server/internal/models/dto/requests.go @@ -0,0 +1,98 @@ +package dto + +// Common fields for source/destination config +// source and destination are driver in olake cli +type DriverConfig struct { + ID *int `json:"id,omitempty"` + Name string `json:"name"` + Type string `json:"type"` + Version string `json:"version"` + Source string `json:"source_type"` + Config string `json:"config" orm:"type(jsonb)"` +} + +type LoginRequest struct { + Username string `json:"username" validate:"required"` + Password string `json:"password" validate:"required"` +} + +type SpecRequest struct { + Type string `json:"type" validate:"required"` + Version string 
`json:"version" validate:"required"` +} + +// check unique job name request +type CheckUniqueJobNameRequest struct { + JobName string `json:"job_name"` +} + +// Test connection requests +type SourceTestConnectionRequest struct { + Type string `json:"type" validate:"required"` + Version string `json:"version" validate:"required"` + Config string `json:"config" orm:"type(jsonb)" validate:"required"` +} +type StreamsRequest struct { + Name string `json:"name" validate:"required"` + Type string `json:"type" validate:"required"` + Version string `json:"version" validate:"required"` + Config string `json:"config" orm:"type(jsonb)" validate:"required"` + JobID int `json:"job_id" validate:"required"` + JobName string `json:"job_name" validate:"required"` +} + +// TODO: frontend needs to send only version no need for source version +type DestinationTestConnectionRequest struct { + Type string `json:"type" validate:"required"` + Version string `json:"version" validate:"required"` + Config string `json:"config" validate:"required"` + SourceType string `json:"source_type"` + SourceVersion string `json:"source_version"` +} + +type CreateSourceRequest struct { + Name string `json:"name" validate:"required"` + Type string `json:"type" validate:"required"` + Version string `json:"version" validate:"required"` + Config string `json:"config" orm:"type(jsonb)" validate:"required"` +} + +type UpdateSourceRequest struct { + Name string `json:"name" validate:"required"` + Type string `json:"type" validate:"required"` + Version string `json:"version" validate:"required"` + Config string `json:"config" orm:"type(jsonb)" validate:"required"` +} + +type CreateDestinationRequest struct { + Name string `json:"name" validate:"required"` + Type string `json:"type" validate:"required"` + Version string `json:"version" validate:"required"` + Config string `json:"config" orm:"type(jsonb)" validate:"required"` +} + +type UpdateDestinationRequest struct { + Name string `json:"name" 
validate:"required"` + Type string `json:"type" validate:"required"` + Version string `json:"version" validate:"required"` + Config string `json:"config" orm:"type(jsonb)" validate:"required"` +} + +type CreateJobRequest struct { + Name string `json:"name" validate:"required"` + Source *DriverConfig `json:"source" validate:"required"` + Destination *DriverConfig `json:"destination" validate:"required"` + Frequency string `json:"frequency" validate:"required"` + StreamsConfig string `json:"streams_config" orm:"type(jsonb)" validate:"required"` + Activate bool `json:"activate,omitempty"` +} + +type UpdateJobRequest = CreateJobRequest + +type JobTaskRequest struct { + FilePath string `json:"file_path" validate:"required"` +} + +type JobStatusRequest struct { + Activate bool `json:"activate"` +} diff --git a/server/internal/models/response.go b/server/internal/models/dto/response.go similarity index 70% rename from server/internal/models/response.go rename to server/internal/models/dto/response.go index 12e0f702..54331bb8 100644 --- a/server/internal/models/response.go +++ b/server/internal/models/dto/response.go @@ -1,9 +1,4 @@ -package models - -type LoginResponse struct { - Message string `json:"message"` - Success bool `json:"success"` -} +package dto type JSONResponse struct { Success bool `json:"success"` @@ -20,13 +15,6 @@ type SpecOutput struct { Spec map[string]interface{} `json:"spec"` } -// Reuse generic API response with generics -type APIResponse[T any] struct { - Success bool `json:"success"` - Message string `json:"message"` - Data T `json:"data"` -} - type DeleteSourceResponse struct { Name string `json:"name"` } @@ -52,19 +40,19 @@ type TestConnectionResponse struct { // Job response type JobResponse struct { - ID int `json:"id"` - Name string `json:"name"` - Source JobSourceConfig `json:"source"` - Destination JobDestinationConfig `json:"destination"` - StreamsConfig string `json:"streams_config"` - Frequency string `json:"frequency"` - LastRunTime 
string `json:"last_run_time,omitempty"` - LastRunState string `json:"last_run_state,omitempty"` - CreatedAt string `json:"created_at"` - UpdatedAt string `json:"updated_at"` - Activate bool `json:"activate"` - CreatedBy string `json:"created_by,omitempty"` - UpdatedBy string `json:"updated_by,omitempty"` + ID int `json:"id"` + Name string `json:"name"` + Source DriverConfig `json:"source"` + Destination DriverConfig `json:"destination"` + StreamsConfig string `json:"streams_config"` + Frequency string `json:"frequency"` + LastRunTime string `json:"last_run_time,omitempty"` + LastRunState string `json:"last_run_state,omitempty"` + CreatedAt string `json:"created_at"` + UpdatedAt string `json:"updated_at"` + Activate bool `json:"activate"` + CreatedBy string `json:"created_by,omitempty"` + UpdatedBy string `json:"updated_by,omitempty"` } type JobTask struct { diff --git a/server/internal/models/dto/validate.go b/server/internal/models/dto/validate.go new file mode 100644 index 00000000..ce2a33eb --- /dev/null +++ b/server/internal/models/dto/validate.go @@ -0,0 +1,47 @@ +package dto + +import ( + "fmt" + + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/go-playground/validator/v10" +) + +// ValidateStruct validates any struct that has `validate` tags. 
+func Validate(s interface{}) error { + validate := validator.New() + err := validate.Struct(s) + if err != nil { + if _, ok := err.(*validator.InvalidValidationError); ok { + return fmt.Errorf("invalid validation: %s", err) + } + + // collect all validation errors into a single message + var errorMessages string + for _, err := range err.(validator.ValidationErrors) { + errorMessages += fmt.Sprintf("Field '%s' failed validation rule '%s'; ", err.Field(), err.Tag()) + } + return fmt.Errorf("validation failed: %s", errorMessages) + } + return nil +} + +// ValidateSourceType checks if the provided type is in the list of supported source types +func ValidateSourceType(t string) error { + for _, allowed := range constants.SupportedSourceTypes { + if t == allowed { + return nil + } + } + return fmt.Errorf("invalid source type '%s', supported sources are: %v", t, constants.SupportedSourceTypes) +} + +// ValidateDestinationType checks if the provided type is in the list of supported destination types +func ValidateDestinationType(t string) error { + for _, allowed := range constants.SupportedDestinationTypes { + if t == allowed { + return nil + } + } + return fmt.Errorf("invalid destination type '%s', supported destinations are: %v", t, constants.SupportedDestinationTypes) +} diff --git a/server/internal/models/requests.go b/server/internal/models/requests.go deleted file mode 100644 index 1563f7cc..00000000 --- a/server/internal/models/requests.go +++ /dev/null @@ -1,83 +0,0 @@ -package models - -// Common fields for source/destination config -type ConnectorConfig struct { - Name string `json:"name"` - Type string `json:"type"` - Version string `json:"version"` - Source string `json:"source_type"` - Config string `json:"config" orm:"type(jsonb)"` -} - -// LoginRequest represents the expected JSON structure for login requests -type LoginRequest struct { - Username string `json:"username"` - Password string `json:"password"` -} - -// Spec request for getting specs -type 
SpecRequest struct { - Type string `json:"type"` - Version string `json:"version"` - Catalog string `json:"catalog"` -} - -// check unique job name request -type CheckUniqueJobNameRequest struct { - JobName string `json:"job_name"` -} - -// Test connection requests -type SourceTestConnectionRequest struct { - ConnectorConfig - SourceID int `json:"source_id"` -} -type StreamsRequest struct { - ConnectorConfig - JobID int `json:"job_id"` - JobName string `json:"job_name"` -} - -type DestinationTestConnectionRequest struct { - ConnectorConfig -} - -// Create/Update source and destination requests -type CreateSourceRequest struct { - ConnectorConfig -} - -type UpdateSourceRequest struct { - ConnectorConfig -} - -type CreateDestinationRequest struct { - ConnectorConfig -} - -type UpdateDestinationRequest struct { - ConnectorConfig -} - -// Job source and destination configurations -type JobSourceConfig = ConnectorConfig -type JobDestinationConfig = ConnectorConfig - -// Create and update job requests -type CreateJobRequest struct { - Name string `json:"name"` - Source JobSourceConfig `json:"source"` - Destination JobDestinationConfig `json:"destination"` - Frequency string `json:"frequency"` - StreamsConfig string `json:"streams_config" orm:"type(jsonb)"` - Activate bool `json:"activate,omitempty"` -} - -type UpdateJobRequest struct { - Name string `json:"name"` - Source JobSourceConfig `json:"source"` - Destination JobDestinationConfig `json:"destination"` - Frequency string `json:"frequency"` - StreamsConfig string `json:"streams_config" orm:"type(jsonb)"` - Activate bool `json:"activate,omitempty"` -} diff --git a/server/internal/services/etl/auth.go b/server/internal/services/etl/auth.go new file mode 100644 index 00000000..b5fb1a83 --- /dev/null +++ b/server/internal/services/etl/auth.go @@ -0,0 +1,64 @@ +package services + +import ( + "context" + "fmt" + "strings" + + "github.com/datazip-inc/olake-ui/server/internal/models" + 
"github.com/datazip-inc/olake-ui/server/utils/telemetry" + "golang.org/x/crypto/bcrypt" +) + +// Auth-related methods on AppService + +func (s *ETLService) Login(ctx context.Context, username, password string) (*models.User, error) { + user, err := s.db.GetUserByUsername(username) + if err != nil { + if strings.Contains(err.Error(), "no row found") { + return nil, fmt.Errorf("user not found: %s", err) + } + return nil, fmt.Errorf("failed to get user: %s", err) + } + + if err := s.db.CompareUserPassword(user.Password, password); err != nil { + return nil, fmt.Errorf("invalid credentials: %s", err) + } + + telemetry.TrackUserLogin(ctx, user) + + return user, nil +} + +func (s *ETLService) Signup(_ context.Context, user *models.User) error { + hashedPassword, err := bcrypt.GenerateFromPassword([]byte(user.Password), bcrypt.DefaultCost) + if err != nil { + return fmt.Errorf("failed to hash password: %s", err) + } + user.Password = string(hashedPassword) + + if err := s.db.CreateUser(user); err != nil { + if strings.Contains(err.Error(), "duplicate") || strings.Contains(err.Error(), "unique") { + return fmt.Errorf("user already exists: %s", err) + } + return fmt.Errorf("failed to create user: %s", err) + } + + return nil +} + +func (s *ETLService) GetUserByID(userID int) (*models.User, error) { + user, err := s.db.GetUserByID(userID) + if err != nil { + return nil, fmt.Errorf("failed to find user: %s", err) + } + return user, nil +} + +func (s *ETLService) ValidateUser(userID int) error { + _, err := s.db.GetUserByID(userID) + if err != nil { + return fmt.Errorf("failed to validate user: %s", err) + } + return nil +} diff --git a/server/internal/services/etl/destination.go b/server/internal/services/etl/destination.go new file mode 100644 index 00000000..067171db --- /dev/null +++ b/server/internal/services/etl/destination.go @@ -0,0 +1,231 @@ +package services + +import ( + "context" + "fmt" + "path/filepath" + "time" + + 
"github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/models" + "github.com/datazip-inc/olake-ui/server/internal/models/dto" + "github.com/datazip-inc/olake-ui/server/utils" + "github.com/datazip-inc/olake-ui/server/utils/telemetry" +) + +// Destination-related methods on AppService + +// ListDestinations returns all destinations for a project with lightweight job summaries. +func (s *ETLService) ListDestinations(_ context.Context, projectID string) ([]dto.DestinationDataItem, error) { + destinations, err := s.db.ListDestinationsByProjectID(projectID) + if err != nil { + return nil, fmt.Errorf("failed to list destinations: %s", err) + } + + destIDs := make([]int, 0, len(destinations)) + for _, dest := range destinations { + destIDs = append(destIDs, dest.ID) + } + + var allJobs []*models.Job + allJobs, err = s.db.GetJobsByDestinationID(destIDs) + if err != nil { + return nil, fmt.Errorf("failed to list jobs: %s", err) + } + + jobsByDestID := make(map[int][]*models.Job) + for _, job := range allJobs { + jobsByDestID[job.DestID.ID] = append(jobsByDestID[job.DestID.ID], job) + } + + destItems := make([]dto.DestinationDataItem, 0, len(destinations)) + for _, dest := range destinations { + entity := dto.DestinationDataItem{ + ID: dest.ID, + Name: dest.Name, + Type: dest.DestType, + Version: dest.Version, + Config: dest.Config, + CreatedAt: dest.CreatedAt.Format(time.RFC3339), + UpdatedAt: dest.UpdatedAt.Format(time.RFC3339), + } + setUsernames(&entity.CreatedBy, &entity.UpdatedBy, dest.CreatedBy, dest.UpdatedBy) + + jobs := jobsByDestID[dest.ID] + jobItems, err := buildJobDataItems(jobs, s.temporal, "destination") + if err != nil { + return nil, fmt.Errorf("failed to build job data items: %s", err) + } + entity.Jobs = jobItems + destItems = append(destItems, entity) + } + + return destItems, nil +} + +func (s *ETLService) CreateDestination(ctx context.Context, req *dto.CreateDestinationRequest, projectID string, 
userID *int) error { + destination := &models.Destination{ + Name: req.Name, + DestType: req.Type, + Version: req.Version, + Config: req.Config, + ProjectID: projectID, + } + user := &models.User{ID: *userID} + destination.CreatedBy = user + destination.UpdatedBy = user + + if err := s.db.CreateDestination(destination); err != nil { + return fmt.Errorf("failed to create destination: %s", err) + } + + telemetry.TrackDestinationCreation(ctx, destination) + return nil +} + +func (s *ETLService) UpdateDestination(ctx context.Context, id int, projectID string, req *dto.UpdateDestinationRequest, userID *int) error { + existingDest, err := s.db.GetDestinationByID(id) + if err != nil { + return fmt.Errorf("failed to get destination: %s", err) + } + + existingDest.Name = req.Name + existingDest.DestType = req.Type + existingDest.Version = req.Version + existingDest.Config = req.Config + + user := &models.User{ID: *userID} + existingDest.UpdatedBy = user + + jobs, err := s.db.GetJobsByDestinationID([]int{existingDest.ID}) + if err != nil { + return fmt.Errorf("failed to fetch jobs for destination update: %s", err) + } + + if err := cancelAllJobWorkflows(ctx, s.temporal, jobs, projectID); err != nil { + return fmt.Errorf("failed to cancel workflows for destination update: %s", err) + } + + if err := s.db.UpdateDestination(existingDest); err != nil { + return fmt.Errorf("failed to update destination: %s", err) + } + + telemetry.TrackDestinationsStatus(ctx) + return nil +} + +func (s *ETLService) DeleteDestination(ctx context.Context, id int) (*dto.DeleteDestinationResponse, error) { + dest, err := s.db.GetDestinationByID(id) + if err != nil { + return nil, fmt.Errorf("failed to find destination: %s", err) + } + + jobs, err := s.db.GetJobsByDestinationID([]int{id}) + if err != nil { + return nil, fmt.Errorf("failed to retrieve jobs for destination deletion: %s", err) + } + if len(jobs) > 0 { + return nil, fmt.Errorf("cannot delete destination '%s' id[%d] because it is used in 
%d jobs; please delete the associated jobs first", dest.Name, id, len(jobs)) + } + var jobIDs []int + for _, job := range jobs { + job.Active = false + jobIDs = append(jobIDs, job.ID) + } + + if err := s.db.DeactivateJobs(jobIDs); err != nil { + return nil, fmt.Errorf("failed to deactivate jobs for destination deletion: %s", err) + } + + if err := s.db.DeleteDestination(id); err != nil { + return nil, fmt.Errorf("failed to delete destination: %s", err) + } + + telemetry.TrackDestinationsStatus(ctx) + return &dto.DeleteDestinationResponse{Name: dest.Name}, nil +} + +func (s *ETLService) TestDestinationConnection(ctx context.Context, req *dto.DestinationTestConnectionRequest) (map[string]interface{}, []map[string]interface{}, error) { + version := req.Version + driver := req.SourceType + if driver == "" { + var err error + _, driver, err = utils.GetDriverImageTags(ctx, "", true) + if err != nil { + return nil, nil, fmt.Errorf("failed to get driver image tags: %s", err) + } + } + + encryptedConfig, err := utils.Encrypt(req.Config) + if err != nil { + return nil, nil, fmt.Errorf("failed to encrypt config for test connection: %s", err) + } + workflowID := fmt.Sprintf("test-connection-%s-%d", req.Type, time.Now().Unix()) + result, err := s.temporal.VerifyDriverCredentials(ctx, workflowID, "destination", driver, version, encryptedConfig) + // TODO: handle from frontend + if result == nil { + result = map[string]interface{}{ + "message": err.Error(), + "status": "failed", + } + } + + if err != nil { + return result, nil, fmt.Errorf("connection test failed: %s", err) + } + + homeDir := constants.DefaultConfigDir + mainLogDir := filepath.Join(homeDir, workflowID) + logs, err := utils.ReadLogs(mainLogDir) + if err != nil { + return result, nil, fmt.Errorf("failed to read logs destination_type[%s] destination_version[%s] error[%s]", + req.Type, req.Version, err) + } + + return result, logs, nil +} + +func (s *ETLService) GetDestinationJobs(_ context.Context, id int) 
([]*models.Job, error) { + if _, err := s.db.GetDestinationByID(id); err != nil { + return nil, fmt.Errorf("failed to find destination: %s", err) + } + + jobs, err := s.db.GetJobsByDestinationID([]int{id}) + if err != nil { + return nil, fmt.Errorf("failed to get jobs by destination: %s", err) + } + + return jobs, nil +} + +func (s *ETLService) GetDestinationVersions(ctx context.Context, destType string) (map[string]interface{}, error) { + if destType == "" { + return nil, fmt.Errorf("destination type is required") + } + + versions, _, err := utils.GetDriverImageTags(ctx, "", true) + if err != nil { + return nil, fmt.Errorf("failed to get driver image tags: %s", err) + } + + return map[string]interface{}{"version": versions}, nil +} + +// TODO: cache spec in db for each version +func (s *ETLService) GetDestinationSpec(ctx context.Context, req *dto.SpecRequest) (dto.SpecResponse, error) { + _, driver, err := utils.GetDriverImageTags(ctx, "", true) + if err != nil { + return dto.SpecResponse{}, fmt.Errorf("failed to get driver image tags: %s", err) + } + + specOut, err := s.temporal.GetDriverSpecs(ctx, req.Type, driver, req.Version) + if err != nil { + return dto.SpecResponse{}, fmt.Errorf("failed to get spec: %s", err) + } + + return dto.SpecResponse{ + Version: req.Version, + Type: req.Type, + Spec: specOut.Spec, + }, nil +} diff --git a/server/internal/services/etl/job.go b/server/internal/services/etl/job.go new file mode 100644 index 00000000..9da89f6d --- /dev/null +++ b/server/internal/services/etl/job.go @@ -0,0 +1,399 @@ +package services + +import ( + "context" + "crypto/sha256" + "fmt" + "path/filepath" + "strings" + "time" + + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/models" + "github.com/datazip-inc/olake-ui/server/internal/models/dto" + "github.com/datazip-inc/olake-ui/server/utils" + "github.com/datazip-inc/olake-ui/server/utils/logger" + 
"github.com/datazip-inc/olake-ui/server/utils/telemetry" + "go.temporal.io/api/workflowservice/v1" +) + +// Job-related methods on AppService + +func (s *ETLService) ListJobs(ctx context.Context, projectID string) ([]dto.JobResponse, error) { + jobs, err := s.db.ListJobsByProjectID(projectID) + if err != nil { + return nil, fmt.Errorf("failed to list jobs: %s", err) + } + + jobResponses := make([]dto.JobResponse, 0, len(jobs)) + for _, job := range jobs { + jobResp, err := s.buildJobResponse(ctx, job, projectID) + if err != nil { + return nil, fmt.Errorf("failed to build job response: %s", err) + } + jobResponses = append(jobResponses, jobResp) + } + + return jobResponses, nil +} + +func (s *ETLService) CreateJob(ctx context.Context, req *dto.CreateJobRequest, projectID string, userID *int) error { + unique, err := s.db.IsJobNameUniqueInProject(projectID, req.Name) + if err != nil { + return fmt.Errorf("failed to check job name uniqueness: %s", err) + } + if !unique { + return fmt.Errorf("job name '%s' is not unique", req.Name) + } + source, err := s.upsertSource(req.Source, projectID, userID) + if err != nil { + return fmt.Errorf("failed to process source: %s", err) + } + + dest, err := s.upsertDestination(req.Destination, projectID, userID) + if err != nil { + return fmt.Errorf("failed to process destination: %s", err) + } + + user := &models.User{ID: *userID} + job := &models.Job{ + Name: req.Name, + SourceID: source, + DestID: dest, + Active: true, + Frequency: req.Frequency, + StreamsConfig: req.StreamsConfig, + State: "{}", + ProjectID: projectID, + CreatedBy: user, + UpdatedBy: user, + } + if err := s.db.CreateJob(job); err != nil { + return fmt.Errorf("failed to create job: %s", err) + } + + defer func() { + if err != nil { + if err := s.db.DeleteJob(job.ID); err != nil { + logger.Errorf("failed to delete job: %s", err) + } + } + }() + + if err = s.temporal.CreateSchedule(ctx, job); err != nil { + return fmt.Errorf("failed to create temporal workflow: %s", 
err) + } + + telemetry.TrackJobCreation(ctx, &models.Job{Name: req.Name}) + return nil +} + +func (s *ETLService) UpdateJob(ctx context.Context, req *dto.UpdateJobRequest, projectID string, jobID int, userID *int) error { + existingJob, err := s.db.GetJobByID(jobID, true) + if err != nil { + return fmt.Errorf("failed to get job: %s", err) + } + + // Snapshot previous job state for compensation on schedule update failure + prevJob := *existingJob + + source, err := s.upsertSource(req.Source, projectID, userID) + if err != nil { + return fmt.Errorf("failed to process source for job update: %s", err) + } + + dest, err := s.upsertDestination(req.Destination, projectID, userID) + if err != nil { + return fmt.Errorf("failed to process destination for job update: %s", err) + } + + existingJob.Name = req.Name // TODO: job name cant be changed + existingJob.SourceID = source + existingJob.DestID = dest + existingJob.Active = req.Activate + existingJob.Frequency = req.Frequency + existingJob.StreamsConfig = req.StreamsConfig + existingJob.ProjectID = projectID + existingJob.UpdatedBy = &models.User{ID: *userID} + // cancel existing workflow + err = cancelAllJobWorkflows(ctx, s.temporal, []*models.Job{existingJob}, projectID) + if err != nil { + return fmt.Errorf("failed to cancel workflow for job %s", err) + } + if err := s.db.UpdateJob(existingJob); err != nil { + return fmt.Errorf("failed to update job: %s", err) + } + + err = s.temporal.UpdateSchedule(ctx, existingJob.Frequency, existingJob.ProjectID, existingJob.ID) + if err != nil { + // Compensation: restore previous DB state if schedule update fails + if rerr := s.db.UpdateJob(&prevJob); rerr != nil { + logger.Errorf("failed to restore job after schedule update error: %s", rerr) + } + return fmt.Errorf("failed to update temporal workflow: %s", err) + } + + telemetry.TrackJobEntity(ctx) + return nil +} + +func (s *ETLService) DeleteJob(ctx context.Context, jobID int) (string, error) { + job, err := 
s.db.GetJobByID(jobID, true) + if err != nil { + return "", fmt.Errorf("failed to find job: %s", err) + } + + if err = s.temporal.DeleteSchedule(ctx, job.ProjectID, job.ID); err != nil { + return "", fmt.Errorf("failed to delete temporal workflow: %s", err) + } + + if err := s.db.DeleteJob(jobID); err != nil { + return "", fmt.Errorf("failed to delete job: %s", err) + } + + telemetry.TrackJobEntity(ctx) + return job.Name, nil +} + +func (s *ETLService) SyncJob(ctx context.Context, projectID string, jobID int) (interface{}, error) { + if err := s.temporal.TriggerSchedule(ctx, projectID, jobID); err != nil { + return nil, fmt.Errorf("failed to trigger sync: %s", err) + } + + return map[string]any{ + "message": "sync triggered successfully", + }, nil +} + +func (s *ETLService) CancelJobRun(ctx context.Context, projectID string, jobID int) error { + job, err := s.db.GetJobByID(jobID, true) + if err != nil { + return fmt.Errorf("failed to find job: %s", err) + } + + jobSlice := []*models.Job{job} + if err := cancelAllJobWorkflows(ctx, s.temporal, jobSlice, projectID); err != nil { + return fmt.Errorf("failed to cancel job workflow: %s", err) + } + return nil +} + +func (s *ETLService) ActivateJob(ctx context.Context, jobID int, req dto.JobStatusRequest, userID *int) error { + job, err := s.db.GetJobByID(jobID, true) + if err != nil { + return fmt.Errorf("failed to find job: %s", err) + } + + if req.Activate == job.Active { + return nil + } + + if req.Activate { + if err := s.temporal.ResumeSchedule(ctx, job.ProjectID, job.ID); err != nil { + return fmt.Errorf("failed to unpause schedule: %s", err) + } + } else { + if err := s.temporal.PauseSchedule(ctx, job.ProjectID, job.ID); err != nil { + return fmt.Errorf("failed to pause schedule: %s", err) + } + } + + job.Active = req.Activate + user := &models.User{ID: *userID} + job.UpdatedBy = user + + if err := s.db.UpdateJob(job); err != nil { + return fmt.Errorf("failed to update job activation status: %s", err) + } + + 
return nil +} + +func (s *ETLService) CheckUniqueJobName(_ context.Context, projectID string, req dto.CheckUniqueJobNameRequest) (bool, error) { + unique, err := s.db.IsJobNameUniqueInProject(projectID, req.JobName) + if err != nil { + return false, fmt.Errorf("failed to check job name uniqueness: %s", err) + } + + return unique, nil +} + +func (s *ETLService) GetJobTasks(ctx context.Context, projectID string, jobID int) ([]dto.JobTask, error) { + job, err := s.db.GetJobByID(jobID, true) + if err != nil { + return nil, fmt.Errorf("failed to find job: %s", err) + } + + var tasks []dto.JobTask + query := fmt.Sprintf("WorkflowId between 'sync-%s-%d' and 'sync-%s-%d-~'", projectID, job.ID, projectID, job.ID) + + resp, err := s.temporal.ListWorkflow(ctx, &workflowservice.ListWorkflowExecutionsRequest{ + Query: query, + }) + if err != nil { + return nil, fmt.Errorf("failed to list workflows: %s", err) + } + + for _, execution := range resp.Executions { + startTime := execution.StartTime.AsTime().UTC() + var runTime string + if execution.CloseTime != nil { + runTime = execution.CloseTime.AsTime().UTC().Sub(startTime).Round(time.Second).String() + } else { + runTime = time.Since(startTime).Round(time.Second).String() + } + tasks = append(tasks, dto.JobTask{ + Runtime: runTime, + StartTime: startTime.Format(time.RFC3339), + Status: execution.Status.String(), + FilePath: execution.Execution.WorkflowId, + }) + } + + return tasks, nil +} + +func (s *ETLService) GetTaskLogs(_ context.Context, jobID int, filePath string) ([]map[string]interface{}, error) { + _, err := s.db.GetJobByID(jobID, true) + if err != nil { + return nil, fmt.Errorf("failed to find job: %s", err) + } + + syncFolderName := fmt.Sprintf("%x", sha256.Sum256([]byte(filePath))) + + // Get home directory + homeDir := constants.DefaultConfigDir + mainSyncDir := filepath.Join(homeDir, syncFolderName) + logs, err := utils.ReadLogs(mainSyncDir) + if err != nil { + return nil, fmt.Errorf("failed to read logs: %s", 
err) + } + // TODO: need to add activity logs as well with sync logs + return logs, nil +} + +// TODO: frontend needs to send source id and destination id +func (s *ETLService) buildJobResponse(ctx context.Context, job *models.Job, projectID string) (dto.JobResponse, error) { + jobResp := dto.JobResponse{ + ID: job.ID, + Name: job.Name, + StreamsConfig: job.StreamsConfig, + Frequency: job.Frequency, + CreatedAt: job.CreatedAt.Format(time.RFC3339), + UpdatedAt: job.UpdatedAt.Format(time.RFC3339), + Activate: job.Active, + } + + if job.SourceID != nil { + jobResp.Source = dto.DriverConfig{ + ID: &job.SourceID.ID, + Name: job.SourceID.Name, + Type: job.SourceID.Type, + Config: job.SourceID.Config, + Version: job.SourceID.Version, + } + } + + if job.DestID != nil { + jobResp.Destination = dto.DriverConfig{ + ID: &job.DestID.ID, + Name: job.DestID.Name, + Type: job.DestID.DestType, + Config: job.DestID.Config, + Version: job.DestID.Version, + } + } + + if job.CreatedBy != nil { + jobResp.CreatedBy = job.CreatedBy.Username + } + if job.UpdatedBy != nil { + jobResp.UpdatedBy = job.UpdatedBy.Username + } + + query := fmt.Sprintf("WorkflowId between 'sync-%s-%d' and 'sync-%s-%d-~'", projectID, job.ID, projectID, job.ID) + resp, err := s.temporal.ListWorkflow(ctx, &workflowservice.ListWorkflowExecutionsRequest{ + Query: query, + PageSize: 1, + }) + if err != nil { + return dto.JobResponse{}, fmt.Errorf("failed to list workflows: %s", err) + } + if len(resp.Executions) > 0 { + jobResp.LastRunTime = resp.Executions[0].StartTime.AsTime().Format(time.RFC3339) + jobResp.LastRunState = resp.Executions[0].Status.String() + } + + return jobResp, nil +} + +func (s *ETLService) upsertSource(config *dto.DriverConfig, projectID string, userID *int) (*models.Source, error) { + if config == nil { + return nil, fmt.Errorf("source config is required") + } + + // If ID provided, use that source as-is without modifying it. 
+ if config.ID != nil { + return s.db.GetSourceByID(*config.ID) + } + + user := &models.User{ID: *userID} + // Otherwise, create a new source. + newSource := &models.Source{ + Name: config.Name, + Type: config.Type, + Config: config.Config, + Version: config.Version, + ProjectID: projectID, + CreatedBy: user, + UpdatedBy: user, + } + if err := s.db.CreateSource(newSource); err != nil { + return nil, fmt.Errorf("failed to create source: %s", err) + } + + return newSource, nil +} + +func (s *ETLService) upsertDestination(config *dto.DriverConfig, projectID string, userID *int) (*models.Destination, error) { + if config == nil { + return nil, fmt.Errorf("destination config is required") + } + + // If ID provided, use that destination as-is without modifying it. + if config.ID != nil { + return s.db.GetDestinationByID(*config.ID) + } + + user := &models.User{ID: *userID} + // Otherwise, create a new destination. + newDest := &models.Destination{ + Name: config.Name, + DestType: config.Type, + Config: config.Config, + Version: config.Version, + ProjectID: projectID, + CreatedBy: user, + UpdatedBy: user, + } + + if err := s.db.CreateDestination(newDest); err != nil { + return nil, fmt.Errorf("failed to create destination: %s", err) + } + + return newDest, nil +} + +// worker service +func (s *ETLService) UpdateSyncTelemetry(ctx context.Context, jobID int, workflowID, event string) error { + switch strings.ToLower(event) { + case "started": + telemetry.TrackSyncStart(ctx, jobID, workflowID) + case "completed": + telemetry.TrackSyncCompleted(jobID, workflowID) + case "failed": + telemetry.TrackSyncFailed(jobID, workflowID) + } + + return nil +} diff --git a/server/internal/services/etl/services.go b/server/internal/services/etl/services.go new file mode 100644 index 00000000..a54fb26d --- /dev/null +++ b/server/internal/services/etl/services.go @@ -0,0 +1,26 @@ +package services + +import ( + "github.com/datazip-inc/olake-ui/server/internal/database" + 
"github.com/datazip-inc/olake-ui/server/internal/services/temporal" +) + +// AppService is a unified service exposing all domain operations backed by shared deps. +type ETLService struct { + // single ORM facade using one Ormer + db *database.Database + temporal *temporal.Temporal +} + +// InitAppService constructs a unified AppService with singletons. +func InitAppService(db *database.Database) (*ETLService, error) { + client, err := temporal.NewClient() + if err != nil { + return nil, err + } + + return &ETLService{ + db: db, + temporal: client, + }, nil +} diff --git a/server/internal/services/etl/source.go b/server/internal/services/etl/source.go new file mode 100644 index 00000000..62a499ab --- /dev/null +++ b/server/internal/services/etl/source.go @@ -0,0 +1,249 @@ +package services + +import ( + "context" + "fmt" + "path/filepath" + "time" + + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/models" + "github.com/datazip-inc/olake-ui/server/internal/models/dto" + "github.com/datazip-inc/olake-ui/server/utils" + "github.com/datazip-inc/olake-ui/server/utils/telemetry" +) + +// Source-related methods on AppService + +// GetAllSources returns all sources for a project with lightweight job summaries. 
+func (s *ETLService) ListSources(_ context.Context, _ string) ([]dto.SourceDataItem, error) { + sources, err := s.db.ListSources() + if err != nil { + return nil, fmt.Errorf("failed to list sources: %s", err) + } + + sourceIDs := make([]int, 0, len(sources)) + for _, src := range sources { + sourceIDs = append(sourceIDs, src.ID) + } + + var allJobs []*models.Job + allJobs, err = s.db.GetJobsBySourceID(sourceIDs) + if err != nil { + return nil, fmt.Errorf("failed to list jobs: %s", err) + } + + jobsBySourceID := make(map[int][]*models.Job) + for _, job := range allJobs { + if job.SourceID != nil { + jobsBySourceID[job.SourceID.ID] = append(jobsBySourceID[job.SourceID.ID], job) + } + } + + items := make([]dto.SourceDataItem, 0, len(sources)) + for _, src := range sources { + item := dto.SourceDataItem{ + ID: src.ID, + Name: src.Name, + Type: src.Type, + Version: src.Version, + Config: src.Config, + CreatedAt: src.CreatedAt.Format(time.RFC3339), + UpdatedAt: src.UpdatedAt.Format(time.RFC3339), + } + setUsernames(&item.CreatedBy, &item.UpdatedBy, src.CreatedBy, src.UpdatedBy) + + jobs := jobsBySourceID[src.ID] + jobItems, err := buildJobDataItems(jobs, s.temporal, "source") + if err != nil { + return nil, fmt.Errorf("failed to build job data items: %s", err) + } + item.Jobs = jobItems + + items = append(items, item) + } + + return items, nil +} + +func (s *ETLService) CreateSource(ctx context.Context, req *dto.CreateSourceRequest, projectID string, userID *int) error { + src := &models.Source{ + Name: req.Name, + Type: req.Type, + Version: req.Version, + Config: req.Config, + ProjectID: projectID, + } + + user := &models.User{ID: *userID} + src.CreatedBy = user + src.UpdatedBy = user + + if err := s.db.CreateSource(src); err != nil { + return fmt.Errorf("failed to create source: %s", err) + } + + telemetry.TrackSourceCreation(ctx, src) + return nil +} + +func (s *ETLService) UpdateSource(ctx context.Context, projectID string, id int, req *dto.UpdateSourceRequest, 
userID *int) error { + existing, err := s.db.GetSourceByID(id) + if err != nil { + return fmt.Errorf("failed to get source: %s", err) + } + + existing.Name = req.Name + existing.Config = req.Config + existing.Type = req.Type + existing.Version = req.Version + + user := &models.User{ID: *userID} + existing.UpdatedBy = user + + jobs, err := s.db.GetJobsBySourceID([]int{existing.ID}) + if err != nil { + return fmt.Errorf("failed to fetch jobs for source update: %s", err) + } + + if err := cancelAllJobWorkflows(ctx, s.temporal, jobs, projectID); err != nil { + return fmt.Errorf("failed to cancel workflows for source update: %s", err) + } + + if err := s.db.UpdateSource(existing); err != nil { + return fmt.Errorf("failed to update source: %s", err) + } + + telemetry.TrackSourcesStatus(ctx) + return nil +} + +func (s *ETLService) DeleteSource(ctx context.Context, id int) (*dto.DeleteSourceResponse, error) { + src, err := s.db.GetSourceByID(id) + if err != nil { + return nil, fmt.Errorf("failed to find source: %s", err) + } + + jobs, err := s.db.GetJobsBySourceID([]int{id}) + if err != nil { + return nil, fmt.Errorf("failed to retrieve jobs for source deletion: %s", err) + } + if len(jobs) > 0 { + return nil, fmt.Errorf("cannot delete source '%s' id[%d] because it is used in %d jobs; please delete the associated jobs first", src.Name, id, len(jobs)) + } + jobIDs := make([]int, 0, len(jobs)) + for _, job := range jobs { + jobIDs = append(jobIDs, job.ID) + } + + if err := s.db.DeactivateJobs(jobIDs); err != nil { + return nil, fmt.Errorf("failed to update jobs for source deletion: %s", err) + } + + if err := s.db.DeleteSource(id); err != nil { + return nil, fmt.Errorf("failed to delete source: %s", err) + } + + telemetry.TrackSourcesStatus(ctx) + return &dto.DeleteSourceResponse{Name: src.Name}, nil +} + +func (s *ETLService) TestSourceConnection(ctx context.Context, req *dto.SourceTestConnectionRequest) (map[string]interface{}, []map[string]interface{}, error) { + if 
s.temporal == nil { + return nil, nil, fmt.Errorf("temporal client not available") + } + + encryptedConfig, err := utils.Encrypt(req.Config) + if err != nil { + return nil, nil, fmt.Errorf("failed to encrypt config for test connection: %s", err) + } + workflowID := fmt.Sprintf("test-connection-%s-%d", req.Type, time.Now().Unix()) + result, err := s.temporal.VerifyDriverCredentials(ctx, workflowID, "config", req.Type, req.Version, encryptedConfig) + // TODO: handle from frontend + if result == nil { + result = map[string]interface{}{ + "message": err.Error(), + "status": "failed", + } + } + + if err != nil { + return result, nil, fmt.Errorf("connection test failed: %s", err) + } + homeDir := constants.DefaultConfigDir + mainLogDir := filepath.Join(homeDir, workflowID) + logs, err := utils.ReadLogs(mainLogDir) + if err != nil { + return result, nil, fmt.Errorf("failed to read logs source_type[%s] source_version[%s]: %s", + req.Type, req.Version, err) + } + + return result, logs, nil +} + +func (s *ETLService) GetSourceCatalog(ctx context.Context, req *dto.StreamsRequest) (map[string]interface{}, error) { + oldStreams := "" + if req.JobID >= 0 { + job, err := s.db.GetJobByID(req.JobID, true) + if err != nil { + return nil, fmt.Errorf("failed to find job for catalog: %s", err) + } + oldStreams = job.StreamsConfig + } + + encryptedConfig, err := utils.Encrypt(req.Config) + if err != nil { + return nil, fmt.Errorf("failed to encrypt config for catalog: %s", err) + } + + newStreams, err := s.temporal.DiscoverStreams( + ctx, + req.Type, + req.Version, + encryptedConfig, + oldStreams, + req.JobName, + ) + if err != nil { + return nil, fmt.Errorf("failed to get catalog: %s", err) + } + + return newStreams, nil +} + +func (s *ETLService) GetSourceJobs(_ context.Context, id int) ([]*models.Job, error) { + if _, err := s.db.GetSourceByID(id); err != nil { + return nil, fmt.Errorf("failed to find source: %s", err) + } + + jobs, err := s.db.GetJobsBySourceID([]int{id}) + if err 
!= nil { + return nil, fmt.Errorf("failed to get jobs by source: %s", err) + } + + return jobs, nil +} + +func (s *ETLService) GetSourceVersions(ctx context.Context, sourceType string) (map[string]interface{}, error) { + imageName := fmt.Sprintf("olakego/source-%s", sourceType) + versions, _, err := utils.GetDriverImageTags(ctx, imageName, true) + if err != nil { + return nil, fmt.Errorf("failed to get Docker versions: %s", err) + } + + return map[string]interface{}{"version": versions}, nil +} + +// TODO: cache spec in db for each version +func (s *ETLService) GetSourceSpec(ctx context.Context, req *dto.SpecRequest) (dto.SpecResponse, error) { + specOut, err := s.temporal.GetDriverSpecs(ctx, "", req.Type, req.Version) + if err != nil { + return dto.SpecResponse{}, fmt.Errorf("failed to get spec: %s", err) + } + + return dto.SpecResponse{ + Version: req.Version, + Type: req.Type, + Spec: specOut.Spec, + }, nil +} diff --git a/server/internal/services/etl/user.go b/server/internal/services/etl/user.go new file mode 100644 index 00000000..fa34008c --- /dev/null +++ b/server/internal/services/etl/user.go @@ -0,0 +1,51 @@ +package services + +import ( + "context" + "fmt" + + "github.com/datazip-inc/olake-ui/server/internal/models" +) + +// User-related methods on AppService + +func (s *ETLService) CreateUser(_ context.Context, req *models.User) error { + if err := s.db.CreateUser(req); err != nil { + return fmt.Errorf("failed to create user: %s", err) + } + + return nil +} + +func (s *ETLService) GetAllUsers(_ context.Context) ([]*models.User, error) { + users, err := s.db.ListUsers() + if err != nil { + return nil, fmt.Errorf("failed to list users: %s", err) + } + return users, nil +} + +func (s *ETLService) UpdateUser(_ context.Context, id int, req *models.User) (*models.User, error) { + existingUser, err := s.db.GetUserByID(id) + if err != nil { + return nil, fmt.Errorf("failed to find user: %s", err) + } + + existingUser.Username = req.Username + existingUser.Email 
= req.Email + + if err := s.db.UpdateUser(existingUser); err != nil { + return nil, fmt.Errorf("failed to update user: %s", err) + } + + return existingUser, nil +} + +func (s *ETLService) DeleteUser(_ context.Context, id int) error { + if err := s.db.DeleteUser(id); err != nil { + return fmt.Errorf("failed to delete user: %s", err) + } + return nil +} + +// removed: duplicate of auth.GetUserByID diff --git a/server/internal/services/etl/utils.go b/server/internal/services/etl/utils.go new file mode 100644 index 00000000..9229f541 --- /dev/null +++ b/server/internal/services/etl/utils.go @@ -0,0 +1,110 @@ +package services + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/datazip-inc/olake-ui/server/internal/models" + "github.com/datazip-inc/olake-ui/server/internal/models/dto" + "github.com/datazip-inc/olake-ui/server/internal/services/temporal" + "go.temporal.io/api/workflowservice/v1" +) + +func cancelAllJobWorkflows(ctx context.Context, tempClient *temporal.Temporal, jobs []*models.Job, projectID string) error { + if len(jobs) == 0 { + return nil + } + + // Build combined query + var conditions []string + for _, job := range jobs { + conditions = append(conditions, fmt.Sprintf( + "(WorkflowId BETWEEN 'sync-%s-%d' AND 'sync-%s-%d-~')", + projectID, job.ID, projectID, job.ID, + )) + } + + query := fmt.Sprintf("(%s) AND ExecutionStatus = 'Running'", strings.Join(conditions, " OR ")) + + // List all running workflows at once + resp, err := tempClient.ListWorkflow(ctx, &workflowservice.ListWorkflowExecutionsRequest{ + Query: query, + }) + if err != nil { + return fmt.Errorf("list workflows failed: %s", err) + } + if len(resp.Executions) == 0 { + return nil // no running workflows + } + + // Cancel each found workflow (still a loop, but only one list RPC) + for _, wfExec := range resp.Executions { + if err := tempClient.CancelWorkflow(ctx, + wfExec.Execution.WorkflowId, wfExec.Execution.RunId); err != nil { + return fmt.Errorf("failed to cancel 
workflow[%s]: %s", wfExec.Execution.WorkflowId, err) + } + } + return nil +} + +func buildJobDataItems(jobs []*models.Job, tempClient *temporal.Temporal, contextType string) ([]dto.JobDataItem, error) { + jobItems := make([]dto.JobDataItem, 0) + for _, job := range jobs { + jobInfo := dto.JobDataItem{ + Name: job.Name, + ID: job.ID, + Activate: job.Active, + } + + // Set source/destination info based on context + if contextType == "source" && job.DestID != nil { + jobInfo.DestinationName = job.DestID.Name + jobInfo.DestinationType = job.DestID.DestType + } else if contextType == "destination" && job.SourceID != nil { + jobInfo.SourceName = job.SourceID.Name + jobInfo.SourceType = job.SourceID.Type + } + + if err := setJobWorkflowInfo(&jobInfo, job.ID, job.ProjectID, tempClient); err != nil { + return nil, fmt.Errorf("failed to set job workflow info: %s", err) + } + jobItems = append(jobItems, jobInfo) + } + + return jobItems, nil +} + +func setUsernames(createdBy, updatedBy *string, createdByUser, updatedByUser *models.User) { + if createdByUser != nil { + *createdBy = createdByUser.Username + } + if updatedByUser != nil { + *updatedBy = updatedByUser.Username + } +} + +// setJobWorkflowInfo fetches and sets workflow execution information for a job +// Returns false if an error occurred that should stop processing +func setJobWorkflowInfo(jobInfo *dto.JobDataItem, jobID int, projectID string, tempClient *temporal.Temporal) error { + query := fmt.Sprintf("WorkflowId between 'sync-%s-%d' and 'sync-%s-%d-~'", projectID, jobID, projectID, jobID) + + resp, err := tempClient.ListWorkflow(context.Background(), &workflowservice.ListWorkflowExecutionsRequest{ + Query: query, + PageSize: 1, + }) + + if err != nil { + return fmt.Errorf("failed to list workflows: %s", err) + } + + if len(resp.Executions) > 0 { + jobInfo.LastRunTime = resp.Executions[0].StartTime.AsTime().Format(time.RFC3339) + jobInfo.LastRunState = resp.Executions[0].Status.String() + } else { + 
jobInfo.LastRunTime = "" + jobInfo.LastRunState = "" + } + return nil +} diff --git a/server/internal/services/temporal/client.go b/server/internal/services/temporal/client.go new file mode 100644 index 00000000..1d41de9f --- /dev/null +++ b/server/internal/services/temporal/client.go @@ -0,0 +1,144 @@ +package temporal + +import ( + "context" + "fmt" + "time" + + "github.com/beego/beego/v2/server/web" + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/models" + "github.com/datazip-inc/olake-ui/server/utils" + "go.temporal.io/api/enums/v1" + "go.temporal.io/api/workflowservice/v1" + "go.temporal.io/sdk/client" +) + +type Temporal struct { + Client client.Client + taskQueue string +} + +// NewClient creates a new Temporal client +func NewClient() (*Temporal, error) { + temporalAddress, err := web.AppConfig.String(constants.ConfTemporalAddress) + if err != nil { + return nil, fmt.Errorf("failed to get temporal address: %s", err) + } + + var temporalClient *Temporal + err = utils.RetryWithBackoff(func() error { + client, dialErr := client.Dial(client.Options{ + HostPort: temporalAddress, + }) + if dialErr != nil { + return fmt.Errorf("failed to create temporal client: %s", dialErr) + } + + temporalClient = &Temporal{ + Client: client, + taskQueue: constants.TemporalTaskQueue, + } + return nil + }, 3, time.Second) + if err != nil { + return nil, err + } + + return temporalClient, nil +} + +// Close closes the Temporal client +func (t *Temporal) Close() { + if t.Client != nil { + t.Client.Close() + } +} + +func (t *Temporal) WorkflowAndScheduleID(projectID string, jobID int) (string, string) { + workflowID := fmt.Sprintf("sync-%s-%d", projectID, jobID) + return workflowID, fmt.Sprintf("schedule-%s", workflowID) +} + +// createSchedule creates a new schedule +func (t *Temporal) CreateSchedule(ctx context.Context, job *models.Job) error { + workflowID, scheduleID := t.WorkflowAndScheduleID(job.ProjectID, 
job.ID) + cronExpression := utils.ToCron(job.Frequency) + + req := buildExecutionReqForSync(job, workflowID) + + _, err := t.Client.ScheduleClient().Create(ctx, client.ScheduleOptions{ + ID: scheduleID, + Spec: client.ScheduleSpec{ + CronExpressions: []string{cronExpression}, + }, + Action: &client.ScheduleWorkflowAction{ + ID: workflowID, + Workflow: RunSyncWorkflow, + Args: []any{req}, + TaskQueue: t.taskQueue, + }, + Overlap: enums.SCHEDULE_OVERLAP_POLICY_SKIP, + }) + return err +} + +// UpdateSchedule updates an existing schedule +func (t *Temporal) UpdateSchedule(ctx context.Context, frequency, projectID string, jobID int) error { + cronExpression := utils.ToCron(frequency) + _, scheduleID := t.WorkflowAndScheduleID(projectID, jobID) + + handle := t.Client.ScheduleClient().GetHandle(ctx, scheduleID) + return handle.Update(ctx, client.ScheduleUpdateOptions{ + DoUpdate: func(input client.ScheduleUpdateInput) (*client.ScheduleUpdate, error) { + input.Description.Schedule.Spec = &client.ScheduleSpec{ + CronExpressions: []string{cronExpression}, + } + return &client.ScheduleUpdate{ + Schedule: &input.Description.Schedule, + }, nil + }, + }) +} + +func (t *Temporal) PauseSchedule(ctx context.Context, projectID string, jobID int) error { + _, scheduleID := t.WorkflowAndScheduleID(projectID, jobID) + return t.Client.ScheduleClient().GetHandle(ctx, scheduleID).Pause(ctx, client.SchedulePauseOptions{ + Note: "user paused the schedule", + }) +} + +func (t *Temporal) ResumeSchedule(ctx context.Context, projectID string, jobID int) error { + _, scheduleID := t.WorkflowAndScheduleID(projectID, jobID) + return t.Client.ScheduleClient().GetHandle(ctx, scheduleID).Unpause(ctx, client.ScheduleUnpauseOptions{ + Note: "user resumed the schedule", + }) +} + +func (t *Temporal) DeleteSchedule(ctx context.Context, projectID string, jobID int) error { + _, scheduleID := t.WorkflowAndScheduleID(projectID, jobID) + return t.Client.ScheduleClient().GetHandle(ctx, scheduleID).Delete(ctx) 
+} + +func (t *Temporal) TriggerSchedule(ctx context.Context, projectID string, jobID int) error { + _, scheduleID := t.WorkflowAndScheduleID(projectID, jobID) + return t.Client.ScheduleClient().GetHandle(ctx, scheduleID).Trigger(ctx, client.ScheduleTriggerOptions{ + Overlap: enums.SCHEDULE_OVERLAP_POLICY_SKIP, + }) +} + +// cancelWorkflow cancels a workflow execution +func (t *Temporal) CancelWorkflow(ctx context.Context, workflowID, runID string) error { + return t.Client.CancelWorkflow(ctx, workflowID, runID) +} + +// ListWorkflow lists workflow executions based on the provided query +func (t *Temporal) ListWorkflow(ctx context.Context, request *workflowservice.ListWorkflowExecutionsRequest) (*workflowservice.ListWorkflowExecutionsResponse, error) { + // Query workflows using the SDK's ListWorkflow method + resp, err := t.Client.ListWorkflow(ctx, request) + if err != nil { + return nil, fmt.Errorf("error listing workflow executions: %s", err) + } + + return resp, nil +} diff --git a/server/internal/services/temporal/execute.go b/server/internal/services/temporal/execute.go new file mode 100644 index 00000000..a8bb4e88 --- /dev/null +++ b/server/internal/services/temporal/execute.go @@ -0,0 +1,210 @@ +package temporal + +import ( + "context" + "fmt" + "time" + + "github.com/beego/beego/v2/server/web" + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/models/dto" + "go.temporal.io/sdk/client" + "golang.org/x/mod/semver" +) + +const ( + RunSyncWorkflow = "RunSyncWorkflow" + ExecuteWorkflow = "ExecuteWorkflow" +) + +type Command string + +type JobConfig struct { + Name string `json:"name"` + Data string `json:"data"` +} + +type ExecutionRequest struct { + Type string `json:"type"` + Command Command `json:"command"` + ConnectorType string `json:"connector_type"` + Version string `json:"version"` + Args []string `json:"args"` + Configs []JobConfig `json:"configs"` + WorkflowID string `json:"workflow_id"` + 
JobID int `json:"job_id"` + Timeout time.Duration `json:"timeout"` + OutputFile string `json:"output_file"` // to get the output file from the workflow +} + +const ( + Discover Command = "discover" + Check Command = "check" + Sync Command = "sync" + Spec Command = "spec" +) + +// DiscoverStreams runs a workflow to discover catalog data +func (t *Temporal) DiscoverStreams(ctx context.Context, sourceType, version, config, streamsConfig, jobName string) (map[string]interface{}, error) { + workflowID := fmt.Sprintf("discover-catalog-%s-%d", sourceType, time.Now().Unix()) + + configs := []JobConfig{ + {Name: "config.json", Data: config}, + {Name: "streams.json", Data: streamsConfig}, + } + + cmdArgs := []string{ + "discover", + "--config", + "/mnt/config/config.json", + } + + if jobName != "" && semver.Compare(version, "v0.2.0") >= 0 { + cmdArgs = append(cmdArgs, "--destination-database-prefix", jobName) + } + + if streamsConfig != "" { + cmdArgs = append(cmdArgs, "--catalog", "/mnt/config/streams.json") + } + + if encryptionKey, _ := web.AppConfig.String(constants.ConfEncryptionKey); encryptionKey != "" { + cmdArgs = append(cmdArgs, "--encryption-key", encryptionKey) + } + + req := &ExecutionRequest{ + Type: "docker", + Command: Discover, + ConnectorType: sourceType, + Version: version, + Args: cmdArgs, + Configs: configs, + WorkflowID: workflowID, + JobID: 0, + Timeout: GetWorkflowTimeout(Discover), + OutputFile: "streams.json", + } + + workflowOptions := client.StartWorkflowOptions{ + ID: workflowID, + TaskQueue: t.taskQueue, + } + + run, err := t.Client.ExecuteWorkflow(ctx, workflowOptions, ExecuteWorkflow, req) + if err != nil { + return nil, fmt.Errorf("failed to execute discover workflow: %s", err) + } + + result, err := ExtractWorkflowResponse(ctx, run) + if err != nil { + return nil, fmt.Errorf("failed to extract workflow response: %v", err) + } + + return result, nil +} + +// FetchSpec runs a workflow to fetch driver specifications +func (t *Temporal) 
GetDriverSpecs(ctx context.Context, destinationType, sourceType, version string) (dto.SpecOutput, error) { + workflowID := fmt.Sprintf("fetch-spec-%s-%d", sourceType, time.Now().Unix()) + + // spec version >= DefaultSpecVersion is required + if semver.Compare(version, constants.DefaultSpecVersion) < 0 { + version = constants.DefaultSpecVersion + } + + cmdArgs := []string{ + "spec", + } + if destinationType != "" { + cmdArgs = append(cmdArgs, "--destination-type", destinationType) + } + + req := &ExecutionRequest{ + Type: "docker", + Command: Spec, + ConnectorType: sourceType, + Version: version, + Args: cmdArgs, + Configs: nil, + WorkflowID: workflowID, + JobID: 0, + Timeout: GetWorkflowTimeout(Spec), + OutputFile: "", + } + + workflowOptions := client.StartWorkflowOptions{ + ID: workflowID, + TaskQueue: t.taskQueue, + } + + run, err := t.Client.ExecuteWorkflow(ctx, workflowOptions, ExecuteWorkflow, req) + if err != nil { + return dto.SpecOutput{}, fmt.Errorf("failed to execute fetch spec workflow: %s", err) + } + + result, err := ExtractWorkflowResponse(ctx, run) + if err != nil { + return dto.SpecOutput{}, fmt.Errorf("failed to extract workflow response: %v", err) + } + + return dto.SpecOutput{ + Spec: result, + }, nil +} + +// TestConnection runs a workflow to test connection +func (t *Temporal) VerifyDriverCredentials(ctx context.Context, workflowID, flag, sourceType, version, config string) (map[string]interface{}, error) { + configs := []JobConfig{ + {Name: "config.json", Data: config}, + } + + cmdArgs := []string{ + "check", + fmt.Sprintf("--%s", flag), + "/mnt/config/config.json", + } + if encryptionKey, _ := web.AppConfig.String(constants.ConfEncryptionKey); encryptionKey != "" { + cmdArgs = append(cmdArgs, "--encryption-key", encryptionKey) + } + + req := &ExecutionRequest{ + Type: "docker", + Command: Check, + ConnectorType: sourceType, + Version: version, + Args: cmdArgs, + Configs: configs, + WorkflowID: workflowID, + Timeout: 
GetWorkflowTimeout(Check), + } + + workflowOptions := client.StartWorkflowOptions{ + ID: workflowID, + TaskQueue: t.taskQueue, + } + + run, err := t.Client.ExecuteWorkflow(ctx, workflowOptions, ExecuteWorkflow, req) + if err != nil { + return nil, fmt.Errorf("failed to execute test connection workflow: %s", err) + } + + result, err := ExtractWorkflowResponse(ctx, run) + if err != nil { + return nil, fmt.Errorf("failed to extract workflow response: %v", err) + } + + connectionStatus, ok := result["connectionStatus"].(map[string]interface{}) + if !ok || connectionStatus == nil { + return nil, fmt.Errorf("connection status not found") + } + + status, statusOk := connectionStatus["status"].(string) + message, _ := connectionStatus["message"].(string) // message is optional + if !statusOk { + return nil, fmt.Errorf("connection status not found") + } + + return map[string]interface{}{ + "message": message, + "status": status, + }, nil +} diff --git a/server/internal/services/temporal/utils.go b/server/internal/services/temporal/utils.go new file mode 100644 index 00000000..1c234bb4 --- /dev/null +++ b/server/internal/services/temporal/utils.go @@ -0,0 +1,102 @@ +package temporal + +import ( + "context" + "encoding/json" + "fmt" + "strings" + "time" + + "github.com/datazip-inc/olake-ui/server/internal/models" + "go.temporal.io/sdk/client" +) + +// buildExecutionReqForSync builds the ExecutionRequest for a sync job +func buildExecutionReqForSync(job *models.Job, workflowID string) ExecutionRequest { + args := []string{ + "sync", + "--config", "/mnt/config/source.json", + "--destination", "/mnt/config/destination.json", + "--catalog", "/mnt/config/streams.json", + "--state", "/mnt/config/state.json", + } + + return ExecutionRequest{ + Type: "docker", + Command: Sync, + ConnectorType: job.SourceID.Type, + Version: job.SourceID.Version, + Args: args, + WorkflowID: workflowID, + JobID: job.ID, + Timeout: GetWorkflowTimeout(Sync), + OutputFile: "state.json", + } +} + +// 
extractWorkflowResponse extracts and parses the JSON response from a workflow execution result +func ExtractWorkflowResponse(ctx context.Context, run client.WorkflowRun) (map[string]interface{}, error) { + var result map[string]interface{} + if err := run.Get(ctx, &result); err != nil { + return nil, fmt.Errorf("workflow execution failed: %v", err) + } + + response, ok := result["response"].(string) + if !ok { + return nil, fmt.Errorf("invalid response format from worker") + } + + jsonResponse, err := ExtractJSON(response) + if err != nil { + return nil, err + } + + return jsonResponse, nil +} + +func GetWorkflowTimeout(op Command) time.Duration { + switch op { + case Discover: + return time.Minute * 10 + case Check: + return time.Minute * 10 + case Spec: + return time.Minute * 5 + case Sync: + return time.Hour * 24 * 30 + // check what can the fallback time be + default: + return time.Minute * 5 + } +} + +// ExtractJSON extracts and returns the last valid JSON block from output +func ExtractJSON(output string) (map[string]interface{}, error) { + outputStr := strings.TrimSpace(output) + if outputStr == "" { + return nil, fmt.Errorf("empty output") + } + + lines := strings.Split(outputStr, "\n") + + // Find the last non-empty line with valid JSON + for i := len(lines) - 1; i >= 0; i-- { + line := strings.TrimSpace(lines[i]) + if line == "" { + continue + } + + start := strings.Index(line, "{") + end := strings.LastIndex(line, "}") + if start != -1 && end != -1 && end > start { + jsonPart := line[start : end+1] + var result map[string]interface{} + if err := json.Unmarshal([]byte(jsonPart), &result); err != nil { + continue // Skip invalid JSON + } + return result, nil + } + } + + return nil, fmt.Errorf("no valid JSON block found in output") +} diff --git a/server/internal/telemetry/job.go b/server/internal/telemetry/job.go deleted file mode 100644 index 16c91886..00000000 --- a/server/internal/telemetry/job.go +++ /dev/null @@ -1,45 +0,0 @@ -package telemetry - 
-import ( - "context" - "time" - - "github.com/beego/beego/v2/core/logs" - "github.com/datazip/olake-frontend/server/internal/models" -) - -// TrackJobCreation tracks the creation of a new job with relevant properties -func TrackJobCreation(ctx context.Context, job *models.Job) { - go func() { - if instance == nil || job == nil { - return - } - - properties := map[string]interface{}{ - "job_id": job.ID, - "job_name": job.Name, - "project_id": job.ProjectID, - "source_type": job.SourceID.Type, - "source_name": job.SourceID.Name, - "destination_type": job.DestID.DestType, - "destination_name": job.DestID.Name, - "frequency": job.Frequency, - "active": job.Active, - } - - if !job.CreatedAt.IsZero() { - properties["created_at"] = job.CreatedAt.Format(time.RFC3339) - } - - if err := TrackEvent(ctx, EventJobCreated, properties); err != nil { - logs.Debug("Failed to track job creation event: %s", err) - return - } - TrackJobEntity(ctx) - }() -} - -func TrackJobEntity(ctx context.Context) { - TrackSourcesStatus(ctx) - TrackDestinationsStatus(ctx) -} diff --git a/server/internal/temporal/README.md b/server/internal/temporal/README.md deleted file mode 100644 index 5627e66e..00000000 --- a/server/internal/temporal/README.md +++ /dev/null @@ -1,144 +0,0 @@ -# Temporal-based Docker Runner - -This package provides a Temporal-based implementation for running Docker commands. It offers improved reliability, observability, and error handling compared to the direct Docker command execution approach. - -## Features - -- Durable execution with automatic retries -- Detailed workflow history for debugging -- Heartbeats to track long-running operations -- Improved monitoring and visibility -- Better error handling and recovery - -## Prerequisites - -1. 
Install and run a local Temporal server: - -```bash -# Using docker-compose -docker-compose up -d --build - -# OR using Temporal CLI -temporal server start-dev -``` - -See [Temporal documentation](https://docs.temporal.io/clusters/quick-install) for more installation options. - -## Usage - -### Starting a Temporal Worker - -You need to run at least one worker to process workflow and activity tasks: - -```go -package main - -import ( - "log" - "os" - "os/signal" - "syscall" - - "github.com/datazip/olake-server/internal/temporal" -) - -func main() { - // Create and start a worker - worker, err := temporal.NewWorker("") - if err != nil { - log.Fatalf("Failed to create worker: %v", err) - } - - // Start the worker - go func() { - if err := worker.Start(); err != nil { - log.Fatalf("Failed to start worker: %v", err) - } - }() - - // Handle graceful shutdown - signalChan := make(chan os.Signal, 1) - signal.Notify(signalChan, os.Interrupt, syscall.SIGTERM) - <-signalChan - - log.Println("Shutting down worker...") - worker.Stop() -} -``` - -### Using the Temporal-based Docker Runner - -```go -package main - -import ( - "fmt" - "log" - - "github.com/datazip/olake-server/internal/docker" -) - -func main() { - // Create a Temporal-based runner - runner, err := docker.NewTemporalRunner("", "") - if err != nil { - log.Fatalf("Failed to create Temporal runner: %v", err) - } - defer runner.Close() - - // Example: Get catalog from PostgreSQL source - config := `{ - "host": "postgres", - "port": 5432, - "database": "example", - "username": "postgres", - "password": "postgres" - }` - - result, err := runner.GetCatalog("postgres", "latest", config, 1) - if err != nil { - log.Fatalf("Failed to get catalog: %v", err) - } - - fmt.Printf("Catalog result: %+v\n", result) -} -``` - -## Monitoring and Debugging - -You can access the Temporal Web UI to monitor and debug workflow executions: - -- Local development: http://localhost:8233 -- With standard Temporal: http://localhost:8080 - -The 
Web UI provides: -- Workflow execution history -- Activity details and failures -- Workflow retry information -- Query and signal capabilities - -## Advanced Usage - -### Custom Workflow Configurations - -You can customize workflow options like timeouts, retry policies, and task queues by modifying the Client implementation. - -### Running with a Production Temporal Cluster - -For production, configure your application to connect to your production Temporal cluster: - -```go -// Connect to production Temporal cluster -runner, err := docker.NewTemporalRunner("", "temporal.example.com:7233") -``` - -## Troubleshooting - -1. **Worker Not Processing Tasks:** Ensure the worker is running and registered to the same task queue. -2. **Connection Issues:** Verify Temporal server is running and accessible. -3. **Docker Execution Failures:** Check Docker is installed and available to the worker process. - -## Additional Resources - -- [Temporal Documentation](https://docs.temporal.io/) -- [Go SDK Documentation](https://pkg.go.dev/go.temporal.io/sdk) \ No newline at end of file diff --git a/server/internal/temporal/activities.go b/server/internal/temporal/activities.go deleted file mode 100644 index 8a5ac164..00000000 --- a/server/internal/temporal/activities.go +++ /dev/null @@ -1,118 +0,0 @@ -package temporal - -import ( - "context" - "fmt" - "time" - - "github.com/datazip/olake-frontend/server/internal/docker" - "github.com/datazip/olake-frontend/server/internal/models" - "go.temporal.io/sdk/activity" - "go.temporal.io/sdk/temporal" -) - -// DiscoverCatalogActivity runs the discover command to get catalog data -func DiscoverCatalogActivity(ctx context.Context, params *ActivityParams) (map[string]interface{}, error) { - logger := activity.GetLogger(ctx) - logger.Info("Starting sync activity", - "sourceType", params.SourceType, - "workflowID", params.WorkflowID) - - // Create a Docker runner with the default config directory - runner := 
docker.NewRunner(docker.GetDefaultConfigDir()) - - // Record heartbeat - activity.RecordHeartbeat(ctx, "Running sync command") - - // Execute the sync operation - result, err := runner.GetCatalog( - ctx, - params.SourceType, - params.Version, - params.Config, - params.WorkflowID, - params.StreamsConfig, - params.JobName, - ) - if err != nil { - logger.Error("Sync command failed", "error", err) - return result, fmt.Errorf("sync command failed: %v", err) - } - - return result, nil -} - -// FetchSpecActivity runs the spec command to get connector specifications -func FetchSpecActivity(ctx context.Context, params *ActivityParams) (models.SpecOutput, error) { - runner := docker.NewRunner(docker.GetDefaultConfigDir()) - return runner.FetchSpec(ctx, params.DestinationType, params.SourceType, params.Version, params.WorkflowID) -} - -// TestConnectionActivity runs the check command to test connection -func TestConnectionActivity(ctx context.Context, params *ActivityParams) (map[string]interface{}, error) { - // Create a Docker runner with the default config directory - runner := docker.NewRunner(docker.GetDefaultConfigDir()) - resp, err := runner.TestConnection(ctx, params.Flag, params.SourceType, params.Version, params.Config, params.WorkflowID) - return resp, err -} - -// SyncActivity runs the sync command to transfer data between source and destination -func SyncActivity(ctx context.Context, params *SyncParams) (map[string]interface{}, error) { - logger := activity.GetLogger(ctx) - logger.Info("Starting sync activity", "jobId", params.JobID, "workflowID", params.WorkflowID) - - activity.RecordHeartbeat(ctx, "Running sync command") - - type resErr struct { - res map[string]interface{} - err error - } - done := make(chan resErr, 1) - // excueting sync in a goroutine to prevent blocking and monitoring the sync progress - go func() { - runner := docker.NewRunner(docker.GetDefaultConfigDir()) - res, err := runner.RunSync(ctx, params.JobID, params.WorkflowID) - done <- 
resErr{res: res, err: err} - }() - - for { - select { - case <-ctx.Done(): - logger.Info("SyncActivity canceled, deferring cleanup to SyncCleanupActivity") - return nil, ctx.Err() - case r := <-done: - if r.err != nil { - // CRITICAL: Check if error is because context was cancelled - if ctx.Err() != nil { - logger.Info("Goroutine failed due to context cancellation", "dockerError", r.err) - return nil, ctx.Err() // Return cancellation error, not docker error - } - - logger.Error("Sync command failed", "error", r.err) - return r.res, temporal.NewNonRetryableApplicationError(r.err.Error(), "SyncFailed", r.err) - } - return r.res, nil - default: - activity.RecordHeartbeat(ctx, "sync in progress") - time.Sleep(1 * time.Second) - } - } -} - -// SyncCleanupActivity ensures container is fully stopped and state is persisted to database -func SyncCleanupActivity(ctx context.Context, params *SyncParams) error { - logger := activity.GetLogger(ctx) - logger.Info("Starting cleanup activity", "jobId", params.JobID, "workflowID", params.WorkflowID) - // Stop container gracefully - logger.Info("Stopping container for cleanup %s", params.WorkflowID) - if err := docker.StopContainer(ctx, params.WorkflowID); err != nil { - return temporal.NewNonRetryableApplicationError(err.Error(), "CleanupFailed", err) - } - runner := docker.NewRunner(docker.GetDefaultConfigDir()) - logger.Info("Persisting job state for workflowID %s", params.WorkflowID) - if err := runner.PersistJobStateFromFile(params.JobID, params.WorkflowID); err != nil { - return temporal.NewNonRetryableApplicationError(err.Error(), "CleanupFailed", err) - } - logger.Info("Cleanup completed successfully") - return nil -} diff --git a/server/internal/temporal/client.go b/server/internal/temporal/client.go deleted file mode 100644 index 30105ee5..00000000 --- a/server/internal/temporal/client.go +++ /dev/null @@ -1,303 +0,0 @@ -package temporal - -import ( - "context" - "fmt" - "time" - - "github.com/beego/beego/v2/server/web" - 
"github.com/datazip/olake-frontend/server/internal/constants" - "github.com/datazip/olake-frontend/server/internal/docker" - "github.com/datazip/olake-frontend/server/internal/models" - "github.com/datazip/olake-frontend/server/utils" - "go.temporal.io/api/enums/v1" - "go.temporal.io/api/workflowservice/v1" - "go.temporal.io/sdk/client" - "golang.org/x/mod/semver" -) - -// TaskQueue is the default task queue for Olake Docker workflows -const ( - DockerTaskQueue = "OLAKE_DOCKER_TASK_QUEUE" - K8sTaskQueue = "OLAKE_K8S_TASK_QUEUE" -) - -var TaskQueue string - -var ( - TemporalAddress string -) - -// SyncAction represents the type of action to perform -type SyncAction string - -const ( - ActionCreate SyncAction = "create" - ActionUpdate SyncAction = "update" - ActionDelete SyncAction = "delete" - ActionTrigger SyncAction = "trigger" - ActionPause SyncAction = "pause" - ActionUnpause SyncAction = "unpause" -) - -func init() { - TemporalAddress = web.AppConfig.DefaultString("TEMPORAL_ADDRESS", "localhost:7233") - - // Choose task queue based on deployment mode - deploymentMode := web.AppConfig.DefaultString("DEPLOYMENT_MODE", "docker") - if deploymentMode == "kubernetes" { - TaskQueue = K8sTaskQueue - } else { - TaskQueue = DockerTaskQueue - } -} - -// Client provides methods to interact with Temporal -type Client struct { - temporalClient client.Client -} - -// NewClient creates a new Temporal client -func NewClient() (*Client, error) { - c, err := client.Dial(client.Options{ - HostPort: TemporalAddress, - }) - if err != nil { - return nil, fmt.Errorf("failed to create Temporal client: %v", err) - } - - return &Client{ - temporalClient: c, - }, nil -} - -// Close closes the Temporal client -func (c *Client) Close() { - if c.temporalClient != nil { - c.temporalClient.Close() - } -} - -// GetCatalog runs a workflow to discover catalog data -func (c *Client) GetCatalog(ctx context.Context, sourceType, version, config, streamsConfig, jobName string) (map[string]interface{}, 
error) { - params := &ActivityParams{ - SourceType: sourceType, - Version: version, - Config: config, - WorkflowID: fmt.Sprintf("discover-catalog-%s-%d", sourceType, time.Now().Unix()), - Command: docker.Discover, - StreamsConfig: streamsConfig, - JobName: jobName, - } - - workflowOptions := client.StartWorkflowOptions{ - ID: params.WorkflowID, - TaskQueue: TaskQueue, - } - - run, err := c.temporalClient.ExecuteWorkflow(ctx, workflowOptions, DiscoverCatalogWorkflow, params) - if err != nil { - return nil, fmt.Errorf("failed to execute discover workflow: %v", err) - } - - var result map[string]interface{} - if err := run.Get(ctx, &result); err != nil { - return nil, fmt.Errorf("workflow execution failed: %v", err) - } - - return result, nil -} - -// FetchSpec runs a workflow to fetch connector specifications -func (c *Client) FetchSpec(ctx context.Context, destinationType, sourceType, version string) (models.SpecOutput, error) { - // spec version >= DefaultSpecVersion is required - if semver.Compare(version, constants.DefaultSpecVersion) < 0 { - version = constants.DefaultSpecVersion - } - - params := &ActivityParams{ - SourceType: sourceType, - Version: version, - WorkflowID: fmt.Sprintf("fetch-spec-%s-%d", sourceType, time.Now().Unix()), - DestinationType: destinationType, - } - - workflowOptions := client.StartWorkflowOptions{ - ID: params.WorkflowID, - TaskQueue: TaskQueue, - } - - run, err := c.temporalClient.ExecuteWorkflow(ctx, workflowOptions, FetchSpecWorkflow, params) - if err != nil { - return models.SpecOutput{}, fmt.Errorf("failed to execute fetch spec workflow: %v", err) - } - - var result models.SpecOutput - if err := run.Get(ctx, &result); err != nil { - return models.SpecOutput{}, fmt.Errorf("workflow execution failed: %v", err) - } - - return result, nil -} - -// TestConnection runs a workflow to test connection -func (c *Client) TestConnection(ctx context.Context, workflowID, flag, sourceType, version, config string) (map[string]interface{}, 
error) { - params := &ActivityParams{ - SourceType: sourceType, - Version: version, - Config: config, - WorkflowID: workflowID, - Command: docker.Check, - Flag: flag, - } - - workflowOptions := client.StartWorkflowOptions{ - ID: params.WorkflowID, - TaskQueue: TaskQueue, - } - - run, err := c.temporalClient.ExecuteWorkflow(ctx, workflowOptions, TestConnectionWorkflow, params) - if err != nil { - return nil, fmt.Errorf("failed to execute test connection workflow: %v", err) - } - - var result map[string]interface{} - if err := run.Get(ctx, &result); err != nil { - return nil, fmt.Errorf("workflow execution failed: %v", err) - } - - return result, nil -} - -// ManageSync handles all sync operations (create, update, delete, trigger) -func (c *Client) ManageSync(ctx context.Context, projectID string, jobID int, frequency string, action SyncAction) (map[string]interface{}, error) { - workflowID := fmt.Sprintf("sync-%s-%d", projectID, jobID) - scheduleID := fmt.Sprintf("schedule-%s", workflowID) - - handle := c.temporalClient.ScheduleClient().GetHandle(ctx, scheduleID) - currentSchedule, err := handle.Describe(ctx) - scheduleExists := err == nil - if action != ActionCreate && !scheduleExists { - return nil, fmt.Errorf("schedule does not exist") - } - switch action { - case ActionCreate: - if frequency == "" { - return nil, fmt.Errorf("frequency is required for creating schedule") - } - if scheduleExists { - return nil, fmt.Errorf("schedule already exists") - } - return c.createSchedule(ctx, handle, scheduleID, workflowID, frequency, jobID) - - case ActionUpdate: - if frequency == "" { - return nil, fmt.Errorf("frequency is required for updating schedule") - } - return c.updateSchedule(ctx, handle, currentSchedule, scheduleID, frequency) - - case ActionDelete: - if err := handle.Delete(ctx); err != nil { - return nil, fmt.Errorf("failed to delete schedule: %s", err) - } - return map[string]interface{}{"message": "Schedule deleted successfully"}, nil - - case ActionTrigger: 
- if err := handle.Trigger(ctx, client.ScheduleTriggerOptions{ - Overlap: enums.SCHEDULE_OVERLAP_POLICY_SKIP, - }); err != nil { - return nil, fmt.Errorf("failed to trigger schedule: %s", err) - } - return map[string]interface{}{"message": "Schedule triggered successfully"}, nil - case ActionPause: - if err := handle.Pause(ctx, client.SchedulePauseOptions{ - Note: "Paused via API", - }); err != nil { - return nil, fmt.Errorf("failed to pause schedule: %s", err) - } - return map[string]interface{}{"message": "Schedule paused successfully"}, nil - - case ActionUnpause: - if err := handle.Unpause(ctx, client.ScheduleUnpauseOptions{ - Note: "Unpaused via API", - }); err != nil { - return nil, fmt.Errorf("failed to unpause schedule: %s", err) - } - return map[string]interface{}{"message": "Schedule unpaused successfully"}, nil - - default: - return nil, fmt.Errorf("unsupported action: %s", action) - } -} - -// createSchedule creates a new schedule -func (c *Client) createSchedule(ctx context.Context, _ client.ScheduleHandle, scheduleID, workflowID, cronSpec string, jobID int) (map[string]interface{}, error) { - cronSpec = utils.ToCron(cronSpec) - _, err := c.temporalClient.ScheduleClient().Create(ctx, client.ScheduleOptions{ - ID: scheduleID, - Spec: client.ScheduleSpec{ - CronExpressions: []string{cronSpec}, - }, - Action: &client.ScheduleWorkflowAction{ - ID: workflowID, - Workflow: RunSyncWorkflow, - Args: []any{jobID}, - TaskQueue: TaskQueue, - }, - Overlap: enums.SCHEDULE_OVERLAP_POLICY_SKIP, - }) - - if err != nil { - return nil, fmt.Errorf("failed to create schedule: %s", err) - } - - return map[string]interface{}{ - "message": "Schedule created successfully", - "cron": cronSpec, - }, nil -} - -// updateSchedule updates an existing schedule -func (c *Client) updateSchedule(ctx context.Context, handle client.ScheduleHandle, currentSchedule *client.ScheduleDescription, _, cronSpec string) (map[string]interface{}, error) { - cronSpec = utils.ToCron(cronSpec) - // 
Check if update is needed - if len(currentSchedule.Schedule.Spec.CronExpressions) > 0 && - currentSchedule.Schedule.Spec.CronExpressions[0] == cronSpec { - return map[string]interface{}{"message": "Schedule already up to date"}, nil - } - - err := handle.Update(ctx, client.ScheduleUpdateOptions{ - DoUpdate: func(input client.ScheduleUpdateInput) (*client.ScheduleUpdate, error) { - input.Description.Schedule.Spec = &client.ScheduleSpec{ - CronExpressions: []string{cronSpec}, - } - return &client.ScheduleUpdate{ - Schedule: &input.Description.Schedule, - }, nil - }, - }) - - if err != nil { - return nil, fmt.Errorf("failed to update schedule: %s", err) - } - return map[string]interface{}{ - "message": "Schedule updated successfully", - "cron": cronSpec, - }, nil -} - -// cancelWorkflow cancels a workflow execution -func (c *Client) CancelWorkflow(ctx context.Context, workflowID, runID string) error { - return c.temporalClient.CancelWorkflow(ctx, workflowID, runID) -} - -// ListWorkflow lists workflow executions based on the provided query -func (c *Client) ListWorkflow(ctx context.Context, request *workflowservice.ListWorkflowExecutionsRequest) (*workflowservice.ListWorkflowExecutionsResponse, error) { - // Query workflows using the SDK's ListWorkflow method - resp, err := c.temporalClient.ListWorkflow(ctx, request) - if err != nil { - return nil, fmt.Errorf("error listing workflow executions: %v", err) - } - - return resp, nil -} diff --git a/server/internal/temporal/types.go b/server/internal/temporal/types.go deleted file mode 100644 index 036c8cec..00000000 --- a/server/internal/temporal/types.go +++ /dev/null @@ -1,41 +0,0 @@ -package temporal - -import "github.com/datazip/olake-frontend/server/internal/docker" - -// DockerCommandParams contains parameters for Docker commands (legacy) -type DockerCommandParams struct { - SourceType string - Version string - Config string - SourceID int - Command string -} - -// ActivityParams contains parameters for Docker 
command activities -type ActivityParams struct { - DestinationType string - SourceType string - Version string - Config string - SourceID int - Command docker.Command - DestConfig string - DestID int - WorkflowID string - StreamsConfig string - Flag string - JobName string -} - -// SyncParams contains parameters for sync activities -type SyncParams struct { - JobID int - WorkflowID string - JobName string - CreatedBy string - CreatedAt string - SourceType string - SourceName string - DestinationType string - DestinationName string -} diff --git a/server/internal/temporal/worker.go b/server/internal/temporal/worker.go deleted file mode 100644 index 677fe44a..00000000 --- a/server/internal/temporal/worker.go +++ /dev/null @@ -1,56 +0,0 @@ -package temporal - -import ( - "fmt" - - "go.temporal.io/sdk/client" - "go.temporal.io/sdk/worker" -) - -// Worker handles Temporal worker functionality -type Worker struct { - temporalClient client.Client - worker worker.Worker -} - -// NewWorker creates a new Temporal worker -func NewWorker() (*Worker, error) { - c, err := client.Dial(client.Options{ - HostPort: TemporalAddress, - }) - if err != nil { - return nil, fmt.Errorf("failed to create Temporal client: %v", err) - } - - // Create a worker - w := worker.New(c, TaskQueue, worker.Options{}) - - // Register workflows - w.RegisterWorkflow(DiscoverCatalogWorkflow) - w.RegisterWorkflow(TestConnectionWorkflow) - w.RegisterWorkflow(RunSyncWorkflow) - w.RegisterWorkflow(FetchSpecWorkflow) - - // Register activities - w.RegisterActivity(DiscoverCatalogActivity) - w.RegisterActivity(TestConnectionActivity) - w.RegisterActivity(SyncActivity) - w.RegisterActivity(FetchSpecActivity) - w.RegisterActivity(SyncCleanupActivity) - - return &Worker{ - temporalClient: c, - worker: w, - }, nil -} - -// Start starts the worker -func (w *Worker) Start() error { - return w.worker.Start() -} - -// Stop stops the worker -func (w *Worker) Stop() { - w.worker.Stop() - w.temporalClient.Close() -} diff 
--git a/server/internal/temporal/workflows.go b/server/internal/temporal/workflows.go deleted file mode 100644 index 87cffc27..00000000 --- a/server/internal/temporal/workflows.go +++ /dev/null @@ -1,129 +0,0 @@ -package temporal - -import ( - "context" - "fmt" - "time" - - "github.com/datazip/olake-frontend/server/internal/models" - "github.com/datazip/olake-frontend/server/internal/telemetry" - "go.temporal.io/sdk/temporal" - "go.temporal.io/sdk/workflow" -) - -// Retry policy constants -var ( - // DefaultRetryPolicy is used for standard operations like discovery and testing connections - DefaultRetryPolicy = &temporal.RetryPolicy{ - InitialInterval: time.Second * 15, - BackoffCoefficient: 2.0, - MaximumInterval: time.Minute * 10, - MaximumAttempts: 1, - } -) - -// DiscoverCatalogWorkflow is a workflow for discovering catalogs -func DiscoverCatalogWorkflow(ctx workflow.Context, params *ActivityParams) (map[string]interface{}, error) { - // Execute the DiscoverCatalogActivity directly - options := workflow.ActivityOptions{ - StartToCloseTimeout: time.Minute * 10, - RetryPolicy: DefaultRetryPolicy, - } - ctx = workflow.WithActivityOptions(ctx, options) - - var result map[string]interface{} - err := workflow.ExecuteActivity(ctx, DiscoverCatalogActivity, params).Get(ctx, &result) - if err != nil { - return nil, err - } - - return result, nil -} - -// FetchSpecWorkflow is a workflow for fetching connector specifications -func FetchSpecWorkflow(ctx workflow.Context, params *ActivityParams) (models.SpecOutput, error) { - // Execute the FetchSpecActivity directly - options := workflow.ActivityOptions{ - StartToCloseTimeout: time.Minute * 5, - HeartbeatTimeout: time.Minute * 1, - RetryPolicy: DefaultRetryPolicy, - } - ctx = workflow.WithActivityOptions(ctx, options) - - var result models.SpecOutput - err := workflow.ExecuteActivity(ctx, FetchSpecActivity, params).Get(ctx, &result) - if err != nil { - return models.SpecOutput{}, err - } - - return result, nil -} - -// 
TestConnectionWorkflow is a workflow for testing connections -func TestConnectionWorkflow(ctx workflow.Context, params *ActivityParams) (map[string]interface{}, error) { - // Execute the TestConnectionActivity directly - options := workflow.ActivityOptions{ - StartToCloseTimeout: time.Minute * 10, - RetryPolicy: DefaultRetryPolicy, - } - ctx = workflow.WithActivityOptions(ctx, options) - - var result map[string]interface{} - err := workflow.ExecuteActivity(ctx, TestConnectionActivity, params).Get(ctx, &result) - if err != nil { - return nil, err - } - - return result, nil -} - -// RunSyncWorkflow is a workflow for running data synchronization -func RunSyncWorkflow(ctx workflow.Context, jobID int) (result map[string]interface{}, err error) { - logger := workflow.GetLogger(ctx) - options := workflow.ActivityOptions{ - StartToCloseTimeout: time.Hour * 24 * 30, // 30 days - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: time.Second * 15, - BackoffCoefficient: 2.0, - MaximumInterval: time.Minute * 10, - MaximumAttempts: 0, - }, - WaitForCancellation: true, - HeartbeatTimeout: time.Minute * 1, - } - params := SyncParams{ - JobID: jobID, - WorkflowID: workflow.GetInfo(ctx).WorkflowExecution.ID, - } - - ctx = workflow.WithActivityOptions(ctx, options) - // Defer cleanup for cancellation - defer func() { - logger.Info("executing workflow cleanup...") - newCtx, _ := workflow.NewDisconnectedContext(ctx) - cleanupOptions := workflow.ActivityOptions{ - StartToCloseTimeout: time.Minute * 15, - RetryPolicy: DefaultRetryPolicy, - } - newCtx = workflow.WithActivityOptions(newCtx, cleanupOptions) - perr := workflow.ExecuteActivity(newCtx, SyncCleanupActivity, params).Get(newCtx, nil) - if perr != nil { - perr = fmt.Errorf("cleanup error: %s", perr) - if err != nil { - // preserve original err, just append cleanup info - err = fmt.Errorf("%s; cleanup error: %s", err, perr) - } - } - }() - - err = workflow.ExecuteActivity(ctx, SyncActivity, params).Get(ctx, &result) - if err 
!= nil { - // Track sync failure event - telemetry.TrackSyncFailed(context.Background(), jobID, params.WorkflowID) - return nil, err - } - - // Track sync completion - telemetry.TrackSyncCompleted(context.Background(), jobID, params.WorkflowID) - return result, nil -} diff --git a/server/main.go b/server/main.go index 0dabb52b..b5eb5ba2 100644 --- a/server/main.go +++ b/server/main.go @@ -1,49 +1,43 @@ package main import ( - "os" - "github.com/beego/beego/v2/client/orm" - "github.com/beego/beego/v2/core/config" - "github.com/beego/beego/v2/core/logs" "github.com/beego/beego/v2/server/web" - "github.com/datazip/olake-frontend/server/internal/constants" - "github.com/datazip/olake-frontend/server/internal/database" - "github.com/datazip/olake-frontend/server/internal/logger" - "github.com/datazip/olake-frontend/server/internal/telemetry" - "github.com/datazip/olake-frontend/server/routes" + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/database" + "github.com/datazip-inc/olake-ui/server/internal/handlers" + services "github.com/datazip-inc/olake-ui/server/internal/services/etl" + "github.com/datazip-inc/olake-ui/server/routes" + "github.com/datazip-inc/olake-ui/server/utils/logger" + "github.com/datazip-inc/olake-ui/server/utils/telemetry" ) func main() { - // TODO: check if we have to create a new config file for docker compatibility - if key := os.Getenv(constants.EncryptionKey); key == "" { - logs.Warning("Encryption key is not set. 
This is not recommended for production environments.") - } - - // start telemetry service - telemetry.InitTelemetry() - - // check constants constants.Init() + logger.Init() + db, err := database.Init() + if err != nil { + logger.Fatalf("Failed to initialize database: %s", err) + return + } - // init logger - logsdir, _ := config.String("logsdir") - logger.InitLogger(logsdir) - - // init database - err := database.Init() + // Initialize unified AppService + appSvc, err := services.InitAppService(db) if err != nil { - logs.Critical("Failed to initialize database: %s", err) + logger.Fatalf("Failed to initialize services: %s", err) return } + logger.Info("Application services initialized successfully") + telemetry.InitTelemetry(db) - // init routers - routes.Init() + routes.Init(handlers.NewHandler(appSvc)) + if key, _ := web.AppConfig.String(constants.ConfEncryptionKey); key == "" { + logger.Warn("Encryption key is not set. This is not recommended for production environments.") + } - // setup environment mode if web.BConfig.RunMode == "dev" || web.BConfig.RunMode == "staging" { orm.Debug = true } - web.Run() + // TODO: handle gracefull shutdown } diff --git a/server/routes/router.go b/server/routes/router.go index f7ea7a72..3dc17344 100644 --- a/server/routes/router.go +++ b/server/routes/router.go @@ -5,7 +5,9 @@ import ( "github.com/beego/beego/v2/server/web" "github.com/beego/beego/v2/server/web/context" - "github.com/datazip/olake-frontend/server/internal/handlers" + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/internal/handlers" + "github.com/datazip-inc/olake-ui/server/internal/handlers/middleware" ) // writeDefaultCorsHeaders sets common CORS headers @@ -31,59 +33,62 @@ func CustomCorsFilter(ctx *context.Context) { } } -func Init() { - if runmode, err := web.AppConfig.String("runmode"); err == nil && runmode == "localdev" { +func Init(h *handlers.Handler) { + if runmode, err := 
web.AppConfig.String(constants.ConfRunMode); err == nil && runmode == "localdev" { web.InsertFilter("*", web.BeforeRouter, CustomCorsFilter) } else { // Serve static frontend files web.SetStaticPath("", "/opt/frontend/dist") // Vite assets are in /assets // Serve index.html for React frontend - web.Router("/*", &handlers.FrontendHandler{}) // any other frontend route + web.Router("/*", h, "get:ServeFrontend") // any other frontend route } // Apply auth middleware to protected routes - web.InsertFilter("/api/v1/*", web.BeforeRouter, handlers.AuthMiddleware) + web.InsertFilter("/api/v1/*", web.BeforeRouter, middleware.AuthMiddleware) // Auth routes - web.Router("/login", &handlers.AuthHandler{}, "post:Login") - web.Router("/signup", &handlers.AuthHandler{}, "post:Signup") - web.Router("/auth/check", &handlers.AuthHandler{}, "get:CheckAuth") - web.Router("/telemetry-id", &handlers.AuthHandler{}, "get:GetTelemetryID") + web.Router("/login", h, "post:Login") + web.Router("/signup", h, "post:Signup") + web.Router("/auth/check", h, "get:CheckAuth") + web.Router("/telemetry-id", h, "get:GetTelemetryID") // User routes - web.Router("/api/v1/users", &handlers.UserHandler{}, "post:CreateUser") - web.Router("/api/v1/users", &handlers.UserHandler{}, "get:GetAllUsers") - web.Router("/api/v1/users/:id", &handlers.UserHandler{}, "put:UpdateUser") - web.Router("/api/v1/users/:id", &handlers.UserHandler{}, "delete:DeleteUser") + web.Router("/api/v1/users", h, "post:CreateUser") + web.Router("/api/v1/users", h, "get:GetAllUsers") + web.Router("/api/v1/users/:id", h, "put:UpdateUser") + web.Router("/api/v1/users/:id", h, "delete:DeleteUser") // Source routes - web.Router("/api/v1/project/:projectid/sources", &handlers.SourceHandler{}, "get:GetAllSources") - web.Router("/api/v1/project/:projectid/sources", &handlers.SourceHandler{}, "post:CreateSource") - web.Router("/api/v1/project/:projectid/sources/:id", &handlers.SourceHandler{}, "put:UpdateSource") - 
web.Router("/api/v1/project/:projectid/sources/:id", &handlers.SourceHandler{}, "delete:DeleteSource") - web.Router("/api/v1/project/:projectid/sources/test", &handlers.SourceHandler{}, "post:TestConnection") - web.Router("/api/v1/project/:projectid/sources/streams", &handlers.SourceHandler{}, "post:GetSourceCatalog") - web.Router("/api/v1/project/:projectid/sources/versions", &handlers.SourceHandler{}, "get:GetSourceVersions") - web.Router("/api/v1/project/:projectid/sources/spec", &handlers.SourceHandler{}, "post:GetProjectSourceSpec") + web.Router("/api/v1/project/:projectid/sources", h, "get:ListSources") + web.Router("/api/v1/project/:projectid/sources", h, "post:CreateSource") + web.Router("/api/v1/project/:projectid/sources/:id", h, "put:UpdateSource") + web.Router("/api/v1/project/:projectid/sources/:id", h, "delete:DeleteSource") + web.Router("/api/v1/project/:projectid/sources/test", h, "post:TestSourceConnection") + web.Router("/api/v1/project/:projectid/sources/streams", h, "post:GetSourceCatalog") + web.Router("/api/v1/project/:projectid/sources/versions", h, "get:GetSourceVersions") + web.Router("/api/v1/project/:projectid/sources/spec", h, "post:GetSourceSpec") // Destination routes - web.Router("/api/v1/project/:projectid/destinations", &handlers.DestHandler{}, "get:GetAllDestinations") - web.Router("/api/v1/project/:projectid/destinations", &handlers.DestHandler{}, "post:CreateDestination") - web.Router("/api/v1/project/:projectid/destinations/:id", &handlers.DestHandler{}, "put:UpdateDestination") - web.Router("/api/v1/project/:projectid/destinations/:id", &handlers.DestHandler{}, "delete:DeleteDestination") - web.Router("/api/v1/project/:projectid/destinations/test", &handlers.DestHandler{}, "post:TestConnection") - web.Router("/api/v1/project/:projectid/destinations/versions", &handlers.DestHandler{}, "get:GetDestinationVersions") - web.Router("/api/v1/project/:projectid/destinations/spec", &handlers.DestHandler{}, "post:GetDestinationSpec") + 
web.Router("/api/v1/project/:projectid/destinations", h, "get:ListDestinations") + web.Router("/api/v1/project/:projectid/destinations", h, "post:CreateDestination") + web.Router("/api/v1/project/:projectid/destinations/:id", h, "put:UpdateDestination") + web.Router("/api/v1/project/:projectid/destinations/:id", h, "delete:DeleteDestination") + web.Router("/api/v1/project/:projectid/destinations/test", h, "post:TestDestinationConnection") + web.Router("/api/v1/project/:projectid/destinations/versions", h, "get:GetDestinationVersions") + web.Router("/api/v1/project/:projectid/destinations/spec", h, "post:GetDestinationSpec") // Job routes - web.Router("/api/v1/project/:projectid/jobs", &handlers.JobHandler{}, "get:GetAllJobs") - web.Router("/api/v1/project/:projectid/jobs", &handlers.JobHandler{}, "post:CreateJob") - web.Router("/api/v1/project/:projectid/jobs/:id", &handlers.JobHandler{}, "put:UpdateJob") - web.Router("/api/v1/project/:projectid/jobs/:id", &handlers.JobHandler{}, "delete:DeleteJob") - web.Router("/api/v1/project/:projectid/jobs/:id/sync", &handlers.JobHandler{}, "post:SyncJob") - web.Router("/api/v1/project/:projectid/jobs/:id/activate", &handlers.JobHandler{}, "post:ActivateJob") - web.Router("/api/v1/project/:projectid/jobs/:id/tasks", &handlers.JobHandler{}, "get:GetJobTasks") - web.Router("/api/v1/project/:projectid/jobs/:id/cancel", &handlers.JobHandler{}, "get:CancelJobRun") - web.Router("/api/v1/project/:projectid/jobs/:id/tasks/:taskid/logs", &handlers.JobHandler{}, "post:GetTaskLogs") - web.Router("/api/v1/project/:projectid/jobs/check-unique", &handlers.JobHandler{}, "post:CheckUniqueJobName") + web.Router("/api/v1/project/:projectid/jobs", h, "get:ListJobs") + web.Router("/api/v1/project/:projectid/jobs", h, "post:CreateJob") + web.Router("/api/v1/project/:projectid/jobs/:id", h, "put:UpdateJob") + web.Router("/api/v1/project/:projectid/jobs/:id", h, "delete:DeleteJob") + web.Router("/api/v1/project/:projectid/jobs/:id/sync", h, 
"post:SyncJob") + web.Router("/api/v1/project/:projectid/jobs/:id/activate", h, "post:ActivateJob") + web.Router("/api/v1/project/:projectid/jobs/:id/tasks", h, "get:GetJobTasks") + web.Router("/api/v1/project/:projectid/jobs/:id/cancel", h, "get:CancelJobRun") + web.Router("/api/v1/project/:projectid/jobs/:id/tasks/:taskid/logs", h, "post:GetTaskLogs") + web.Router("/api/v1/project/:projectid/jobs/check-unique", h, "post:CheckUniqueJobName") + + // worker callback routes + web.Router("/internal/worker/callback/sync-telemetry", h, "post:UpdateSyncTelemetry") } diff --git a/server/tests/test_utils.go b/server/tests/test_utils.go index 21680f8f..81c40b0e 100644 --- a/server/tests/test_utils.go +++ b/server/tests/test_utils.go @@ -243,6 +243,10 @@ func DinDTestContainer(t *testing.T) error { } t.Log("Playwright tests passed successfully.") + // wait before verifying iceberg data + t.Log("Waiting for 20 seconds before verifying iceberg data...") + time.Sleep(20 * time.Second) + // Step 11: Verify in iceberg t.Logf("Starting Iceberg data verification...") VerifyIcebergTest(ctx, t, ctr, host, sparkPort.Port()) @@ -271,7 +275,7 @@ func ExecCommandWithStreaming(ctx context.Context, t *testing.T, ctr testcontain return exitCode, output.String(), nil } -// PatchDockerCompose updates olake-ui and temporal-worker to build from local code +// PatchDockerCompose updates olake-ui to build from local code // TODO: Remove patch command and find alternative to use local code func PatchDockerCompose(ctx context.Context, t *testing.T, ctr testcontainers.Container) error { patchCmd := ` @@ -289,12 +293,6 @@ func PatchDockerCompose(ctx context.Context, t *testing.T, ctr testcontainers.Co print " dockerfile: Dockerfile"; next } - if (svc=="temporal-worker" && $0 ~ /^ image:/) { - print " build:"; - print " context: ."; - print " dockerfile: worker.Dockerfile"; - next - } print } ' /mnt/docker-compose.yml > "$tmpfile" && mv "$tmpfile" /mnt/docker-compose.yml diff --git 
a/server/utils/docker_utils.go b/server/utils/docker_utils.go index 8aa786df..ec733182 100644 --- a/server/utils/docker_utils.go +++ b/server/utils/docker_utils.go @@ -15,8 +15,9 @@ import ( "github.com/aws/aws-sdk-go-v2/aws" "github.com/aws/aws-sdk-go-v2/config" "github.com/aws/aws-sdk-go-v2/service/ecr" - "github.com/beego/beego/v2/core/logs" "github.com/beego/beego/v2/server/web" + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/datazip-inc/olake-ui/server/utils/logger" "golang.org/x/mod/semver" ) @@ -67,9 +68,9 @@ func GetWorkerEnvVars() map[string]string { // GetDriverImageTags returns image tags from ECR or Docker Hub with fallback to cached images func GetDriverImageTags(ctx context.Context, imageName string, cachedTags bool) ([]string, string, error) { // TODO: make constants file and validate all env vars in start of server - repositoryBase, err := web.AppConfig.String("CONTAINER_REGISTRY_BASE") + repositoryBase, err := web.AppConfig.String(constants.ConfContainerRegistryBase) if err != nil { - return nil, "", fmt.Errorf("failed to get CONTAINER_REGISTRY_BASE: %v", err) + return nil, "", fmt.Errorf("failed to get CONTAINER_REGISTRY_BASE: %s", err) } var tags []string images := []string{imageName} @@ -87,7 +88,7 @@ func GetDriverImageTags(ctx context.Context, imageName string, cachedTags bool) // Fallback to cached if online fetch fails or explicitly requested if err != nil && cachedTags { - logs.Warn("failed to fetch image tags online for %s: %s, falling back to cached tags", imageName, err) + logger.Warn("failed to fetch image tags online for %s: %s, falling back to cached tags", imageName, err) tags, err = fetchCachedImageTags(ctx, imageName, repositoryBase) if err != nil { return nil, "", fmt.Errorf("failed to fetch cached image tags for %s: %s", imageName, err) diff --git a/server/utils/encryption.go b/server/utils/encryption.go index aa2aa3f0..6b6c6edb 100644 --- a/server/utils/encryption.go +++ b/server/utils/encryption.go 
@@ -11,12 +11,12 @@ import ( "errors" "fmt" "io" - "os" "strings" "github.com/aws/aws-sdk-go-v2/config" "github.com/aws/aws-sdk-go-v2/service/kms" - "github.com/datazip/olake-frontend/server/internal/constants" + "github.com/beego/beego/v2/server/web" + "github.com/datazip-inc/olake-ui/server/internal/constants" ) // utility provides encryption and decryption functionality using either AWS KMS or local AES-256-GCM. @@ -31,7 +31,7 @@ import ( func getSecretKey() ([]byte, *kms.Client, error) { // TODO: can we move this to constants and set key and kms client // TODO: use viper package to read environment variables - envKey := os.Getenv(constants.EncryptionKey) + envKey, _ := web.AppConfig.String(constants.ConfEncryptionKey) if strings.TrimSpace(envKey) == "" { return []byte{}, nil, nil // Encryption is disabled } @@ -105,12 +105,12 @@ func Decrypt(encryptedText string) (string, error) { var config string err = json.Unmarshal([]byte(encryptedText), &config) if err != nil { - return "", fmt.Errorf("failed to unmarshal JSON string: %v", err) + return "", fmt.Errorf("failed to unmarshal JSON string: %s", err) } encryptedData, err := base64.StdEncoding.DecodeString(config) if err != nil { - return "", fmt.Errorf("failed to decode base64 data: %v", err) + return "", fmt.Errorf("failed to decode base64 data: %s", err) } // Use KMS if client is provided diff --git a/server/utils/logger/logger.go b/server/utils/logger/logger.go new file mode 100644 index 00000000..1d99f74a --- /dev/null +++ b/server/utils/logger/logger.go @@ -0,0 +1,102 @@ +package logger + +import ( + "io" + "os" + "strings" + "time" + + "github.com/datazip-inc/olake-ui/server/internal/constants" + "github.com/rs/zerolog" + "github.com/spf13/viper" +) + +var logger zerolog.Logger + +func Init() { + format := viper.GetString(constants.EnvLogFormat) + level := viper.GetString(constants.EnvLogLevel) + + zerolog.TimestampFunc = func() time.Time { return time.Now().UTC() } + + var writer io.Writer + switch 
strings.ToLower(format) { + case "console": + // Use ConsoleWriter with built-in colors and formatting + writer = zerolog.ConsoleWriter{ + Out: os.Stdout, + TimeFormat: time.RFC3339, + } + default: + // Default to JSON for production safety + writer = os.Stdout + } + + logger = zerolog.New(writer).With().Timestamp().Logger() + zerolog.SetGlobalLevel(parseLogLevel(level)) +} + +// parseLogLevel converts a string level to a zerolog.Level +func parseLogLevel(levelStr string) zerolog.Level { + switch strings.ToLower(levelStr) { + case "debug": + return zerolog.DebugLevel + case "info": + return zerolog.InfoLevel + case "warn": + return zerolog.WarnLevel + case "error": + return zerolog.ErrorLevel + case "fatal": + return zerolog.FatalLevel + default: + return zerolog.InfoLevel // Default to info level + } +} + +// Info writes record with log level INFO +func Info(v ...interface{}) { + if len(v) == 1 { + logger.Info().Interface("message", v[0]).Send() + } else { + logger.Info().Msgf("%s", v...) + } +} + +func Infof(format string, v ...interface{}) { + logger.Info().Msgf(format, v...) +} + +func Debug(v ...interface{}) { + logger.Debug().Msgf("%s", v...) +} + +func Debugf(format string, v ...interface{}) { + logger.Debug().Msgf(format, v...) +} + +func Error(v ...interface{}) { + logger.Error().Msgf("%s", v...) +} + +func Errorf(format string, v ...interface{}) { + logger.Error().Msgf(format, v...) +} + +func Warn(v ...interface{}) { + logger.Warn().Msgf("%s", v...) +} + +func Warnf(format string, v ...interface{}) { + logger.Warn().Msgf(format, v...) +} + +func Fatal(v ...interface{}) { + logger.Fatal().Msgf("%s", v...) + os.Exit(1) +} + +func Fatalf(format string, v ...interface{}) { + logger.Fatal().Msgf(format, v...) 
+ os.Exit(1) +} diff --git a/server/internal/telemetry/auth.go b/server/utils/telemetry/auth.go similarity index 71% rename from server/internal/telemetry/auth.go rename to server/utils/telemetry/auth.go index 49608f3c..a8e36192 100644 --- a/server/internal/telemetry/auth.go +++ b/server/utils/telemetry/auth.go @@ -3,8 +3,8 @@ package telemetry import ( "context" - "github.com/beego/beego/v2/core/logs" - "github.com/datazip/olake-frontend/server/internal/models" + "github.com/datazip-inc/olake-ui/server/internal/models" + "github.com/datazip-inc/olake-ui/server/utils/logger" ) // TrackUserLogin tracks when a user logs in to olake-ui @@ -22,7 +22,7 @@ func TrackUserLogin(ctx context.Context, user *models.User) { err := TrackEvent(ctx, EventUserLogin, properties) if err != nil { - logs.Debug("Failed to track user login event: %s", err) + logger.Debug("Failed to track user login event: %s", err) } }() } diff --git a/server/internal/telemetry/constants.go b/server/utils/telemetry/constants.go similarity index 100% rename from server/internal/telemetry/constants.go rename to server/utils/telemetry/constants.go diff --git a/server/internal/telemetry/destination.go b/server/utils/telemetry/destination.go similarity index 73% rename from server/internal/telemetry/destination.go rename to server/utils/telemetry/destination.go index b92fa793..34db60ec 100644 --- a/server/internal/telemetry/destination.go +++ b/server/utils/telemetry/destination.go @@ -5,9 +5,8 @@ import ( "encoding/json" "time" - "github.com/beego/beego/v2/core/logs" - "github.com/datazip/olake-frontend/server/internal/database" - "github.com/datazip/olake-frontend/server/internal/models" + "github.com/datazip-inc/olake-ui/server/internal/models" + "github.com/datazip-inc/olake-ui/server/utils/logger" ) // TrackDestinationCreation tracks the creation of a new destination with relevant properties @@ -27,7 +26,7 @@ func TrackDestinationCreation(ctx context.Context, dest *models.Destination) { var configMap 
map[string]interface{} // parse config to get catalog_type if err := json.Unmarshal([]byte(dest.Config), &configMap); err != nil { - logs.Debug("Failed to unmarshal config: %s", err) + logger.Debug("Failed to unmarshal config: %s", err) return } @@ -42,7 +41,7 @@ func TrackDestinationCreation(ctx context.Context, dest *models.Destination) { } if err := TrackEvent(ctx, EventDestinationCreated, properties); err != nil { - logs.Debug("Failed to track destination creation event: %s", err) + logger.Debug("Failed to track destination creation event: %s", err) return } @@ -58,13 +57,9 @@ func TrackDestinationsStatus(ctx context.Context) { return } - // TODO: remove creation of orm from here - destORM := database.NewDestinationORM() - jobORM := database.NewJobORM() - - destinations, err := destORM.GetAll() + destinations, err := instance.db.ListDestinations() if err != nil { - logs.Debug("Failed to get all destinations: %s", err) + logger.Debug("Failed to get all destinations: %s", err) return } @@ -72,9 +67,9 @@ func TrackDestinationsStatus(ctx context.Context) { for _, dest := range destinations { // TODO: remove db calls loop - jobs, err := jobORM.GetByDestinationID(dest.ID) + jobs, err := instance.db.GetJobsByDestinationID([]int{dest.ID}) if err != nil { - logs.Debug("Failed to get jobs for destination %d: %s", dest.ID, err) + logger.Debug("Failed to get jobs for destination %d: %s", dest.ID, err) break } if len(jobs) > 0 { @@ -90,7 +85,7 @@ func TrackDestinationsStatus(ctx context.Context) { } if err := TrackEvent(ctx, EventDestinationsUpdated, props); err != nil { - logs.Debug("failed to track destination status event: %s", err) + logger.Debug("failed to track destination status event: %s", err) } }() } diff --git a/server/utils/telemetry/job.go b/server/utils/telemetry/job.go new file mode 100644 index 00000000..a8ff9b6e --- /dev/null +++ b/server/utils/telemetry/job.go @@ -0,0 +1,53 @@ +package telemetry + +import ( + "context" + "time" + + 
"github.com/datazip-inc/olake-ui/server/internal/models" + "github.com/datazip-inc/olake-ui/server/utils/logger" +) + +// TrackJobCreation tracks the creation of a new job with relevant properties +func TrackJobCreation(ctx context.Context, job *models.Job) { + go func() { + if instance == nil || job == nil { + return + } + + properties := map[string]interface{}{ + "job_id": job.ID, + "job_name": job.Name, + "project_id": job.ProjectID, + "frequency": job.Frequency, + "active": job.Active, + } + + // Safely add source properties + if job.SourceID != nil { + properties["source_type"] = job.SourceID.Type + properties["source_name"] = job.SourceID.Name + } + + // Safely add destination properties + if job.DestID != nil { + properties["destination_type"] = job.DestID.DestType + properties["destination_name"] = job.DestID.Name + } + + if !job.CreatedAt.IsZero() { + properties["created_at"] = job.CreatedAt.Format(time.RFC3339) + } + + if err := TrackEvent(ctx, EventJobCreated, properties); err != nil { + logger.Debug("Failed to track job creation event: %s", err) + return + } + TrackJobEntity(ctx) + }() +} + +func TrackJobEntity(ctx context.Context) { + TrackSourcesStatus(ctx) + TrackDestinationsStatus(ctx) +} diff --git a/server/internal/telemetry/source.go b/server/utils/telemetry/source.go similarity index 69% rename from server/internal/telemetry/source.go rename to server/utils/telemetry/source.go index 3e47e769..49bc3601 100644 --- a/server/internal/telemetry/source.go +++ b/server/utils/telemetry/source.go @@ -4,9 +4,8 @@ import ( "context" "time" - "github.com/beego/beego/v2/core/logs" - "github.com/datazip/olake-frontend/server/internal/database" - "github.com/datazip/olake-frontend/server/internal/models" + "github.com/datazip-inc/olake-ui/server/internal/models" + "github.com/datazip-inc/olake-ui/server/utils/logger" ) // TrackSourceCreation tracks the creation of a new source with relevant properties @@ -28,7 +27,7 @@ func TrackSourceCreation(ctx 
context.Context, source *models.Source) { } if err := TrackEvent(ctx, EventSourceCreated, properties); err != nil { - logs.Debug("Failed to track source creation event: %s", err) + logger.Debug("Failed to track source creation event: %s", err) return } // Track sources status after creation @@ -43,21 +42,18 @@ func TrackSourcesStatus(ctx context.Context) { return } - sourceORM := database.NewSourceORM() - jobORM := database.NewJobORM() - - sources, err := sourceORM.GetAll() + sources, err := instance.db.ListSources() if err != nil { - logs.Debug("failed to get all sources in track source status: %s", err) + logger.Debug("failed to get all sources in track source status: %s", err) return } activeSources := 0 for _, source := range sources { // TODO: remove orm calls from loop - jobs, err := jobORM.GetBySourceID(source.ID) + jobs, err := instance.db.GetJobsBySourceID([]int{source.ID}) if err != nil { - logs.Debug("failed to get all jobs for source[%d] in track source status: %s", source.ID, err) + logger.Debug("failed to get all jobs for source[%d] in track source status: %s", source.ID, err) break } if len(jobs) > 0 { @@ -73,7 +69,7 @@ func TrackSourcesStatus(ctx context.Context) { } if err := TrackEvent(ctx, EventSourcesUpdated, props); err != nil { - logs.Debug("failed to track source status event: %s", err) + logger.Debug("failed to track source status event: %s", err) } }() } diff --git a/server/internal/telemetry/sync.go b/server/utils/telemetry/sync.go similarity index 85% rename from server/internal/telemetry/sync.go rename to server/utils/telemetry/sync.go index c069a8b4..9f03e369 100644 --- a/server/internal/telemetry/sync.go +++ b/server/utils/telemetry/sync.go @@ -9,9 +9,8 @@ import ( "path/filepath" "time" - "github.com/beego/beego/v2/core/logs" - "github.com/datazip/olake-frontend/server/internal/constants" - "github.com/datazip/olake-frontend/server/internal/database" + "github.com/datazip-inc/olake-ui/server/internal/constants" + 
"github.com/datazip-inc/olake-ui/server/utils/logger" ) type jobDetails struct { @@ -25,8 +24,7 @@ type jobDetails struct { } func getJobDetails(jobID int) (*jobDetails, error) { - jobORM := database.NewJobORM() - job, err := jobORM.GetByID(jobID, false) + job, err := instance.db.GetJobByID(jobID, false) if err != nil || job == nil { if job == nil { return nil, fmt.Errorf("job not found") @@ -40,7 +38,7 @@ func getJobDetails(jobID int) (*jobDetails, error) { } if job.CreatedBy != nil { - if user, err := database.NewUserORM().GetByID(job.CreatedBy.ID); err == nil { + if user, err := instance.db.GetUserByID(job.CreatedBy.ID); err == nil { details.CreatedBy = user.Username } } @@ -177,33 +175,33 @@ func TrackSyncStart(ctx context.Context, jobID int, workflowID string) { err := trackSyncEvent(ctx, jobID, workflowID, EventSyncStarted) if err != nil { - logs.Debug("failed to track sync start event: %s", err) + logger.Debug("failed to track sync start event: %s", err) } }() } -func TrackSyncFailed(ctx context.Context, jobID int, workflowID string) { +func TrackSyncFailed(jobID int, workflowID string) { go func() { if instance == nil { return } - err := trackSyncEvent(ctx, jobID, workflowID, EventSyncFailed) + err := trackSyncEvent(context.Background(), jobID, workflowID, EventSyncFailed) if err != nil { - logs.Debug("failed to track sync failed event: %s", err) + logger.Debug("failed to track sync failed event: %s", err) } }() } -func TrackSyncCompleted(ctx context.Context, jobID int, workflowID string) { +func TrackSyncCompleted(jobID int, workflowID string) { go func() { if instance == nil { return } - err := trackSyncEvent(ctx, jobID, workflowID, EventSyncCompleted) + err := trackSyncEvent(context.Background(), jobID, workflowID, EventSyncCompleted) if err != nil { - logs.Debug("failed to track sync completed event: %s", err) + logger.Debug("failed to track sync completed event: %s", err) } }() } diff --git a/server/internal/telemetry/telemetry.go 
b/server/utils/telemetry/telemetry.go similarity index 86% rename from server/internal/telemetry/telemetry.go rename to server/utils/telemetry/telemetry.go index 9f8d9f86..efda8288 100644 --- a/server/internal/telemetry/telemetry.go +++ b/server/utils/telemetry/telemetry.go @@ -14,6 +14,10 @@ import ( "strconv" "strings" "time" + + "github.com/datazip-inc/olake-ui/server/internal/database" + "github.com/datazip-inc/olake-ui/server/utils/logger" + "github.com/spf13/viper" ) var instance *Telemetry @@ -38,9 +42,10 @@ type Telemetry struct { locationInfo *LocationInfo TempUserID string username string + db *database.Database } -func InitTelemetry() { +func InitTelemetry(db *database.Database) { go func() { if disabled, _ := strconv.ParseBool(os.Getenv("TELEMETRY_DISABLED")); disabled { return @@ -70,17 +75,20 @@ func InitTelemetry() { return string(idBytes) }() + logger.Infof("telemetry initialized with user ID: %s, and App version: %s", tempUserID, viper.GetString("BUILD")) + instance = &Telemetry{ httpClient: &http.Client{Timeout: TelemetryConfigTimeout}, platform: PlatformInfo{ OS: runtime.GOOS, Arch: runtime.GOARCH, - OlakeVersion: OlakeVersion, + OlakeVersion: viper.GetString("BUILD"), DeviceCPU: fmt.Sprintf("%d cores", runtime.NumCPU()), }, ipAddress: ip, TempUserID: tempUserID, locationInfo: getLocationFromIP(ip), + db: db, } }() } @@ -110,7 +118,7 @@ func getLocationFromIP(ip string) *LocationInfo { if ip == IPNotFound || ip == "" { return locationInfo } - + // TODO: remove context.Background() from everywhere creare a context in main.go ctx, cancel := context.WithTimeout(context.Background(), TelemetryConfigTimeout) defer cancel() @@ -144,7 +152,7 @@ func getLocationFromIP(ip string) *LocationInfo { } // TrackEvent sends a custom event to Segment -func TrackEvent(ctx context.Context, eventName string, properties map[string]interface{}) error { +func TrackEvent(_ context.Context, eventName string, properties map[string]interface{}) error { if 
instance.httpClient == nil { return fmt.Errorf("telemetry client is nil") } @@ -180,8 +188,7 @@ func TrackEvent(ctx context.Context, eventName string, properties map[string]int if err != nil { return err } - - req, err := http.NewRequestWithContext(ctx, "POST", ProxyTrackURL, strings.NewReader(string(propsBody))) + req, err := http.NewRequestWithContext(context.Background(), "POST", ProxyTrackURL, strings.NewReader(string(propsBody))) if err != nil { return err } diff --git a/server/utils/utils.go b/server/utils/utils.go index 1171125e..5cf66610 100644 --- a/server/utils/utils.go +++ b/server/utils/utils.go @@ -12,13 +12,11 @@ import ( "strings" "time" - "github.com/beego/beego/v2/core/logs" "github.com/beego/beego/v2/server/web" "github.com/oklog/ulid" - "github.com/robfig/cron" - "github.com/datazip/olake-frontend/server/internal/constants" - "github.com/datazip/olake-frontend/server/internal/models" + "github.com/datazip-inc/olake-ui/server/internal/models/dto" + "github.com/datazip-inc/olake-ui/server/utils/logger" ) func ToMapOfInterface(structure any) map[string]interface{} { @@ -36,7 +34,7 @@ func ToMapOfInterface(structure any) map[string]interface{} { func RespondJSON(ctx *web.Controller, status int, success bool, message string, data interface{}) { ctx.Ctx.Output.SetStatus(status) - ctx.Data["json"] = models.JSONResponse{ + ctx.Data["json"] = dto.JSONResponse{ Success: success, Message: message, Data: data, @@ -44,11 +42,14 @@ func RespondJSON(ctx *web.Controller, status int, success bool, message string, _ = ctx.ServeJSON() } -func SuccessResponse(ctx *web.Controller, data interface{}) { - RespondJSON(ctx, http.StatusOK, true, "success", data) +func SuccessResponse(ctx *web.Controller, message string, data interface{}) { + RespondJSON(ctx, http.StatusOK, true, message, data) } -func ErrorResponse(ctx *web.Controller, status int, message string) { +func ErrorResponse(ctx *web.Controller, status int, message string, err error) { + if err != nil { + 
logger.Errorf("error in request %s: %s", ctx.Ctx.Input.URI(), err) + } RespondJSON(ctx, status, false, message, nil) } @@ -117,7 +118,7 @@ func ULID() string { t := time.Now() newUlid, err := ulid.New(ulid.Timestamp(t), entropy) if err != nil { - logs.Critical(err) + logger.Fatal(err) } return newUlid.String() @@ -134,7 +135,7 @@ func Ternary(cond bool, a, b any) any { func CreateDirectory(dirPath string, perm os.FileMode) error { if _, err := os.Stat(dirPath); os.IsNotExist(err) { if err := os.MkdirAll(dirPath, perm); err != nil { - return fmt.Errorf("failed to create directory %s: %v", dirPath, err) + return fmt.Errorf("failed to create directory %s: %s", dirPath, err) } } return nil @@ -148,26 +149,11 @@ func WriteFile(filePath string, data []byte, perm os.FileMode) error { } if err := os.WriteFile(filePath, data, perm); err != nil { - return fmt.Errorf("failed to write to file %s: %v", filePath, err) + return fmt.Errorf("failed to write to file %s: %s", filePath, err) } return nil } -// ParseJSONFile parses a JSON file into a map -func ParseJSONFile(filePath string) (map[string]interface{}, error) { - fileData, err := os.ReadFile(filePath) - if err != nil { - return nil, fmt.Errorf("failed to read file %s: %v", filePath, err) - } - - var result map[string]interface{} - if err := json.Unmarshal(fileData, &result); err != nil { - return nil, fmt.Errorf("failed to parse JSON from file %s: %v", filePath, err) - } - - return result, nil -} - // ToCron converts a frequency string to a cron expression func ToCron(frequency string) string { parts := strings.Split(strings.ToLower(frequency), "-") @@ -201,75 +187,6 @@ func ToCron(frequency string) string { } } -func CleanOldLogs(logDir string, retentionPeriod int) { - logs.Info("Running log cleaner...") - cutoff := time.Now().AddDate(0, 0, -retentionPeriod) - - // check if old logs are present - shouldDelete := func(path string, cutoff time.Time) bool { - entries, _ := os.ReadDir(path) - if len(entries) == 0 { - return 
true - } - - var foundOldLog bool - _ = filepath.Walk(path, func(filePath string, info os.FileInfo, _ error) error { - if info == nil || info.IsDir() { - return nil - } - if (strings.HasSuffix(filePath, ".log") || strings.HasSuffix(filePath, ".log.gz")) && - info.ModTime().Before(cutoff) { - foundOldLog = true - return filepath.SkipDir - } - return nil - }) - return foundOldLog - } - - entries, err := os.ReadDir(logDir) - if err != nil { - logs.Error("failed to read log dir: %v", err) - return - } - // delete dir if old logs are found or is empty - for _, entry := range entries { - if !entry.IsDir() || entry.Name() == "telemetry" { - continue - } - dirPath := filepath.Join(logDir, entry.Name()) - if toDelete := shouldDelete(dirPath, cutoff); toDelete { - logs.Info("Deleting folder: %s", dirPath) - _ = os.RemoveAll(dirPath) - } - } -} - -// starts a log cleaner that removes old logs from the specified directory based on the retention period -func InitLogCleaner(logDir string, retentionPeriod int) { - logs.Info("Log cleaner started...") - CleanOldLogs(logDir, retentionPeriod) // catchup missed cycles if any - c := cron.New() - err := c.AddFunc("@midnight", func() { - CleanOldLogs(logDir, retentionPeriod) - }) - if err != nil { - logs.Error("Failed to start log cleaner: %v", err) - return - } - c.Start() -} - -// GetRetentionPeriod returns the retention period for logs -func GetLogRetentionPeriod() int { - if val := os.Getenv("LOG_RETENTION_PERIOD"); val != "" { - if retentionPeriod, err := strconv.Atoi(val); err == nil && retentionPeriod > 0 { - return retentionPeriod - } - } - return constants.DefaultLogRetentionPeriod -} - // ExtractJSON extracts and returns the last valid JSON block from output func ExtractJSON(output string) (map[string]interface{}, error) { outputStr := strings.TrimSpace(output) @@ -375,3 +292,25 @@ func ReadLogs(mainLogDir string) ([]map[string]interface{}, error) { return parsedLogs, nil } + +// RetryWithBackoff retries a function with 
exponential backoff +func RetryWithBackoff(fn func() error, maxRetries int, initialDelay time.Duration) error { + delay := initialDelay + var errMsg error + + for retry := 0; retry < maxRetries; retry++ { + if err := fn(); err != nil { + errMsg = err + if retry < maxRetries-1 { + logger.Warnf("Retry attempt %d/%d failed: %s. Retrying in %v...", retry+1, maxRetries, err, delay) + time.Sleep(delay) + delay *= 2 + continue + } + } else { + return nil + } + } + + return fmt.Errorf("failed after %d retries: %s", maxRetries, errMsg) +} diff --git a/ui/src/api/axios.ts b/ui/src/api/axios.ts index 84c3da3c..42da4a93 100644 --- a/ui/src/api/axios.ts +++ b/ui/src/api/axios.ts @@ -9,6 +9,16 @@ import { HTTP_STATUS, LOCALSTORAGE_TOKEN_KEY, } from "../utils/constants" +import { notificationService } from "./services/notificationService" +/** + * Extend Axios types to support our custom notification flag + */ +declare module "axios" { + export interface AxiosRequestConfig { + showNotification?: boolean // Controls whether the interceptor shows a toast (default: false) + disableErrorNotification?: boolean + } +} /** * Creates and configures an axios instance with default settings @@ -44,9 +54,40 @@ api.interceptors.request.use( */ api.interceptors.response.use( (response: AxiosResponse) => { + const config = response.config + const payload = response.data + + // Show toast only if explicitly enabled for this request + if (config.showNotification === true) { + notificationService.success(payload.message) + } + + // Return only the actual data to the caller (unwrap the envelope) + response.data = payload.data + return response }, (error: AxiosError) => { + const payload = error.response?.data as any + const config = error.config + + // Skip showing errors for canceled requests + if ( + axios.isCancel(error) || + error.code === "ERR_CANCELED" || + config?.disableErrorNotification + ) { + return Promise.reject(error) + } + + // Always show error toasts + if (payload.message) { + 
notificationService.error( + payload.message || "An error occurred! Please try again.", + ) + } + + // Handle specific HTTP status codes if (error.response) { const { status } = error.response diff --git a/ui/src/api/services/analyticsService.ts b/ui/src/api/services/analyticsService.ts index 1c555514..71d8b864 100644 --- a/ui/src/api/services/analyticsService.ts +++ b/ui/src/api/services/analyticsService.ts @@ -66,7 +66,7 @@ const getSystemInfo = async () => { const getTelemetryID = async (): Promise => { try { const response = await api.get("/telemetry-id") - return response.data.data.user_id || "" + return response.data.user_id || "" } catch (error) { console.error("Error fetching telemetry ID:", error) return "" diff --git a/ui/src/api/services/authService.ts b/ui/src/api/services/authService.ts index f14b6b1c..0f8ee1ce 100644 --- a/ui/src/api/services/authService.ts +++ b/ui/src/api/services/authService.ts @@ -2,7 +2,7 @@ * AuthService handles authentication-related API calls and localStorage management. 
*/ import api from "../axios" -import { APIResponse, LoginArgs, LoginResponse } from "../../types" +import { LoginArgs, LoginResponse } from "../../types" import { LOCALSTORAGE_TOKEN_KEY, LOCALSTORAGE_USERNAME_KEY, @@ -11,7 +11,7 @@ import { export const authService = { login: async ({ username, password }: LoginArgs) => { try { - const response = await api.post>( + const response = await api.post( "/login", { username, @@ -24,25 +24,11 @@ export const authService = { }, ) - if (response.data.success) { - localStorage.setItem( - LOCALSTORAGE_USERNAME_KEY, - response.data.data.username, - ) - localStorage.setItem(LOCALSTORAGE_TOKEN_KEY, "authenticated") - return response.data.data - } - - throw new Error(response.data.message) + localStorage.setItem(LOCALSTORAGE_USERNAME_KEY, response.data.username) + localStorage.setItem(LOCALSTORAGE_TOKEN_KEY, "authenticated") + return response.data } catch (error: any) { - // Handle 400 status code specifically - if (error.response?.status === 400) { - throw new Error(error.response.data.message || "Invalid credentials") - } - // Handle other errors - throw new Error( - error.response?.data?.message || error.message || "Login failed", - ) + throw error } }, diff --git a/ui/src/api/services/destinationService.ts b/ui/src/api/services/destinationService.ts index e8345177..7ab594e3 100644 --- a/ui/src/api/services/destinationService.ts +++ b/ui/src/api/services/destinationService.ts @@ -1,7 +1,6 @@ import api from "../axios" import { API_CONFIG } from "../config" import { - APIResponse, Entity, EntityBase, EntityTestRequest, @@ -13,7 +12,7 @@ import { getConnectorInLowerCase } from "../../utils/utils" const normalizeDestinationType = (type: string): string => { //destination connector typemap const typeMap: Record = { - "amazon s3": "s3", + "amazon s3": "parquet", "apache iceberg": "iceberg", } return typeMap[type.toLowerCase()] || type.toLowerCase() @@ -22,10 +21,10 @@ const normalizeDestinationType = (type: string): string => { 
export const destinationService = { getDestinations: async () => { try { - const response = await api.get>( + const response = await api.get( API_CONFIG.ENDPOINTS.DESTINATIONS(API_CONFIG.PROJECT_ID), ) - const destinations: Entity[] = response.data.data.map(item => { + const destinations: Entity[] = response.data.map(item => { const config = JSON.parse(item.config) return { ...item, @@ -53,7 +52,7 @@ export const destinationService = { updateDestination: async (id: string, destination: EntityBase) => { try { - const response = await api.put>( + const response = await api.put( `${API_CONFIG.ENDPOINTS.DESTINATIONS(API_CONFIG.PROJECT_ID)}/${id}`, { name: destination.name, @@ -64,6 +63,7 @@ export const destinationService = { ? destination.config : JSON.stringify(destination.config), }, + { showNotification: true }, ) return response.data } catch (error) { @@ -75,6 +75,7 @@ export const destinationService = { deleteDestination: async (id: number) => { await api.delete( `${API_CONFIG.ENDPOINTS.DESTINATIONS(API_CONFIG.PROJECT_ID)}/${id}`, + { showNotification: true }, ) return }, @@ -85,7 +86,7 @@ export const destinationService = { source_version: string = "", ) => { try { - const response = await api.post>( + const response = await api.post( `${API_CONFIG.ENDPOINTS.DESTINATIONS(API_CONFIG.PROJECT_ID)}/test`, { type: getConnectorInLowerCase(destination.type), @@ -95,12 +96,12 @@ export const destinationService = { source_version: source_version, }, //timeout is 0 as test connection takes more time as it needs to connect to the destination - { timeout: 0 }, + { timeout: 0, disableErrorNotification: true }, ) return { - success: response.data.success, - message: response.data.message, - data: response.data.data, + success: true, + message: "success", + data: response.data, } } catch (error) { console.error("Error testing destination connection:", error) @@ -113,7 +114,7 @@ export const destinationService = { }, getDestinationVersions: async (type: string) => { - const 
response = await api.get>( + const response = await api.get<{ version: string[] }>( `${API_CONFIG.ENDPOINTS.DESTINATIONS(API_CONFIG.PROJECT_ID)}/versions/?type=${type}`, { timeout: 0, @@ -130,7 +131,7 @@ export const destinationService = { signal?: AbortSignal, ) => { const normalizedType = normalizeDestinationType(type) - const response = await api.post>( + const response = await api.post( `${API_CONFIG.ENDPOINTS.DESTINATIONS(API_CONFIG.PROJECT_ID)}/spec`, { type: normalizedType, @@ -139,7 +140,7 @@ export const destinationService = { source_version: source_version, }, //timeout is 300000 as spec takes more time as it needs to fetch the spec from the destination - { timeout: 300000, signal }, + { timeout: 300000, signal, disableErrorNotification: true }, ) return response.data }, diff --git a/ui/src/api/services/jobService.ts b/ui/src/api/services/jobService.ts index 754077ff..40bfa184 100644 --- a/ui/src/api/services/jobService.ts +++ b/ui/src/api/services/jobService.ts @@ -1,15 +1,15 @@ import api from "../axios" import { API_CONFIG } from "../config" -import { APIResponse, Job, JobBase, JobTask, TaskLog } from "../../types" +import { Job, JobBase, JobTask, TaskLog } from "../../types" export const jobService = { getJobs: async (): Promise => { try { - const response = await api.get>( + const response = await api.get( API_CONFIG.ENDPOINTS.JOBS(API_CONFIG.PROJECT_ID), ) - return response.data.data + return response.data } catch (error) { console.error("Error fetching jobs from API:", error) throw error @@ -34,6 +34,7 @@ export const jobService = { const response = await api.put( `${API_CONFIG.ENDPOINTS.JOBS(API_CONFIG.PROJECT_ID)}/${id}`, job, + { showNotification: true }, ) return response.data } catch (error) { @@ -46,6 +47,7 @@ export const jobService = { try { await api.delete( `${API_CONFIG.ENDPOINTS.JOBS(API_CONFIG.PROJECT_ID)}/${id}`, + { showNotification: true }, ) } catch (error) { console.error("Error deleting job:", error) @@ -55,10 +57,11 @@ export 
const jobService = { cancelJob: async (id: string): Promise => { try { - const response = await api.get>( + const response = await api.get( `${API_CONFIG.ENDPOINTS.JOBS(API_CONFIG.PROJECT_ID)}/${id}/cancel`, + { showNotification: true }, ) - return response.data.data.message + return response.data.message } catch (error) { console.error("Error canceling job:", error) throw error @@ -67,10 +70,10 @@ export const jobService = { syncJob: async (id: string): Promise => { try { - const response = await api.post>( + const response = await api.post( `${API_CONFIG.ENDPOINTS.JOBS(API_CONFIG.PROJECT_ID)}/${id}/sync`, {}, - { timeout: 0 }, // Disable timeout for this request since it can take longer + { timeout: 0, showNotification: true }, // Disable timeout for this request since it can take longer ) return response.data } catch (error) { @@ -79,11 +82,11 @@ export const jobService = { } }, - getJobTasks: async (id: string): Promise> => { + getJobTasks: async (id: string): Promise => { try { - const response = await api.get>( + const response = await api.get( `${API_CONFIG.ENDPOINTS.JOBS(API_CONFIG.PROJECT_ID)}/${id}/tasks`, - { timeout: 0 }, // Disable timeout for this request + { timeout: 0, showNotification: true }, // Disable timeout for this request, no toast for fetching tasks ) return response.data } catch (error) { @@ -96,12 +99,12 @@ export const jobService = { jobId: string, taskId: string, filePath: string, - ): Promise> => { + ): Promise => { try { - const response = await api.post>( + const response = await api.post( `${API_CONFIG.ENDPOINTS.JOBS(API_CONFIG.PROJECT_ID)}/${jobId}/tasks/${taskId}/logs`, { file_path: filePath }, - { timeout: 0 }, // Disable timeout for this request since it can take longer + { timeout: 0, showNotification: true }, // Disable timeout for this request since it can take longer, no toast for logs ) return response.data } catch (error) { @@ -111,14 +114,12 @@ export const jobService = { }, //This either pauses or resumes the job - 
activateJob: async ( - jobId: string, - activate: boolean, - ): Promise> => { + activateJob: async (jobId: string, activate: boolean): Promise => { try { - const response = await api.post>( + const response = await api.post( `${API_CONFIG.ENDPOINTS.JOBS(API_CONFIG.PROJECT_ID)}/${jobId}/activate`, { activate }, + { showNotification: true }, ) return response.data } catch (error) { @@ -129,11 +130,11 @@ export const jobService = { checkJobNameUnique: async (jobName: string): Promise<{ unique: boolean }> => { try { - const response = await api.post>( + const response = await api.post<{ unique: boolean }>( `${API_CONFIG.ENDPOINTS.JOBS(API_CONFIG.PROJECT_ID)}/check-unique`, { job_name: jobName }, ) - return response.data.data + return response.data } catch (error) { console.error("Error checking job name uniqueness:", error) throw error diff --git a/ui/src/api/services/notificationService.ts b/ui/src/api/services/notificationService.ts new file mode 100644 index 00000000..a5f1b081 --- /dev/null +++ b/ui/src/api/services/notificationService.ts @@ -0,0 +1,19 @@ +import { message } from "antd" + +/** + * A decoupled service for showing UI notifications. 
+ */ +export const notificationService = { + success: (msg: string) => { + if (msg) { + message.destroy() + message.success(msg) + } + }, + error: (msg: string) => { + if (msg) { + message.destroy() + message.error(msg) + } + }, +} diff --git a/ui/src/api/services/sourceService.ts b/ui/src/api/services/sourceService.ts index d36c9d64..a267cd4c 100644 --- a/ui/src/api/services/sourceService.ts +++ b/ui/src/api/services/sourceService.ts @@ -2,7 +2,6 @@ import api from "../axios" import { API_CONFIG } from "../config" import { Entity, - APIResponse, EntityBase, EntityTestRequest, EntityTestResponse, @@ -11,11 +10,11 @@ import { export const sourceService = { getSources: async (): Promise => { try { - const response = await api.get>( + const response = await api.get( API_CONFIG.ENDPOINTS.SOURCES(API_CONFIG.PROJECT_ID), ) - return response.data.data.map(item => ({ + return response.data.map(item => ({ ...item, config: JSON.parse(item.config), })) @@ -27,7 +26,7 @@ export const sourceService = { createSource: async (source: EntityBase) => { try { - const response = await api.post>( + const response = await api.post( API_CONFIG.ENDPOINTS.SOURCES(API_CONFIG.PROJECT_ID), source, ) @@ -40,7 +39,7 @@ export const sourceService = { updateSource: async (id: string, source: EntityBase) => { try { - const response = await api.put>( + const response = await api.put( `${API_CONFIG.ENDPOINTS.SOURCES(API_CONFIG.PROJECT_ID)}/${id}`, { name: source.name, @@ -51,6 +50,7 @@ export const sourceService = { ? 
source.config : JSON.stringify(source.config), }, + { showNotification: true }, ) return response.data } catch (error) { @@ -63,6 +63,7 @@ export const sourceService = { try { await api.delete( `${API_CONFIG.ENDPOINTS.SOURCES(API_CONFIG.PROJECT_ID)}/${id}`, + { showNotification: true }, ) } catch (error) { console.error("Error deleting source:", error) @@ -72,19 +73,19 @@ export const sourceService = { testSourceConnection: async (source: EntityTestRequest) => { try { - const response = await api.post>( + const response = await api.post( `${API_CONFIG.ENDPOINTS.SOURCES(API_CONFIG.PROJECT_ID)}/test`, { type: source.type.toLowerCase(), version: source.version, config: source.config, }, - { timeout: 0 }, // Disable timeout for this request since it can take longer + { timeout: 0, disableErrorNotification: true }, // Disable timeout for this request since it can take longer ) return { - success: response.data.success, - message: response.data.message, - data: response.data.data, + success: true, + message: "success", + data: response.data, } } catch (error) { console.error("Error testing source connection:", error) @@ -98,7 +99,7 @@ export const sourceService = { getSourceVersions: async (type: string) => { try { - const response = await api.get>( + const response = await api.get<{ version: string[] }>( `${API_CONFIG.ENDPOINTS.SOURCES(API_CONFIG.PROJECT_ID)}/versions/?type=${type}`, { timeout: 0, // Disable timeout for this request since it can take longer @@ -117,13 +118,13 @@ export const sourceService = { signal?: AbortSignal, ) => { try { - const response = await api.post>( + const response = await api.post( `${API_CONFIG.ENDPOINTS.SOURCES(API_CONFIG.PROJECT_ID)}/spec`, { type: type.toLowerCase(), version, }, - { timeout: 300000, signal }, //timeout is 300000 as spec takes more time as it needs to fetch the spec from olake + { timeout: 300000, signal, disableErrorNotification: true }, //timeout is 300000 as spec takes more time as it needs to fetch the spec from 
olake ) return response.data } catch (error) { @@ -142,7 +143,7 @@ export const sourceService = { job_id?: number, ) => { try { - const response = await api.post>>( + const response = await api.post>( `${API_CONFIG.ENDPOINTS.SOURCES(API_CONFIG.PROJECT_ID)}/streams`, { name, diff --git a/ui/src/modules/common/Modals/DeleteJobModal.tsx b/ui/src/modules/common/Modals/DeleteJobModal.tsx index b040c45d..93a60c62 100644 --- a/ui/src/modules/common/Modals/DeleteJobModal.tsx +++ b/ui/src/modules/common/Modals/DeleteJobModal.tsx @@ -1,5 +1,5 @@ import { useNavigate } from "react-router-dom" -import { Button, message, Modal } from "antd" +import { Button, Modal } from "antd" import { WarningIcon } from "@phosphor-icons/react" import { useAppStore } from "../../../store" @@ -41,10 +41,7 @@ const DeleteJobModal = ({ onClick={() => { setShowDeleteJobModal(false) if (selectedJobId) { - deleteJob(selectedJobId).catch(error => { - message.error("Failed to delete job") - console.error(error) - }) + deleteJob(selectedJobId) } if (fromJobSettings) { setTimeout(() => { diff --git a/ui/src/modules/common/Modals/DeleteModal.tsx b/ui/src/modules/common/Modals/DeleteModal.tsx index f2ed251c..7319b686 100644 --- a/ui/src/modules/common/Modals/DeleteModal.tsx +++ b/ui/src/modules/common/Modals/DeleteModal.tsx @@ -35,20 +35,12 @@ const DeleteModal = ({ fromSource }: DeleteModalProps) => { const handleDeleteSource = () => { message.info(`Deleting source ${selectedSource?.name}`) - deleteSource(selectedSource?.id as unknown as string).catch(error => { - message.error("Failed to delete source") - console.error(error) - }) + deleteSource(selectedSource?.id as unknown as string) setShowDeleteModal(false) } const handleDeleteDestination = () => { message.info(`Deleting destination ${selectedDestination?.name}`) - deleteDestination(selectedDestination?.id as unknown as string).catch( - error => { - message.error("Failed to delete destination") - console.error(error) - }, - ) + 
deleteDestination(selectedDestination?.id as unknown as string) setShowDeleteModal(false) } diff --git a/ui/src/modules/common/Modals/EntityEditModal.tsx b/ui/src/modules/common/Modals/EntityEditModal.tsx index a3712d4c..0713e85a 100644 --- a/ui/src/modules/common/Modals/EntityEditModal.tsx +++ b/ui/src/modules/common/Modals/EntityEditModal.tsx @@ -82,7 +82,6 @@ const EntityEditModal = ({ entityType }: EntityEditModalProps) => { setTimeout(async () => { setShowSuccessModal(false) await updateEntity(selectedEntity.id.toString(), selectedEntity) - message.success(`${entityType} updated successfully`) navigate(navigatePath) }, 2000) } else { @@ -95,7 +94,6 @@ const EntityEditModal = ({ entityType }: EntityEditModalProps) => { setShowFailureModal(true) } } catch (error) { - message.error(`Failed to update ${entityType}`) console.error(error) } } diff --git a/ui/src/modules/common/Modals/SpecFailedModal.tsx b/ui/src/modules/common/Modals/SpecFailedModal.tsx new file mode 100644 index 00000000..01961bb9 --- /dev/null +++ b/ui/src/modules/common/Modals/SpecFailedModal.tsx @@ -0,0 +1,95 @@ +import { message, Modal } from "antd" +import { CopySimpleIcon } from "@phosphor-icons/react" + +import { useAppStore } from "../../../store" +import ErrorIcon from "../../../assets/ErrorIcon.svg" + +const SpecFailedModal = ({ + fromSource, + error, + onTryAgain, +}: { + fromSource: boolean + error: string + onTryAgain: () => void +}) => { + const { showSpecFailedModal, setShowSpecFailedModal } = useAppStore() + + const handleTryAgain = () => { + setShowSpecFailedModal(false) + onTryAgain() + } + + const handleCopyLogs = async () => { + try { + await navigator.clipboard.writeText(error) + message.success("Logs copied to clipboard!") + } catch { + message.error("Failed to copy logs") + } + } + + const handleClose = () => { + setShowSpecFailedModal(false) + } + + return ( + +
+
+
+ Error +
+
+
+

Failed

+

+ {fromSource ? "Source" : "Destination"} Spec Load Failed +

+
+
+
Error
+ +
+
+
+ {error} +
+
+
+
+
+ + +
+
+
+ ) +} + +export default SpecFailedModal diff --git a/ui/src/modules/destinations/pages/CreateDestination.tsx b/ui/src/modules/destinations/pages/CreateDestination.tsx index b54e137d..ae8714e4 100644 --- a/ui/src/modules/destinations/pages/CreateDestination.tsx +++ b/ui/src/modules/destinations/pages/CreateDestination.tsx @@ -54,6 +54,8 @@ import CustomFieldTemplate from "../../common/components/Form/CustomFieldTemplat import validator from "@rjsf/validator-ajv8" import ArrayFieldTemplate from "../../common/components/Form/ArrayFieldTemplate" import { widgets } from "../../common/components/Form/widgets" +import { AxiosError } from "axios" +import SpecFailedModal from "../../common/Modals/SpecFailedModal" type ConnectorType = (typeof CONNECTOR_TYPES)[keyof typeof CONNECTOR_TYPES] @@ -82,6 +84,7 @@ const CreateDestination = forwardRef< onConnectorChange, onFormDataChange, onVersionChange, + onExistingDestinationIdChange, docsMinimized = false, onDocsMinimizedChange, sourceConnector, @@ -118,6 +121,7 @@ const CreateDestination = forwardRef< const [destinationNameError, setDestinationNameError] = useState< string | null >(null) + const [specError, setSpecError] = useState(null) const navigate = useNavigate() const resetVersionState = () => { @@ -139,6 +143,7 @@ const CreateDestination = forwardRef< setShowFailureModal, setShowSourceCancelModal, setDestinationTestConnectionError, + setShowSpecFailedModal, } = useAppStore() const parseDestinationConfig = ( @@ -226,10 +231,12 @@ const CreateDestination = forwardRef< setLoadingVersions(true) try { const response = await destinationService.getDestinationVersions( - connector.toLowerCase(), + connector === CONNECTOR_TYPES.APACHE_ICEBERG + ? 
DESTINATION_INTERNAL_TYPES.ICEBERG + : DESTINATION_INTERNAL_TYPES.S3, ) - if (response.data?.version) { - const receivedVersions = response.data.version + if (response?.version) { + const receivedVersions = response?.version setVersions(receivedVersions) if (receivedVersions.length > 0) { let defaultVersion = receivedVersions[0] @@ -258,7 +265,7 @@ const CreateDestination = forwardRef< fetchVersions() }, [connector, onVersionChange, setupType]) - useEffect(() => { + const handleFetchSpec = () => { if (!version) { setSchema(null) setUiSchema(null) @@ -286,9 +293,19 @@ const CreateDestination = forwardRef< setSchema({}) setUiSchema({}) console.error("Error fetching destination spec:", error) + if (error instanceof AxiosError) { + setSpecError(error.response?.data.message) + } else { + setSpecError("Failed to fetch spec, Please try again.") + } + setShowSpecFailedModal(true) }, () => setLoading(false), ) + } + + useEffect(() => { + return handleFetchSpec() }, [ connector, version, @@ -419,6 +436,7 @@ const CreateDestination = forwardRef< setConnector(value as ConnectorType) if (setupType === SETUP_TYPES.EXISTING) { setExistingDestination(null) + onExistingDestinationIdChange?.(null) setDestinationName("") onDestinationNameChange?.("") } @@ -435,6 +453,7 @@ const CreateDestination = forwardRef< const handleSetupTypeChange = (type: SetupType) => { setSetupType(type) setDestinationName("") + onExistingDestinationIdChange?.(null) onDestinationNameChange?.("") if (onDocsMinimizedChange) { @@ -450,6 +469,7 @@ const CreateDestination = forwardRef< setSchema(null) setConnector(CONNECTOR_TYPES.DESTINATION_DEFAULT_CONNECTOR) // Reset to default connector setExistingDestination(null) + onExistingDestinationIdChange?.(null) // Schema will be automatically fetched due to useEffect when connector changes if (onConnectorChange) onConnectorChange(CONNECTOR_TYPES.AMAZON_S3) if (onFormDataChange) onFormDataChange({}) @@ -478,6 +498,7 @@ const CreateDestination = forwardRef< 
setDestinationName(selectedDestination.name) setFormData(configObj) setExistingDestination(value) + onExistingDestinationIdChange?.(selectedDestination.id) } const handleVersionChange = (value: string) => { @@ -740,6 +761,13 @@ const CreateDestination = forwardRef< type="destination" navigateTo={fromJobFlow ? "jobs/new" : "destinations"} /> + {specError && ( + + )}
) }, diff --git a/ui/src/modules/destinations/pages/DestinationEdit.tsx b/ui/src/modules/destinations/pages/DestinationEdit.tsx index ffcd6e27..685dc532 100644 --- a/ui/src/modules/destinations/pages/DestinationEdit.tsx +++ b/ui/src/modules/destinations/pages/DestinationEdit.tsx @@ -1,16 +1,7 @@ import React, { useState, useEffect, useRef } from "react" import { useParams, Link, useNavigate } from "react-router-dom" import { formatDistanceToNow } from "date-fns" -import { - Input, - Button, - Select, - Switch, - message, - Spin, - Table, - Tooltip, -} from "antd" +import { Input, Button, Select, Switch, Spin, Table, Tooltip } from "antd" import type { ColumnsType } from "antd/es/table" import { ArrowLeftIcon, @@ -63,6 +54,8 @@ import CustomFieldTemplate from "../../common/components/Form/CustomFieldTemplat import ArrayFieldTemplate from "../../common/components/Form/ArrayFieldTemplate" import { widgets } from "../../common/components/Form/widgets" +import { AxiosError } from "axios" +import SpecFailedModal from "../../common/Modals/SpecFailedModal" const DestinationEdit: React.FC = ({ fromJobFlow = false, @@ -93,6 +86,7 @@ const DestinationEdit: React.FC = ({ const [formData, setFormData] = useState>({}) const [isLoading, setIsLoading] = useState(false) const [destination, setDestination] = useState(null) + const [specError, setSpecError] = useState(null) const { destinations, @@ -105,6 +99,7 @@ const DestinationEdit: React.FC = ({ setShowFailureModal, setDestinationTestConnectionError, updateDestination, + setShowSpecFailedModal, } = useAppStore() const navigate = useNavigate() @@ -208,14 +203,14 @@ const DestinationEdit: React.FC = ({ connectorType.toLowerCase(), ) - if (response.data?.version) { - setVersions(response.data.version) + if (response?.version) { + setVersions(response.version) // If no version is selected, set the first one as default - if (!selectedVersion && response.data.version.length > 0) { - setSelectedVersion(response.data.version[0]) + if 
(!selectedVersion && response.version.length > 0) { + setSelectedVersion(response.version[0]) if (onVersionChange) { - onVersionChange(response.data.version[0]) + onVersionChange(response.version[0]) } } } else { @@ -232,7 +227,7 @@ const DestinationEdit: React.FC = ({ fetchVersions() }, [connector]) - useEffect(() => { + const handleFetchSpec = () => { if (!selectedVersion || !connector) { setSchema(null) setUiSchema(null) @@ -257,9 +252,19 @@ const DestinationEdit: React.FC = ({ setSchema({}) setUiSchema({}) console.error("Error fetching destination spec:", error) + if (error instanceof AxiosError) { + setSpecError(error.response?.data.message) + } else { + setSpecError("Failed to fetch spec, Please try again.") + } + setShowSpecFailedModal(true) }, () => setIsLoading(false), ) + } + + useEffect(() => { + return handleFetchSpec() }, [connector, selectedVersion, fromJobFlow, sourceConnector, sourceVersion]) const handleVersionChange = (value: string) => { @@ -348,15 +353,9 @@ const DestinationEdit: React.FC = ({ const saveDestination = () => { if (destinationId) { - updateDestination(destinationId, getDestinationData()) - .then(() => { - message.success("Destination updated successfully") - navigate("/destinations") - }) - .catch(error => { - message.error("Failed to update source") - console.error(error) - }) + updateDestination(destinationId, getDestinationData()).then(() => { + navigate("/destinations") + }) } } @@ -374,16 +373,8 @@ const DestinationEdit: React.FC = ({ // } const handlePauseJob = async (jobId: string, checked: boolean) => { - try { - await jobService.activateJob(jobId, !checked) - message.success( - `Successfully ${checked ? "paused" : "resumed"} job ${jobId}`, - ) - await fetchDestinations() - } catch (error) { - console.error("Error toggling job status:", error) - message.error(`Failed to ${checked ? 
"pause" : "resume"} job ${jobId}`) - } + await jobService.activateJob(jobId, !checked) + await fetchDestinations() } const toggleDocsPanel = () => { @@ -764,6 +755,13 @@ const DestinationEdit: React.FC = ({ + {specError && ( + + )}
) } diff --git a/ui/src/modules/destinations/pages/Destinations.tsx b/ui/src/modules/destinations/pages/Destinations.tsx index ec62333c..eb68abce 100644 --- a/ui/src/modules/destinations/pages/Destinations.tsx +++ b/ui/src/modules/destinations/pages/Destinations.tsx @@ -24,10 +24,7 @@ const Destinations: React.FC = () => { } = useAppStore() useEffect(() => { - fetchDestinations().catch(error => { - message.error("Failed to fetch destinations") - console.error(error) - }) + fetchDestinations() }, [fetchDestinations]) const handleCreateDestination = () => { diff --git a/ui/src/modules/jobs/pages/JobCreation.tsx b/ui/src/modules/jobs/pages/JobCreation.tsx index d08d974e..e9c6206d 100644 --- a/ui/src/modules/jobs/pages/JobCreation.tsx +++ b/ui/src/modules/jobs/pages/JobCreation.tsx @@ -62,6 +62,7 @@ const JobCreation: React.FC = () => { const [destinationName, setDestinationName] = useState( initialData.destinationName || "", ) + const [existingSourceId, setExistingSourceId] = useState(null) //state to hold catalog value to open documentation panel const [destinationCatalogType, setDestinationCatalogType] = useState< @@ -77,6 +78,10 @@ const JobCreation: React.FC = () => { const [destinationVersion, setDestinationVersion] = useState( initialData.destinationVersion || "", ) + const [existingDestinationId, setExistingDestinationId] = useState< + number | null + >(null) + const [selectedStreams, setSelectedStreams] = useState( initialData.selectedStreams || [], ) @@ -150,7 +155,6 @@ const JobCreation: React.FC = () => { const response = await jobService.checkJobNameUnique(jobName) return response.unique } catch { - message.error("Failed to check job name uniqueness. 
Please try again.") return null } } @@ -211,12 +215,14 @@ const JobCreation: React.FC = () => { const newJobData: JobBase = { name: jobName, source: { + ...(existingSourceId && { id: existingSourceId }), name: sourceName, type: getConnectorInLowerCase(sourceConnector), version: sourceVersion, config: JSON.stringify(sourceFormData), }, destination: { + ...(existingDestinationId && { id: existingDestinationId }), name: destinationName, type: getConnectorInLowerCase(destinationConnector), version: destinationVersion, @@ -241,7 +247,6 @@ const JobCreation: React.FC = () => { setShowEntitySavedModal(true) } catch (error) { console.error("Error adding job:", error) - message.error("Failed to create job") } } @@ -380,12 +385,10 @@ const JobCreation: React.FC = () => { job.id === savedJobId ? jobData : job, ) localStorage.setItem("savedJobs", JSON.stringify(updatedSavedJobs)) - message.success("Job saved successfully!") } else { // Create new saved job savedJobs.push(jobData) localStorage.setItem("savedJobs", JSON.stringify(savedJobs)) - message.success("Job saved successfully!") } navigate("/jobs") @@ -423,6 +426,7 @@ const JobCreation: React.FC = () => { stepTitle="Set up your source" onSourceNameChange={setSourceName} onConnectorChange={setSourceConnector} + onExistingSourceIdChange={setExistingSourceId} initialConnector={sourceConnector} onFormDataChange={data => { setSourceFormData(data) @@ -447,6 +451,7 @@ const JobCreation: React.FC = () => { fromJobFlow={true} stepNumber={JOB_STEP_NUMBERS.DESTINATION} stepTitle="Set up your destination" + onExistingDestinationIdChange={setExistingDestinationId} onDestinationNameChange={setDestinationName} onConnectorChange={setDestinationConnector} initialConnector={getConnectorInLowerCase(destinationConnector)} diff --git a/ui/src/modules/jobs/pages/JobEdit.tsx b/ui/src/modules/jobs/pages/JobEdit.tsx index 25eaec1d..fae4efa3 100644 --- a/ui/src/modules/jobs/pages/JobEdit.tsx +++ b/ui/src/modules/jobs/pages/JobEdit.tsx @@ -161,15 
+161,9 @@ const JobEdit: React.FC = () => { // Load job data on component mount useEffect(() => { - const loadData = async () => { - try { - await Promise.all([fetchJobs(), fetchSources(), fetchDestinations()]) - } catch (error) { - console.error("Error loading data:", error) - message.error("Failed to load job data. Please try again.") - } - } - loadData() + fetchJobs() + fetchSources() + fetchDestinations() }, []) const initializeFromExistingJob = (job: Job) => { @@ -179,6 +173,7 @@ const JobEdit: React.FC = () => { // Set source data from job setSourceData({ + id: job.source.id, name: job.source.name, type: job.source.type, config: sourceConfig, @@ -190,6 +185,7 @@ const JobEdit: React.FC = () => { // Set destination data from job setDestinationData({ + id: job.destination.id, name: job.destination.name, type: job.destination.type, config: destConfig, @@ -311,6 +307,7 @@ const JobEdit: React.FC = () => { const jobUpdateRequestPayload: JobBase = { name: jobName, source: { + ...(sourceData?.id && { id: sourceData.id }), name: sourceData?.name || "", type: getConnectorInLowerCase(sourceData?.type || ""), config: @@ -320,6 +317,7 @@ const JobEdit: React.FC = () => { version: sourceData?.version || "", }, destination: { + ...(destinationData?.id && { id: destinationData.id }), name: destinationData?.name || "", type: getConnectorInLowerCase(destinationData?.type || ""), config: @@ -364,14 +362,12 @@ const JobEdit: React.FC = () => { const jobUpdatePayload = getjobUpdatePayLoad() await jobService.updateJob(jobId, jobUpdatePayload) - message.success("Job updated successfully!") // Refresh jobs and navigate back to jobs list fetchJobs() navigate("/jobs") } catch (error) { console.error("Error saving job:", error) - message.error("Failed to save job. 
Please try again.") } finally { setIsSubmitting(false) } diff --git a/ui/src/modules/jobs/pages/JobHistory.tsx b/ui/src/modules/jobs/pages/JobHistory.tsx index ea061c62..0ed14753 100644 --- a/ui/src/modules/jobs/pages/JobHistory.tsx +++ b/ui/src/modules/jobs/pages/JobHistory.tsx @@ -1,7 +1,7 @@ import { useEffect, useState, useRef } from "react" import clsx from "clsx" import { useParams, useNavigate, Link } from "react-router-dom" -import { Table, Button, Input, Spin, message, Pagination, Tooltip } from "antd" +import { Table, Button, Input, Spin, Pagination, Tooltip } from "antd" import { ArrowLeftIcon, ArrowRightIcon, @@ -25,7 +25,8 @@ const JobHistory: React.FC = () => { const pageSize = 8 const [isDelayingCall, setIsDelayingCall] = useState(false) const retryCountRef = useRef(0) - const THROTTLE_DELAY = 1000 + const THROTTLE_DELAY = 500 + const MAX_RETRIES = 2 const { jobs, @@ -41,43 +42,43 @@ const JobHistory: React.FC = () => { fetchJobs() } - if (jobId) { - const fetchWithRetry = async () => { + if (!jobId) { + return + } + + let timeoutId: NodeJS.Timeout + + const fetchWithRetry = async () => { + try { setIsDelayingCall(true) - try { - await fetchJobTasks(jobId) - await new Promise(resolve => setTimeout(resolve, 1000)) - if (jobTasks && jobTasks.length > 0) { - retryCountRef.current = 0 - setIsDelayingCall(false) - return - } + await new Promise(resolve => setTimeout(resolve, THROTTLE_DELAY)) + await fetchJobTasks(jobId) - // try fetching tasks 4 times with a delay of 1 second - if (retryCountRef.current < 4) { - retryCountRef.current++ - setTimeout(fetchWithRetry, THROTTLE_DELAY) - } else { - setIsDelayingCall(false) - } - } catch (error) { - console.error("Error fetching job tasks:", error) - if (retryCountRef.current < 4) { - retryCountRef.current++ - setTimeout(fetchWithRetry, THROTTLE_DELAY) - } else { - setIsDelayingCall(false) - } + // retry MAX_RETRIES times with a delay of THROTTLE_DELAY + if (retryCountRef.current < MAX_RETRIES) { + 
retryCountRef.current++ + timeoutId = setTimeout(fetchWithRetry, THROTTLE_DELAY) + } else { + setIsDelayingCall(false) + } + } catch (err) { + console.error(err) + if (retryCountRef.current < MAX_RETRIES) { + retryCountRef.current++ + timeoutId = setTimeout(fetchWithRetry, THROTTLE_DELAY) + } else { + setIsDelayingCall(false) } } + } - fetchWithRetry() + fetchWithRetry() - return () => { - retryCountRef.current = 0 - } + return () => { + clearTimeout(timeoutId) + retryCountRef.current = 0 } - }, [jobId, fetchJobTasks, jobs.length, fetchJobs]) + }, [jobId]) const job = jobs.find(j => j.id === Number(jobId)) const handleViewLogs = (filePath: string) => { @@ -211,15 +212,6 @@ const JobHistory: React.FC = () => { onClick={() => { if (jobId) { fetchJobTasks(jobId) - .then(() => { - message.destroy() - message.success("Job history refetched successfully") - }) - .catch(error => { - message.destroy() - message.error("Failed to fetch job history") - console.error("Error fetching job history:", error) - }) } }} className="flex items-center" diff --git a/ui/src/modules/jobs/pages/JobLogs.tsx b/ui/src/modules/jobs/pages/JobLogs.tsx index ed839051..9bf22969 100644 --- a/ui/src/modules/jobs/pages/JobLogs.tsx +++ b/ui/src/modules/jobs/pages/JobLogs.tsx @@ -1,7 +1,7 @@ import { useEffect, useState } from "react" import clsx from "clsx" import { useParams, useNavigate, Link, useSearchParams } from "react-router-dom" -import { Input, Spin, message, Button, Tooltip } from "antd" +import { Input, Spin, Button, Tooltip } from "antd" import { ArrowLeftIcon, ArrowRightIcon, @@ -47,10 +47,7 @@ const JobLogs: React.FC = () => { if (jobId) { if (isTaskLog && filePath) { - fetchTaskLogs(jobId, historyId || "1", filePath).catch(error => { - message.error("Failed to fetch task logs") - console.error(error) - }) + fetchTaskLogs(jobId, historyId || "1", filePath) } } }, [ @@ -174,15 +171,6 @@ const JobLogs: React.FC = () => { onClick={() => { if (isTaskLog && filePath) { fetchTaskLogs(jobId!, 
historyId || "1", filePath) - .then(() => { - message.destroy() - message.success("Logs refetched successfully") - }) - .catch(error => { - message.destroy() - message.error("Failed to refetch task logs") - console.error(error) - }) } }} className="flex items-center" diff --git a/ui/src/modules/jobs/pages/JobSettings.tsx b/ui/src/modules/jobs/pages/JobSettings.tsx index f3990ac1..fdc15526 100644 --- a/ui/src/modules/jobs/pages/JobSettings.tsx +++ b/ui/src/modules/jobs/pages/JobSettings.tsx @@ -124,13 +124,9 @@ const JobSettings: React.FC = () => { try { await jobService.activateJob(jobId, !checked) - message.success( - `Successfully ${checked ? "paused" : "resumed"} job ${jobId}`, - ) await fetchJobs() } catch (error) { console.error("Error toggling job status:", error) - message.error(`Failed to ${checked ? "pause" : "resume"} job ${jobId}`) // Revert optimistic update on error setPauseJob(!checked) } finally { @@ -252,12 +248,10 @@ const JobSettings: React.FC = () => { } await jobService.updateJob(jobId, jobUpdatePayload) - message.success("Job settings saved successfully") await fetchJobs() navigate("/jobs") } catch (error) { console.error("Error saving job settings:", error) - message.error("Failed to save job settings") } } diff --git a/ui/src/modules/jobs/pages/Jobs.tsx b/ui/src/modules/jobs/pages/Jobs.tsx index 6811b970..91fd82dc 100644 --- a/ui/src/modules/jobs/pages/Jobs.tsx +++ b/ui/src/modules/jobs/pages/Jobs.tsx @@ -27,10 +27,7 @@ const Jobs: React.FC = () => { } = useAppStore() useEffect(() => { - fetchJobs().catch(error => { - message.error("Failed to fetch jobs") - console.error(error) - }) + fetchJobs() }, [fetchJobs]) const handleCreateJob = () => { @@ -42,7 +39,6 @@ const Jobs: React.FC = () => { try { navigate(`/jobs/${id}/history`) // navigate to job history so that user can see the tasks running await jobService.syncJob(id) - message.success("Job sync started successfully") await fetchJobs() } catch (error) { message.error("Failed to sync job") 
@@ -81,22 +77,16 @@ const Jobs: React.FC = () => { } const handlePauseJob = async (id: string, checked: boolean) => { - const job = jobs.find(j => j.id.toString() === id) await jobService.activateJob(id, !checked) - message.success( - `Successfully ${checked ? "paused" : "resumed"} ${job?.name || id}`, - ) await fetchJobs() } // cancels the running job const handleCancelJob = async (id: string) => { try { - const response = await jobService.cancelJob(id) - message.success(response) + await jobService.cancelJob(id) } catch (error) { console.error("Error canceling job:", error) - message.error("Failed to cancel run") } } @@ -133,17 +123,19 @@ const Jobs: React.FC = () => { const updateJobsList = () => { switch (activeTab) { case JOB_TYPES.ACTIVE: - setFilteredJobs(jobs.filter(job => job.activate === true)) + setFilteredJobs(jobs.filter(job => job?.activate === true)) break case JOB_TYPES.INACTIVE: - setFilteredJobs(jobs.filter(job => job.activate === false)) + setFilteredJobs(jobs.filter(job => job?.activate === false)) break case JOB_TYPES.SAVED: setFilteredJobs(savedJobs) break case JOB_TYPES.FAILED: setFilteredJobs( - jobs.filter(job => job.last_run_state?.toLowerCase() === "failed"), + jobs.filter( + job => (job?.last_run_state ?? "").toLowerCase() === "failed", + ), ) break default: diff --git a/ui/src/modules/jobs/pages/SchemaConfiguration.tsx b/ui/src/modules/jobs/pages/SchemaConfiguration.tsx index 88ed096b..f7cbaf8a 100644 --- a/ui/src/modules/jobs/pages/SchemaConfiguration.tsx +++ b/ui/src/modules/jobs/pages/SchemaConfiguration.tsx @@ -151,7 +151,7 @@ const SchemaConfiguration: React.FC = ({ fromJobEditFlow ? 
jobId : -1, ) - const rawApiResponse = response.data as any + const rawApiResponse = response as any const processedResponseData: CombinedStreamsData = { streams: [], selected_streams: {}, diff --git a/ui/src/modules/sources/pages/CreateSource.tsx b/ui/src/modules/sources/pages/CreateSource.tsx index 12e068a6..8f7e9aeb 100644 --- a/ui/src/modules/sources/pages/CreateSource.tsx +++ b/ui/src/modules/sources/pages/CreateSource.tsx @@ -47,6 +47,8 @@ import ObjectFieldTemplate from "../../common/components/Form/ObjectFieldTemplat import CustomFieldTemplate from "../../common/components/Form/CustomFieldTemplate" import ArrayFieldTemplate from "../../common/components/Form/ArrayFieldTemplate" import { widgets } from "../../common/components/Form/widgets" +import SpecFailedModal from "../../common/Modals/SpecFailedModal" +import { AxiosError } from "axios" // Create ref handle interface export interface CreateSourceHandle { @@ -70,6 +72,7 @@ const CreateSource = forwardRef( onVersionChange, docsMinimized = false, onDocsMinimizedChange, + onExistingSourceIdChange, }, ref, ) => { @@ -87,6 +90,7 @@ const CreateSource = forwardRef( const [filteredSources, setFilteredSources] = useState([]) const [sourceNameError, setSourceNameError] = useState(null) const [existingSource, setExistingSource] = useState(null) + const [specError, setSpecError] = useState(null) const navigate = useNavigate() @@ -100,6 +104,7 @@ const CreateSource = forwardRef( addSource, setShowFailureModal, setSourceTestConnectionError, + setShowSpecFailedModal, } = useAppStore() useEffect(() => { @@ -155,15 +160,15 @@ const CreateSource = forwardRef( const response = await sourceService.getSourceVersions( connector.toLowerCase(), ) - if (response.data?.version) { - setVersions(response.data.version) + if (response?.version) { + setVersions(response.version) if ( - response.data.version.length > 0 && + response.version.length > 0 && (!initialVersion || connector !== initialConnector || initialVersion === "") ) { 
- let defaultVersion = response.data.version[0] + let defaultVersion = response.version[0] if ( connector.toLowerCase() === initialConnector && initialVersion @@ -189,7 +194,7 @@ const CreateSource = forwardRef( fetchVersions() }, [connector, initialConnector]) - useEffect(() => { + const handleFetchSpec = () => { if (!selectedVersion) { setSchema(null) return @@ -201,15 +206,26 @@ const CreateSource = forwardRef( return withAbortController( signal => sourceService.getSourceSpec(connector, selectedVersion, signal), - response => - handleSpecResponse(response, setSchema, setUiSchema, "source"), + response => { + handleSpecResponse(response, setSchema, setUiSchema, "source") + }, error => { setSchema({}) setUiSchema({}) console.error("Error fetching source spec:", error) + if (error instanceof AxiosError) { + setSpecError(error.response?.data.message) + } else { + setSpecError("Failed to fetch spec, Please try again.") + } + setShowSpecFailedModal(true) }, () => setLoading(false), ) + } + + useEffect(() => { + return handleFetchSpec() }, [connector, selectedVersion, setupType]) useEffect(() => { @@ -330,6 +346,7 @@ const CreateSource = forwardRef( setConnector(value) if (setupType === SETUP_TYPES.EXISTING) { setExistingSource(null) + onExistingSourceIdChange?.(null) setSourceName("") onSourceNameChange?.("") } @@ -361,6 +378,7 @@ const CreateSource = forwardRef( setSchema(null) setConnector(CONNECTOR_TYPES.SOURCE_DEFAULT_CONNECTOR) // Reset to default connector setExistingSource(null) + onExistingSourceIdChange?.(null) // Schema will be automatically fetched due to useEffect when connector changes if (onConnectorChange) onConnectorChange(CONNECTOR_TYPES.MONGODB) if (onFormDataChange) onFormDataChange({}) @@ -390,6 +408,7 @@ const CreateSource = forwardRef( setSourceName(selectedSource.name) setConnector(getConnectorLabel(selectedSource.type)) setSelectedVersion(selectedSource.version) + onExistingSourceIdChange?.(selectedSource.id) } } @@ -657,6 +676,13 @@ const 
CreateSource = forwardRef( type="source" navigateTo={fromJobFlow ? "jobs/new" : "sources"} /> + {specError && ( + + )}
) }, diff --git a/ui/src/modules/sources/pages/SourceEdit.tsx b/ui/src/modules/sources/pages/SourceEdit.tsx index 66a13264..829c853f 100644 --- a/ui/src/modules/sources/pages/SourceEdit.tsx +++ b/ui/src/modules/sources/pages/SourceEdit.tsx @@ -1,16 +1,7 @@ import { useState, useEffect, useRef } from "react" import { useParams, useNavigate, Link } from "react-router-dom" import { formatDistanceToNow } from "date-fns" -import { - Input, - Button, - Select, - Switch, - message, - Table, - Spin, - Tooltip, -} from "antd" +import { Input, Button, Select, Switch, Table, Spin, Tooltip } from "antd" import type { ColumnsType } from "antd/es/table" import { GenderNeuterIcon, @@ -54,6 +45,8 @@ import ObjectFieldTemplate from "../../common/components/Form/ObjectFieldTemplat import CustomFieldTemplate from "../../common/components/Form/CustomFieldTemplate" import ArrayFieldTemplate from "../../common/components/Form/ArrayFieldTemplate" import { widgets } from "../../common/components/Form/widgets" +import { AxiosError } from "axios" +import SpecFailedModal from "../../common/Modals/SpecFailedModal" const SourceEdit: React.FC = ({ fromJobFlow = false, @@ -84,6 +77,7 @@ const SourceEdit: React.FC = ({ const [loadingVersions, setLoadingVersions] = useState(false) const [schema, setSchema] = useState(null) const [uiSchema, setUiSchema] = useState(null) + const [specError, setSpecError] = useState(null) const { sources, @@ -94,6 +88,7 @@ const SourceEdit: React.FC = ({ setShowSuccessModal, setShowFailureModal, setSourceTestConnectionError, + setShowSpecFailedModal, } = useAppStore() useEffect(() => { @@ -156,7 +151,7 @@ const SourceEdit: React.FC = ({ } }, [initialData]) - useEffect(() => { + const handleFetchSpec = () => { if (!selectedVersion || !connector) { setSchema(null) return @@ -176,9 +171,19 @@ const SourceEdit: React.FC = ({ setSchema({}) setUiSchema({}) console.error("Error fetching source spec:", error) + if (error instanceof AxiosError) { + 
setSpecError(error.response?.data.message) + } else { + setSpecError("Failed to fetch spec, Please try again.") + } + setShowSpecFailedModal(true) }, () => setLoading(false), ) + } + + useEffect(() => { + return handleFetchSpec() }, [connector, selectedVersion]) const resetVersionState = () => { @@ -198,8 +203,8 @@ const SourceEdit: React.FC = ({ const response = await sourceService.getSourceVersions( getConnectorInLowerCase(connector), ) - if (response.success && response.data?.version) { - const versions = response.data.version.map((version: string) => ({ + if (response?.version) { + const versions = response.version.map((version: string) => ({ label: version, value: version, })) @@ -316,11 +321,9 @@ const SourceEdit: React.FC = ({ if (sourceId) { updateSource(sourceId, getSourceData()) .then(() => { - message.success("Source updated successfully") navigate("/sources") }) .catch(error => { - message.error("Failed to update source") console.error(error) }) } @@ -346,14 +349,10 @@ const SourceEdit: React.FC = ({ const handlePauseJob = async (jobId: string, checked: boolean) => { try { await jobService.activateJob(jobId, !checked) - message.success( - `Successfully ${checked ? "paused" : "resumed"} job ${jobId}`, - ) // Refetch sources to update the UI with the latest source details await fetchSources() } catch (error) { console.error("Error toggling job status:", error) - message.error(`Failed to ${checked ? "pause" : "resume"} job ${jobId}`) } } @@ -726,6 +725,13 @@ const SourceEdit: React.FC = ({ + {specError && ( + + )}
) } diff --git a/ui/src/modules/sources/pages/Sources.tsx b/ui/src/modules/sources/pages/Sources.tsx index 21f090b7..8394e0ab 100644 --- a/ui/src/modules/sources/pages/Sources.tsx +++ b/ui/src/modules/sources/pages/Sources.tsx @@ -24,10 +24,7 @@ const Sources: React.FC = () => { } = useAppStore() useEffect(() => { - fetchSources().catch(error => { - message.error("Failed to fetch sources") - console.error(error) - }) + fetchSources() }, [fetchSources]) const handleCreateSource = () => { @@ -45,10 +42,7 @@ const Sources: React.FC = () => { // For inactive sources, delete directly without showing modal if (!source?.jobs || source.jobs.length === 0) { message.info(`Deleting source ${source?.name}`) - deleteSource(String(source.id)).catch(error => { - message.error("Failed to delete source") - console.error(error) - }) + deleteSource(String(source.id)) return } diff --git a/ui/src/store/destinationStore.ts b/ui/src/store/destinationStore.ts index 843283ac..ccb04091 100644 --- a/ui/src/store/destinationStore.ts +++ b/ui/src/store/destinationStore.ts @@ -1,7 +1,5 @@ import { StateCreator } from "zustand" -import type { APIResponse, TestConnectionError } from "../types" -import type { EntityBase } from "../types" -import type { Entity } from "../types" +import type { TestConnectionError, EntityBase, Entity } from "../types" import { destinationService } from "../api" export interface DestinationSlice { @@ -16,7 +14,7 @@ export interface DestinationSlice { updateDestination: ( id: string, destination: Partial, - ) => Promise> + ) => Promise deleteDestination: (id: string) => Promise } @@ -54,7 +52,7 @@ export const createDestinationSlice: StateCreator = set => ({ newDestination as unknown as Entity, ], })) - return newDestination + return newDestination as EntityBase } catch (error) { set({ destinationsError: @@ -70,11 +68,12 @@ export const createDestinationSlice: StateCreator = set => ({ id, destinationData as EntityBase, ) - const updatedDestData = 
updatedDestination.data as Entity set(state => ({ destinations: state.destinations.map(destination => - destination.id.toString() === id ? updatedDestData : destination, + destination.id.toString() === id + ? (updatedDestination as unknown as Entity) + : destination, ), })) return updatedDestination diff --git a/ui/src/store/modalStore.ts b/ui/src/store/modalStore.ts index 46e1e151..527f8e4a 100644 --- a/ui/src/store/modalStore.ts +++ b/ui/src/store/modalStore.ts @@ -17,6 +17,7 @@ export interface ModalSlice { showResetStreamsModal: boolean showIngestionModeChangeModal: boolean ingestionMode: IngestionMode + showSpecFailedModal: boolean setShowTestingModal: (show: boolean) => void setShowSuccessModal: (show: boolean) => void setShowFailureModal: (show: boolean) => void @@ -32,6 +33,7 @@ export interface ModalSlice { setShowResetStreamsModal: (show: boolean) => void setShowIngestionModeChangeModal: (show: boolean) => void setIngestionMode: (mode: IngestionMode) => void + setShowSpecFailedModal: (show: boolean) => void } export const createModalSlice: StateCreator = set => ({ @@ -50,6 +52,7 @@ export const createModalSlice: StateCreator = set => ({ showResetStreamsModal: false, showIngestionModeChangeModal: false, ingestionMode: IngestionMode.UPSERT, + showSpecFailedModal: false, setShowTestingModal: show => set({ showTestingModal: show }), setShowSuccessModal: show => set({ showSuccessModal: show }), setShowFailureModal: show => set({ showFailureModal: show }), @@ -68,4 +71,5 @@ export const createModalSlice: StateCreator = set => ({ setShowIngestionModeChangeModal: show => set({ showIngestionModeChangeModal: show }), setIngestionMode: mode => set({ ingestionMode: mode }), + setShowSpecFailedModal: show => set({ showSpecFailedModal: show }), }) diff --git a/ui/src/store/sourceStore.ts b/ui/src/store/sourceStore.ts index 0b17a323..7ac0f75d 100644 --- a/ui/src/store/sourceStore.ts +++ b/ui/src/store/sourceStore.ts @@ -1,10 +1,5 @@ import { StateCreator } from 
"zustand" -import type { - APIResponse, - Entity, - EntityBase, - TestConnectionError, -} from "../types" +import type { Entity, EntityBase, TestConnectionError } from "../types" import { sourceService } from "../api" export interface SourceSlice { sources: Entity[] @@ -13,8 +8,8 @@ export interface SourceSlice { sourceTestConnectionError: TestConnectionError | null setSourceTestConnectionError: (error: TestConnectionError | null) => void fetchSources: () => Promise - addSource: (source: EntityBase) => Promise> - updateSource: (id: string, source: EntityBase) => Promise> + addSource: (source: EntityBase) => Promise + updateSource: (id: string, source: EntityBase) => Promise deleteSource: (id: string) => Promise } @@ -46,7 +41,7 @@ export const createSourceSlice: StateCreator = set => ({ addSource: async sourceData => { try { const newSource = await sourceService.createSource(sourceData) - set(state => ({ sources: [...state.sources, newSource.data as Entity] })) + set(state => ({ sources: [...state.sources, newSource as Entity] })) return newSource } catch (error) { set({ @@ -60,7 +55,7 @@ export const createSourceSlice: StateCreator = set => ({ updateSource: async (id, sourceData) => { try { const updatedSource = await sourceService.updateSource(id, sourceData) - const updatedSourceData = updatedSource.data as Entity + const updatedSourceData = updatedSource as Entity set(state => ({ sources: state.sources.map(source => diff --git a/ui/src/store/taskStore.ts b/ui/src/store/taskStore.ts index e49a68b7..cc207781 100644 --- a/ui/src/store/taskStore.ts +++ b/ui/src/store/taskStore.ts @@ -30,7 +30,7 @@ export const createTaskSlice: StateCreator = set => ({ try { const response = await jobService.getJobTasks(jobId) set({ - jobTasks: response.data, + jobTasks: response, isLoadingJobTasks: false, }) } catch (error) { @@ -49,7 +49,7 @@ export const createTaskSlice: StateCreator = set => ({ try { const response = await jobService.getTaskLogs(jobId, taskId, filePath) set({ - 
taskLogs: response.data, + taskLogs: response, isLoadingTaskLogs: false, }) } catch (error) { diff --git a/ui/src/types/destinationTypes.ts b/ui/src/types/destinationTypes.ts index caec6bfd..0d9a142d 100644 --- a/ui/src/types/destinationTypes.ts +++ b/ui/src/types/destinationTypes.ts @@ -42,6 +42,7 @@ export interface CreateDestinationProps { onFormDataChange?: (formData: DestinationConfig) => void onVersionChange?: (version: string) => void onCatalogTypeChange?: (catalog: string | null) => void + onExistingDestinationIdChange?: (id: number | null) => void docsMinimized?: boolean onDocsMinimizedChange?: React.Dispatch> sourceConnector?: string @@ -65,7 +66,7 @@ export interface DestinationJob extends Omit { } export interface DestinationData { - id?: string + id?: number name: string type: string config: Record diff --git a/ui/src/types/jobTypes.ts b/ui/src/types/jobTypes.ts index 55743fb5..aac3be81 100644 --- a/ui/src/types/jobTypes.ts +++ b/ui/src/types/jobTypes.ts @@ -4,12 +4,14 @@ export interface Job { id: number name: string source: { + id?: number name: string type: string version: string config: string } destination: { + id?: number name: string type: string version: string @@ -28,12 +30,14 @@ export interface Job { export interface JobBase { name: string source: { + id?: number name: string type: string version: string config: string } destination: { + id?: number name: string type: string version: string diff --git a/ui/src/types/sourceTypes.ts b/ui/src/types/sourceTypes.ts index dc849821..4c4ac312 100644 --- a/ui/src/types/sourceTypes.ts +++ b/ui/src/types/sourceTypes.ts @@ -29,6 +29,7 @@ export interface CreateSourceProps { onConnectorChange?: (connector: string) => void onFormDataChange?: (formData: any) => void onVersionChange?: (version: string) => void + onExistingSourceIdChange?: (id: number | null) => void docsMinimized?: boolean onDocsMinimizedChange?: React.Dispatch> } @@ -44,7 +45,7 @@ export interface SourceJob { } export interface SourceData 
{ - id?: string + id?: number name: string type: string config: Record diff --git a/ui/src/utils/constants.ts b/ui/src/utils/constants.ts index 391f36a6..10db2d3e 100644 --- a/ui/src/utils/constants.ts +++ b/ui/src/utils/constants.ts @@ -83,7 +83,7 @@ export const ENTITY_TYPES = { export const DESTINATION_INTERNAL_TYPES = { ICEBERG: "iceberg", - S3: "s3", + S3: "parquet", } export const DESTINATION_LABELS = { diff --git a/ui/src/utils/utils.ts b/ui/src/utils/utils.ts index 769dec86..a59860ab 100644 --- a/ui/src/utils/utils.ts +++ b/ui/src/utils/utils.ts @@ -504,9 +504,9 @@ export const handleSpecResponse = ( errorType: "source" | "destination" = "source", ) => { try { - if (response.success && response.data?.spec?.jsonschema) { - setSchema(response.data.spec.jsonschema) - setUiSchema(JSON.parse(response.data.spec.uischema)) + if (response?.spec?.jsonschema) { + setSchema(response.spec.jsonschema) + setUiSchema(JSON.parse(response.spec.uischema)) } else { console.error(`Failed to get ${errorType} spec:`, response.message) } diff --git a/worker.Dockerfile b/worker.Dockerfile deleted file mode 100644 index 06247864..00000000 --- a/worker.Dockerfile +++ /dev/null @@ -1,26 +0,0 @@ -# Build stage -FROM golang:1.24.2-alpine AS builder -WORKDIR /app/worker - -# Copy go.mod and go.sum first to leverage Docker caching -COPY server/go.mod server/go.sum ./ - -RUN go mod download - -# Copy the entire server directory (since the worker might depend on shared code) -COPY server/ ./ - -# Build the worker binary -RUN go build -o temporal-worker ./cmd/temporal-worker - -# Runtime stage -FROM alpine:3.18 -WORKDIR /app -COPY --from=builder /app/worker/temporal-worker . -RUN mkdir -p ./conf -COPY server/conf/app.conf ./conf/app.conf -RUN apk update && apk add --no-cache docker-cli -RUN mkdir -p ./logger/logs -RUN mkdir -p /mnt/config && chmod -R 777 /mnt/config -ENV TEMPORAL_ADDRESS="temporal:7233" -CMD ["./temporal-worker"]