diff --git a/scripts/validate_bicep_params.py b/scripts/validate_bicep_params.py
index 9320633a..b8d27b8c 100644
--- a/scripts/validate_bicep_params.py
+++ b/scripts/validate_bicep_params.py
@@ -110,7 +110,7 @@ def parse_parameters_env_vars(json_path: Path) -> dict[str, list[str]]:
data = json.loads(sanitized)
params = data.get("parameters", {})
except json.JSONDecodeError:
- pass
+ pass # File is not valid JSON after azd variable substitution; fall through to regex scan
# Walk each top-level parameter and scan its entire serialized value
# for ${VAR} references from the original text.
diff --git a/src/backend-api/src/app/application.py b/src/backend-api/src/app/application.py
index 4100decf..80094ffe 100644
--- a/src/backend-api/src/app/application.py
+++ b/src/backend-api/src/app/application.py
@@ -133,7 +133,7 @@ def _register_dependencies(self):
)
.add_singleton(ILoggerService, ConsoleLoggerService)
.add_transient(IHttpService, HttpClientService)
- .add_singleton(IDataService, lambda: InMemoryDataService())
+ .add_singleton(IDataService, InMemoryDataService)
)
def run(self, host: str = "0.0.0.0", port: int = 8000, reload: bool = True):
diff --git a/src/backend-api/src/app/libs/base/SKLogicBase.py b/src/backend-api/src/app/libs/base/SKLogicBase.py
index 1659712a..b9a2a17f 100644
--- a/src/backend-api/src/app/libs/base/SKLogicBase.py
+++ b/src/backend-api/src/app/libs/base/SKLogicBase.py
@@ -81,7 +81,7 @@ def _init_agent(self):
"""
raise NotImplementedError("This method should be overridden in subclasses")
- async def execute(func_params: dict[str, any]):
+ async def execute(self, func_params: dict[str, any]):
raise NotImplementedError("Execute method not implemented")
@overload
@@ -92,7 +92,7 @@ async def execute_thread(
thread: AgentThread | AssistantAgentThread | AzureAIAgentThread = None,
) -> tuple[str, AgentThread | AssistantAgentThread | AzureAIAgentThread]:
"""When response_format is None, returns string response."""
- ...
+ pass
@overload
async def execute_thread(
@@ -102,7 +102,7 @@ async def execute_thread(
thread: AgentThread | AssistantAgentThread | AzureAIAgentThread = None,
) -> tuple[T, AgentThread | AssistantAgentThread | AzureAIAgentThread]:
"""When response_format is provided, returns typed Pydantic BaseModel response."""
- ...
+ pass
@abstractmethod
async def execute_thread(
diff --git a/src/backend-api/src/app/libs/base/fastapi_protocol.py b/src/backend-api/src/app/libs/base/fastapi_protocol.py
index cf9622d0..1a87bf74 100644
--- a/src/backend-api/src/app/libs/base/fastapi_protocol.py
+++ b/src/backend-api/src/app/libs/base/fastapi_protocol.py
@@ -16,7 +16,8 @@ class FastAPIWithContext(Protocol):
app_context: AppContext
# Include essential FastAPI methods for type checking
- def include_router(self, *args, **kwargs) -> None: ...
+ def include_router(self, *args, **kwargs) -> None:
+ pass
def add_app_context_to_fastapi(
diff --git a/src/backend-api/src/tests/application/test_dependency_injection.py b/src/backend-api/src/tests/application/test_dependency_injection.py
index 15fd9527..7db8c277 100644
--- a/src/backend-api/src/tests/application/test_dependency_injection.py
+++ b/src/backend-api/src/tests/application/test_dependency_injection.py
@@ -126,7 +126,7 @@ def test_factory_registration():
app_context = AppContext()
# Register with factory function
- app_context.add_singleton(IDataService, lambda: InMemoryDataService())
+ app_context.add_singleton(IDataService, InMemoryDataService)
data_service = app_context.get_service(IDataService)
assert isinstance(data_service, InMemoryDataService)
diff --git a/src/frontend/frontend_server.py b/src/frontend/frontend_server.py
index 1ab61f5a..0d8a2113 100644
--- a/src/frontend/frontend_server.py
+++ b/src/frontend/frontend_server.py
@@ -4,7 +4,7 @@
from dotenv import load_dotenv
from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
-from fastapi.responses import FileResponse, HTMLResponse, JSONResponse
+from fastapi.responses import FileResponse, JSONResponse
from fastapi.staticfiles import StaticFiles
# Load environment variables from .env file
@@ -42,7 +42,6 @@ async def serve_index():
async def get_config(request: Request):
# Only serve config to same-origin requests by checking the Referer/Origin
origin = request.headers.get("origin") or ""
- referer = request.headers.get("referer") or ""
host = request.headers.get("host") or ""
if origin and not origin.endswith(host):
return JSONResponse(status_code=403, content={"detail": "Forbidden"})
diff --git a/src/frontend/src/api/utils.tsx b/src/frontend/src/api/utils.tsx
index 95ce5459..5bbedf36 100644
--- a/src/frontend/src/api/utils.tsx
+++ b/src/frontend/src/api/utils.tsx
@@ -414,7 +414,7 @@ export const renderErrorSection = (batchSummary, expandedSections, setExpandedSe
export const renderErrorContent = (batchSummary) => {
// Group errors by file
- const errorFiles = batchSummary.files.filter(file => file.error_count && file.error_count);
+ const errorFiles = batchSummary.files.filter(file => file.error_count > 0);
if (errorFiles.length === 0) {
return (
diff --git a/src/frontend/src/commonComponents/ProgressModal/progressModal.tsx b/src/frontend/src/commonComponents/ProgressModal/progressModal.tsx
index 3f741260..f800e29e 100644
--- a/src/frontend/src/commonComponents/ProgressModal/progressModal.tsx
+++ b/src/frontend/src/commonComponents/ProgressModal/progressModal.tsx
@@ -41,8 +41,8 @@ const ProgressModal: React.FC
= ({
}) => {
// Calculate progress percentage based on step (stable step-level identifier)
const getProgressPercentage = () => {
+ if (processingCompleted) return 100;
if (migrationError) return 0; // Show 0% progress for errors
- if (processingCompleted && !migrationError) return 100;
if (!apiData) return 0;
// Use apiData.step (stable: "analysis", "design", "yaml_conversion", "documentation")
diff --git a/src/frontend/src/components/batchHistoryPanel.tsx b/src/frontend/src/components/batchHistoryPanel.tsx
index 0e0259d9..d180b78c 100644
--- a/src/frontend/src/components/batchHistoryPanel.tsx
+++ b/src/frontend/src/components/batchHistoryPanel.tsx
@@ -21,7 +21,7 @@ interface BatchHistoryItem {
status: string;
}
const HistoryPanel: React.FC = ({ isOpen, onClose }) => {
- const headers = {}
+ const headers = {};
const [batchHistory, setBatchHistory] = useState([]);
const [loading, setLoading] = useState(false);
const [error, setError] = useState(null);
@@ -81,46 +81,6 @@ const HistoryPanel: React.FC = ({ isOpen, onClose }) => {
}
};
- // Function to categorize batches
- const categorizeBatches = () => {
- const now = new Date();
- const userTimeZone = Intl.DateTimeFormat().resolvedOptions().timeZone;
-
- // Get start of "Today", "Past 7 days", and "Past 30 days" in LOCAL time
- const todayStart = new Date(now.getFullYear(), now.getMonth(), now.getDate());
- const past7DaysStart = new Date(todayStart);
- const past30DaysStart = new Date(todayStart);
-
- past7DaysStart.setDate(todayStart.getDate() - 7);
- past30DaysStart.setDate(todayStart.getDate() - 30);
-
- const todayBatches: BatchHistoryItem[] = [];
- const past7DaysBatches: BatchHistoryItem[] = [];
- const past30DaysBatches: BatchHistoryItem[] = [];
-
- batchHistory.forEach(batch => {
- // Convert UTC timestamp to user's local date
- const updatedAtUTC = new Date(batch.created_at);
- const updatedAtLocal = new Date(updatedAtUTC.toLocaleString("en-US", { timeZone: userTimeZone }));
-
- // Extract only the local **date** part for comparison
- const updatedDate = new Date(updatedAtLocal.getFullYear(), updatedAtLocal.getMonth(), updatedAtLocal.getDate());
-
- // Categorize based on **exact day comparison**
- if (updatedDate.getTime() === todayStart.getTime()) {
- todayBatches.push(batch);
- } else if (updatedDate.getTime() >= past7DaysStart.getTime()) {
- past7DaysBatches.push(batch);
- } else if (updatedDate.getTime() >= past30DaysStart.getTime()) {
- past30DaysBatches.push(batch);
- }
- });
-
- return { todayBatches, past7DaysBatches, past30DaysBatches };
- };
-
- // const { todayBatches, past7DaysBatches, past30DaysBatches } = categorizeBatches();
-
const deleteBatchFromHistory = (batchId: string) => {
// Get the current URL path
const currentPath = window.location.pathname;
diff --git a/src/frontend/src/components/bottomBar.tsx b/src/frontend/src/components/bottomBar.tsx
index 3041a988..e43aa3d9 100644
--- a/src/frontend/src/components/bottomBar.tsx
+++ b/src/frontend/src/components/bottomBar.tsx
@@ -1,5 +1,5 @@
import { Button, Card, Dropdown, DropdownProps, Option } from "@fluentui/react-components"
-import React, { useState } from "react"
+import React from "react"
import { useNavigate } from "react-router-dom"
// Define possible upload states
diff --git a/src/frontend/src/components/uploadButton.tsx b/src/frontend/src/components/uploadButton.tsx
index 345bba5b..631493ea 100644
--- a/src/frontend/src/components/uploadButton.tsx
+++ b/src/frontend/src/components/uploadButton.tsx
@@ -9,7 +9,7 @@ import {
Tooltip,
} from "@fluentui/react-components";
import { MessageBar, MessageBarType } from "@fluentui/react";
-import { deleteBatch, deleteFileFromBatch, createProcess, uploadFiles, startProcessing, deleteFile } from '../slices/batchSlice';
+import { deleteBatch, createProcess, uploadFiles, startProcessing, deleteFile } from '../slices/batchSlice';
import { useDispatch } from 'react-redux';
import ConfirmationDialog from '../commonComponents/ConfirmationDialog/confirmationDialogue';
import { AppDispatch } from '../store/store'
@@ -53,7 +53,6 @@ const FileUploadZone: React.FC = ({
const [allUploadsComplete, setAllUploadsComplete] = useState(false);
const [fileLimitExceeded, setFileLimitExceeded] = useState(false);
const [showFileLimitDialog, setShowFileLimitDialog] = useState(false);
- const [isCreatingProcess, setIsCreatingProcess] = useState(false);
const [rejectedFiles, setRejectedFiles] = useState([]);
const [showFileRejectionError, setShowFileRejectionError] = useState(false);
const [showNetworkError, setShowNetworkError] = useState(false);
@@ -823,12 +822,6 @@ const FileUploadZone: React.FC = ({
}, [showFileRejectionError]);
- const handleStartProcessing = () => {
- if (uploadState === 'COMPLETED' && onStartTranslating) {
- onStartTranslating();
- }
- };
-
return (
{
const initMsal = async () => {
try {
const response = await fetch('/config');
- let config = defaultConfig;
+ let config;
if (response.ok) {
config = await response.json();
diff --git a/src/frontend/src/msal-auth/apiHeaders.ts b/src/frontend/src/msal-auth/apiHeaders.ts
index 932c23ec..c7432e9f 100644
--- a/src/frontend/src/msal-auth/apiHeaders.ts
+++ b/src/frontend/src/msal-auth/apiHeaders.ts
@@ -1,5 +1,4 @@
// apiHeaders.ts - Utility for creating API headers with user info
-import { getMsalInstance } from './msalInstance';
export interface ApiHeaders {
'Authorization': string;
diff --git a/src/frontend/src/pages/batchView.tsx b/src/frontend/src/pages/batchView.tsx
index 1ec56c81..0f40c131 100644
--- a/src/frontend/src/pages/batchView.tsx
+++ b/src/frontend/src/pages/batchView.tsx
@@ -13,7 +13,6 @@ import {
Card,
tokens,
Spinner,
- Tooltip,
} from "@fluentui/react-components"
import {
DismissCircle24Regular,
@@ -183,7 +182,6 @@ const BatchStoryPage = () => {
const [selectedFileId, setSelectedFileId] = useState
("");
const [expandedSections, setExpandedSections] = useState(["errors"]);
const [batchSummary, setBatchSummary] = useState(null);
- const [selectedFileContent, setSelectedFileContent] = useState("");
const [selectedFileTranslatedContent, setSelectedFileTranslatedContent] = useState("");
const [telemetryData, setTelemetryData] = useState(null);
@@ -339,7 +337,6 @@ const BatchStoryPage = () => {
const data = await apiService.get(`/process/${batchId}/file/${encodeURIComponent(selectedFileId)}`);
if (data) {
- setSelectedFileContent(data.content || "");
setSelectedFileTranslatedContent(data.content || ""); // Use content for both since we only have one version
}
@@ -354,38 +351,6 @@ const BatchStoryPage = () => {
}, [selectedFileId]);
- const renderWarningContent = () => {
- if (!expandedSections.includes("warnings")) return null;
-
- if (!batchSummary) return null;
-
- // Group warnings by file
- const warningFiles = files.filter(file => file.warningCount && file.warningCount > 0 && file.id !== "summary");
-
- if (warningFiles.length === 0) {
- return (
-
- No warnings found.
-
- );
- }
-
- return (
-
- {warningFiles.map((file, fileIndex) => (
-
-
- {file.name} ({file.warningCount})
- source
-
-
- Warning in file processing. See file for details.
-
-
- ))}
-
- );
- };
// Helper function to count JSON/YAML files
const getJsonYamlFileCount = () => {
@@ -601,7 +566,7 @@ const BatchStoryPage = () => {
}
// Show the summary page when summary is selected
- if (selectedFile.id === "summary" && batchSummary) {
+ if (selectedFile.id === "summary" && batchSummary) {
// Check if there are no errors and all JSON/YAML files are processed successfully
const noErrors = (batchSummary.error_count === 0);
const jsonYamlFileCount = getJsonYamlFileCount();
diff --git a/src/frontend/src/pages/landingPage.tsx b/src/frontend/src/pages/landingPage.tsx
index 07bea471..619ee84d 100644
--- a/src/frontend/src/pages/landingPage.tsx
+++ b/src/frontend/src/pages/landingPage.tsx
@@ -9,10 +9,6 @@ declare global {
startTranslating?: () => Promise;
}
}
-import {
- Button,
- Tooltip,
-} from "@fluentui/react-components";
import Content from "../components/Content/Content";
import Header from "../components/Header/Header";
import HeaderTools from "../components/Header/HeaderTools";
@@ -29,7 +25,6 @@ export const History = bundleIcon(HistoryFilled, HistoryRegular);
export const LandingPage = (): JSX.Element => {
const dispatch = useDispatch(); // Add dispatch hook
- const batchHistoryRef = useRef<{ triggerDeleteAll: () => void } | null>(null);
const isPanelOpen = useSelector((state: RootState) => state.historyPanel.isOpen);
const navigate = useNavigate();
@@ -43,21 +38,13 @@ export const LandingPage = (): JSX.Element => {
setUploadState(state);
};
- const handleCancelUploads = () => {
- // This function will be called from BottomBar
- if (window.cancelUploads) {
- window.cancelUploads();
- }
- setUploadState('IDLE');
- };
-
const handleStartTranslating = async () => {
console.log('Starting translation...');
try {
if (window.startTranslating) {
// Get the batchId from startTranslating first
- const resultBatchId = await window.startTranslating();
+ await window.startTranslating();
navigate('/start');
// if (resultBatchId) {
// // Once processing is complete, navigate to the modern page
diff --git a/src/frontend/src/pages/modernizationPage.tsx b/src/frontend/src/pages/modernizationPage.tsx
index 233f3891..b3390791 100644
--- a/src/frontend/src/pages/modernizationPage.tsx
+++ b/src/frontend/src/pages/modernizationPage.tsx
@@ -35,7 +35,7 @@ import BatchHistoryPanel from "../components/batchHistoryPanel"
import PanelRight from "../components/Panels/PanelRight";
import PanelRightToolbar from "../components/Panels/PanelRightToolbar";
import PanelRightToggles from "../components/Header/PanelRightToggles";
-import { filesLogsBuilder, BatchSummary, completedFiles, filesErrorCounter, hasFiles, renderFileError, fileErrorCounter, renderErrorContent, filesFinalErrorCounter, formatAgent, formatDescription, fileWarningCounter } from "../api/utils";
+import { filesLogsBuilder, BatchSummary, completedFiles, filesErrorCounter, hasFiles, renderFileError, fileErrorCounter, renderErrorContent, filesFinalErrorCounter, fileWarningCounter } from "../api/utils";
import { format } from "sql-formatter";
export const History = bundleIcon(HistoryFilled, HistoryRegular);
@@ -431,33 +431,6 @@ enum Agents {
Agents = "Agent"
}
-
-
-const getTrackPercentage = (status: string, fileTrackLog: TrackLogMessage[]): number => {
- switch (status?.toLowerCase()) {
- case "completed":
- return ProcessingStage.Completed;
- case "in_process":
- if (fileTrackLog && fileTrackLog.length > 0) {
- if (fileTrackLog.some(entry => entry.agent_type === Agents.Checker)) {
- return ProcessingStage.FinalChecks;
- } else if (fileTrackLog.some(entry => entry.agent_type === Agents.Picker)) {
- return ProcessingStage.Processing;
- } else if (fileTrackLog.some(entry => entry.agent_type === Agents.Migrator)) {
- return ProcessingStage.Parsing;
- }
- return ProcessingStage.Starting;
- }
- return ProcessingStage.Queued;
- case "ready_to_process":
- return ProcessingStage.Queued;
- default:
- return ProcessingStage.NotStarted;
- }
-};
-
-
-
const getPrintFileStatus = (status: string): string => {
switch (status) {
case "completed":
@@ -475,10 +448,10 @@ const getPrintFileStatus = (status: string): string => {
const ModernizationPage = () => {
const { batchId } = useParams<{ batchId: string }>();
- const navigate = useNavigate()
+ const navigate = useNavigate();
const [batchSummary, setBatchSummary] = useState(null);
- const styles = useStyles()
+ const styles = useStyles();
const [text, setText] = useState("");
const [isPanelOpen, setIsPanelOpen] = React.useState(false); // Add state management
@@ -487,16 +460,14 @@ const ModernizationPage = () => {
// State for the loading component
const [showLoading, setShowLoading] = useState(true);
- const [loadingError, setLoadingError] = useState(null);
const [selectedFilebg, setSelectedFile] = useState(null);
- const [selectedFileId, setSelectedFileId] = React.useState("")
- const [fileId, setFileId] = React.useState("");
- const [expandedSections, setExpandedSections] = React.useState([])
- const [progressPercentage, setProgressPercentage] = useState(0);
+ const [selectedFileId, setSelectedFileId] = React.useState("");
+ const fileId = selectedFileId;
+ const [expandedSections, setExpandedSections] = React.useState([]);
+ const [progressPercentage] = useState(0);
const [allFilesCompleted, setAllFilesCompleted] = useState(false);
const [isZipButtonDisabled, setIsZipButtonDisabled] = useState(true);
const [fileLoading, setFileLoading] = useState(false);
- const [selectedFileTranslatedContent, setSelectedFileTranslatedContent] = useState("");
const [processingStarted, setProcessingStarted] = useState(false);
// Fetch file content when a file is selected
@@ -511,11 +482,7 @@ const ModernizationPage = () => {
if (!selectedFile || !selectedFile.translatedCode) {
setFileLoading(true);
const newFileUpdate = await fetchFileFromAPI(selectedFile?.fileId || "");
- setSelectedFileTranslatedContent(newFileUpdate.translatedContent);
setFileLoading(false);
- } else {
-
- setSelectedFileTranslatedContent(selectedFile.translatedCode);
}
} catch (err) {
@@ -575,14 +542,13 @@ const ModernizationPage = () => {
setReduxFileList(updatedFiles);
} else {
- setLoadingError("No data received from server");
+ console.log("No data received from server");
}
if (isInitialLoad) {
setShowLoading(false);
}
} catch (err) {
console.error("Error fetching batch data:", err);
- setLoadingError(err instanceof Error ? err.message : "An unknown error occurred");
if (isInitialLoad) {
setShowLoading(false);
}
@@ -591,7 +557,7 @@ const ModernizationPage = () => {
useEffect(() => {
if (!batchId || batchId.length !== 36) {
- setLoadingError("No valid batch ID provided");
+ console.log("No valid batch ID provided");
setShowLoading(false);
return;
}
@@ -801,9 +767,6 @@ const ModernizationPage = () => {
}
}, [batchId]);
- const highestProgressRef = useRef(0);
- const currentProcessingFileRef = useRef(null);
-
//new PT FR ends
const updateSummaryStatus = async () => {
@@ -875,7 +838,7 @@ useEffect(() => {
useEffect(() => {
const loadingTimeout = setTimeout(() => {
if (progressPercentage < 5 && showLoading) {
- setLoadingError('Processing is taking longer than expected. You can continue waiting or try again later.');
+ console.log('Processing is taking longer than expected. You can continue waiting or try again later.');
}
}, 30000);
@@ -1083,7 +1046,7 @@ useEffect(() => {
}
// Show the full summary page only when all files are completed and summary is selected
- if (allFilesCompleted && selectedFile?.id === "summary") {
+ if (selectedFile?.id === "summary") {
const completedCount = files.filter(file => file.status === "completed" && file.file_result !== "error" && file.id !== "summary").length;
const totalCount = files.filter(file => file.id !== "summary").length;
const errorCount = selectedFile.errorCount || 0;
diff --git a/src/frontend/src/pages/processPage.tsx b/src/frontend/src/pages/processPage.tsx
index dbca75bf..a85f8d30 100644
--- a/src/frontend/src/pages/processPage.tsx
+++ b/src/frontend/src/pages/processPage.tsx
@@ -14,10 +14,9 @@ import PanelRightToolbar from "../components/Panels/PanelRightToolbar";
import PanelRight from "../components/Panels/PanelRight";
import BatchHistoryPanel from "../components/batchHistoryPanel";
import { HistoryRegular, HistoryFilled, bundleIcon } from "@fluentui/react-icons";
-import { CircleCheck, X } from "lucide-react";
+import { CircleCheck } from "lucide-react";
import Lottie from 'lottie-react';
import documentLoader from "../../public/images/loader.json";
-import { getApiUrl, headerBuilder } from '../api/config';
import { apiService } from '../services/ApiService';
import ProgressModal from "../commonComponents/ProgressModal/progressModal";
@@ -115,7 +114,6 @@ const ProcessPage: React.FC = () => {
const [currentPhase, setCurrentPhase] = useState("");
const [phaseSteps, setPhaseSteps] = useState([]);
const [apiData, setApiData] = useState(null);
- const [lastUpdateTime, setLastUpdateTime] = useState("");
const [processingCompleted, setProcessingCompleted] = useState(false);
const stepsContainerRef = useRef(null);
// Track the last seen phase to prevent duplicate phase messages
@@ -210,10 +208,6 @@ const ProcessPage: React.FC = () => {
setShowProgressModal(true);
}
- // Update the stored last update time
- if (response.last_update_time) {
- setLastUpdateTime(response.last_update_time);
- }
// Update current phase - only add a new message when the phase actually changes
// This prevents duplicate messages from agent activity changes within the same phase
diff --git a/src/frontend/vite.config.js b/src/frontend/vite.config.js
index 0d05262f..d239b70e 100644
--- a/src/frontend/vite.config.js
+++ b/src/frontend/vite.config.js
@@ -1,6 +1,5 @@
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'
-import path from 'path'
import tailwindcss from '@tailwindcss/vite'
export default defineConfig({
diff --git a/src/processor/src/libs/agent_framework/agent_framework_helper.py b/src/processor/src/libs/agent_framework/agent_framework_helper.py
index d892b1f7..61da842a 100644
--- a/src/processor/src/libs/agent_framework/agent_framework_helper.py
+++ b/src/processor/src/libs/agent_framework/agent_framework_helper.py
@@ -148,7 +148,7 @@ def create_client(
env_file_encoding: str | None = None,
instruction_role: str | None = None,
) -> "AzureOpenAIChatClient":
- ...
+ pass
@overload
@staticmethod
@@ -172,7 +172,7 @@ def create_client(
env_file_path: str | None = None,
env_file_encoding: str | None = None,
) -> "AzureOpenAIAssistantsClient":
- ...
+ pass
@overload
@staticmethod
@@ -194,7 +194,7 @@ def create_client(
env_file_encoding: str | None = None,
instruction_role: str | None = None,
) -> "AzureOpenAIResponsesClient":
- ...
+ pass
@overload
@staticmethod
@@ -217,7 +217,7 @@ def create_client(
instruction_role: str | None = None,
retry_config: RateLimitRetryConfig | None = None,
) -> AzureOpenAIResponseClientWithRetry:
- ...
+ pass
@overload
@staticmethod
@@ -234,7 +234,7 @@ def create_client(
env_file_path: str | None = None,
env_file_encoding: str | None = None,
) -> "AzureAIAgentClient":
- ...
+ pass
@staticmethod
def create_client(
diff --git a/src/processor/src/libs/agent_framework/azure_openai_response_retry.py b/src/processor/src/libs/agent_framework/azure_openai_response_retry.py
index 5851b809..48b829b3 100644
--- a/src/processor/src/libs/agent_framework/azure_openai_response_retry.py
+++ b/src/processor/src/libs/agent_framework/azure_openai_response_retry.py
@@ -654,7 +654,7 @@ async def _tail():
try:
await close()
except Exception:
- pass
+ logger.debug("Best-effort close of response stream failed", exc_info=True)
# Progressive retry for context-length failures.
if (
diff --git a/src/processor/src/libs/agent_framework/qdrant_memory_store.py b/src/processor/src/libs/agent_framework/qdrant_memory_store.py
index b71d9371..d5fbc455 100644
--- a/src/processor/src/libs/agent_framework/qdrant_memory_store.py
+++ b/src/processor/src/libs/agent_framework/qdrant_memory_store.py
@@ -263,7 +263,11 @@ async def close(self) -> None:
try:
await self._client.delete_collection(self.collection_name)
except Exception:
- pass
+ logger.debug(
+ "Best-effort delete of collection %s failed",
+ self.collection_name,
+ exc_info=True,
+ )
await self._client.close()
self._client = None
self._initialized = False
diff --git a/src/processor/src/libs/mcp_server/datetime/mcp_datetime.py b/src/processor/src/libs/mcp_server/datetime/mcp_datetime.py
index dc6e5cd9..77d9b73b 100644
--- a/src/processor/src/libs/mcp_server/datetime/mcp_datetime.py
+++ b/src/processor/src/libs/mcp_server/datetime/mcp_datetime.py
@@ -550,7 +550,6 @@ def convert_timezone(
)
return "\n".join(error_report)
- return f"Error converting timezone: {str(e)}"
@mcp.tool()
@@ -864,7 +863,6 @@ def calculate_time_difference(
[PROCESSING] EXAMPLE WORKING CALLS:
calculate_time_difference('2023-12-25 10:00:00', '2023-12-25 15:30:00', 'hours')
calculate_time_difference('2023-12-25', '2023-12-26', 'days')"""
- return f"Error calculating time difference: {str(e)}"
@mcp.tool()
diff --git a/src/processor/src/libs/mcp_server/mermaid/mcp_mermaid.py b/src/processor/src/libs/mcp_server/mermaid/mcp_mermaid.py
index 7652ca76..c55c0700 100644
--- a/src/processor/src/libs/mcp_server/mermaid/mcp_mermaid.py
+++ b/src/processor/src/libs/mcp_server/mermaid/mcp_mermaid.py
@@ -388,7 +388,7 @@ def _mermaid_render_check(code: str, timeout: int = 10) -> tuple[bool, str]:
:200
]
except json.JSONDecodeError:
- pass
+ pass # Non-JSON output from mermaid CLI; fall through to accept
return True, ""
except (subprocess.TimeoutExpired, OSError):
diff --git a/src/processor/src/libs/reporting/migration_report_generator.py b/src/processor/src/libs/reporting/migration_report_generator.py
index 5749d5bc..5afdf508 100644
--- a/src/processor/src/libs/reporting/migration_report_generator.py
+++ b/src/processor/src/libs/reporting/migration_report_generator.py
@@ -96,7 +96,7 @@ def set_current_file(
if os.path.exists(file_path):
file_size = os.path.getsize(file_path)
except Exception:
- pass
+ logger.debug("Failed to get file size for %s", file_path, exc_info=True)
self._file_contexts[file_name] = FileContext(
file_name=file_name,
diff --git a/src/processor/src/services/control_api.py b/src/processor/src/services/control_api.py
index de0d38d5..56911dc5 100644
--- a/src/processor/src/services/control_api.py
+++ b/src/processor/src/services/control_api.py
@@ -159,12 +159,12 @@ async def stop(self) -> None:
try:
await self._site.stop()
except Exception:
- pass
+ logger.debug("Best-effort stop of control API site failed", exc_info=True)
self._site = None
if self._runner:
try:
await self._runner.cleanup()
except Exception:
- pass
+ logger.debug("Best-effort cleanup of control API runner failed", exc_info=True)
self._runner = None
diff --git a/src/processor/src/services/queue_service.py b/src/processor/src/services/queue_service.py
index a6b1be1e..41d26a4b 100644
--- a/src/processor/src/services/queue_service.py
+++ b/src/processor/src/services/queue_service.py
@@ -348,7 +348,7 @@ async def start_service(self):
self._control_watcher_task, return_exceptions=True
)
except Exception:
- pass
+ logger.debug("Best-effort cancel of control watcher failed", exc_info=True)
self._control_watcher_task = None
self._worker_tasks.clear()
@@ -391,7 +391,7 @@ async def stop_service(self):
try:
await asyncio.gather(self._control_watcher_task, return_exceptions=True)
except Exception:
- pass
+ logger.debug("Best-effort cancel of control watcher failed", exc_info=True)
self._control_watcher_task = None
# Clear inflight tracking
@@ -405,12 +405,12 @@ async def stop_service(self):
if self.main_queue:
self.main_queue.close()
except Exception:
- pass
+ logger.debug("Best-effort close of main_queue failed", exc_info=True)
try:
self.queue_service.close()
except Exception:
- pass
+ logger.debug("Best-effort close of queue_service failed", exc_info=True)
async def stop_process(
self, process_id: str, timeout_seconds: float = 10.0
@@ -478,7 +478,11 @@ async def stop_process(
target_worker_id,
)
except Exception:
- pass
+ logger.debug(
+ "Unexpected error during job cancellation for process_id=%s",
+ process_id,
+ exc_info=True,
+ )
return True
@@ -1116,7 +1120,7 @@ async def _process_queue_message(self, worker_id: int, queue_message: QueueMessa
task_param=task_param,
)
finally:
- migration_processor = None
+ migration_processor = None # release reference for GC # noqa: F841
except asyncio.CancelledError:
# When cancelled, we assume stop_process has already deleted the message
@@ -1146,7 +1150,7 @@ async def _process_queue_message(self, worker_id: int, queue_message: QueueMessa
task_param=None,
)
except Exception:
- pass
+ logger.debug("Failed to record failure for message_id=%s", getattr(queue_message, "id", ""), exc_info=True)
finally:
self._worker_inflight.pop(worker_id, None)
self._worker_inflight_message.pop(worker_id, None)
diff --git a/src/processor/src/steps/migration_processor.py b/src/processor/src/steps/migration_processor.py
index 0ded130f..73b2954a 100644
--- a/src/processor/src/steps/migration_processor.py
+++ b/src/processor/src/steps/migration_processor.py
@@ -403,7 +403,7 @@ async def _generate_report_summary(
),
}
except Exception:
- pass
+ logger.debug("Failed to generate report summary for failure details", exc_info=True)
await telemetry.record_failure_outcome(
process_id=input_data.process_id,
@@ -506,7 +506,7 @@ async def _generate_report_summary(
),
}
except Exception:
- pass
+ logger.debug("Failed to generate report summary for hard termination", exc_info=True)
await telemetry.record_failure_outcome(
process_id=input_data.process_id,
@@ -557,7 +557,7 @@ async def _generate_report_summary(
"migration_report_summary"
] = await _generate_report_summary(ReportStatus.SUCCESS)
except Exception:
- pass
+ logger.debug("Failed to generate report summary for outcome", exc_info=True)
await telemetry.record_final_outcome(
process_id=input_data.process_id,
@@ -565,7 +565,7 @@ async def _generate_report_summary(
success=True,
)
except Exception:
- pass
+ logger.debug("Failed to record final outcome telemetry", exc_info=True)
await telemetry.update_process_status(
process_id=input_data.process_id, status="completed"
@@ -623,7 +623,7 @@ async def _generate_report_summary(
),
}
except Exception:
- pass
+ logger.debug("Failed to generate report summary for executor failure", exc_info=True)
await telemetry.record_failure_outcome(
process_id=input_data.process_id,
@@ -688,7 +688,7 @@ async def _generate_report_summary(
mem_count,
)
except Exception:
- pass
+ logger.debug("Failed to log memory store count after step", exc_info=True)
# step name -> executor_id
# output result -> event.data => if event.data is not None
@@ -740,3 +740,5 @@ async def _generate_report_summary(
start_dt.isoformat(timespec="seconds"),
end_dt.isoformat(timespec="seconds"),
)
+
+ return None
diff --git a/src/processor/src/tests/conftest.py b/src/processor/src/tests/conftest.py
index 26493453..f2adbaaf 100644
--- a/src/processor/src/tests/conftest.py
+++ b/src/processor/src/tests/conftest.py
@@ -16,7 +16,8 @@
# pick up our `src/sitecustomize.py` unless `PYTHONPATH=src` is set. Import it
# explicitly after adding `src/` to `sys.path` so test collection works.
try:
- import sitecustomize # noqa: F401
+ import sitecustomize # noqa: F401 # Intentional side-effect import for compatibility hook
+ _ = sitecustomize # Prevent unused-import warnings
except Exception:
# Tests should still be able to run even if the compatibility hook is absent.
pass
diff --git a/src/processor/src/utils/agent_telemetry.py b/src/processor/src/utils/agent_telemetry.py
index 9e574377..dd1ec0de 100644
--- a/src/processor/src/utils/agent_telemetry.py
+++ b/src/processor/src/utils/agent_telemetry.py
@@ -120,18 +120,6 @@ def get_orchestration_agents() -> set[str]:
}
-# def get_common_agents() -> list[str]:
-# """Get common agent names."""
-# return [
-# "Chief_Architect",
-# "EKS_Expert",
-# "GKE_Expert",
-# "Azure_Expert",
-# "Technical_Writer",
-# "QA_Engineer",
-# ]
-
-
def _get_utc_timestamp() -> str:
"""Get current UTC timestamp in human-readable format"""
return datetime.now(UTC).strftime("%Y-%m-%d %H:%M:%S UTC")
@@ -362,15 +350,6 @@ async def init_process(self, process_id: str, phase: str, step: str):
"""Initialize telemetry for a new process."""
initial_agents = {}
- # Initialize orchestration agents
- # for agent_name in get_orchestration_agents():
- # initial_agents[agent_name] = AgentActivity(
- # name=agent_name,
- # current_action="ready",
- # participation_status="standby",
- # is_active=False,
- # )
-
# Initialize core system agents (not actual responding agents)
for agent_name in get_orchestration_agents():
initial_agents[agent_name] = AgentActivity(
@@ -612,9 +591,6 @@ async def track_tool_usage(
async def update_process_status(self, process_id: str, status: str):
"""Update the overall process status."""
- # if self.current_process:
- # self.current_process.status = status
- # self.current_process.last_update_time = _get_utc_timestamp()
current_process: ProcessStatus | None = None
if self.repository:
@@ -752,15 +728,6 @@ async def transition_to_phase(self, process_id: str, phase: str, step: str):
step,
)
- # async def _cleanup_phase_agents(self, process_id: str, previous_phase: str):
- # """Remove or mark inactive agents not relevant to current phase."""
- # if not self.current_process:
- # return
-
- # # Note: Removed fake orchestration agent cleanup since we no longer create them
- # # Phase orchestrators are Python classes, not agents to be tracked
- # logger.debug(f"[TELEMETRY] Phase cleanup completed: {previous_phase}")
-
async def _initialize_phase_agents(self, process_id: str, phase: str):
"""Initialize agents relevant to the new phase."""
current_process: ProcessStatus | None = None
@@ -844,6 +811,7 @@ async def get_current_process(self, process_id: str) -> ProcessStatus | None:
"""Get the current process status."""
if self.repository:
return await self.repository.get_async(process_id)
+ return None
async def get_process_outcome(self, process_id: str) -> str:
"""Get a human-readable process outcome."""
@@ -1129,7 +1097,7 @@ async def record_step_result(
):
current_process.step_results[step_name]["result"] = stored[0]
except Exception:
- pass
+ logger.debug("Failed to unwrap singleton step result for step=%s", step_name, exc_info=True)
# Lap time: end the timer for this step.
if step_name:
@@ -1444,7 +1412,7 @@ async def record_failure_outcome(
else current_process.failure_details
)
except Exception:
- pass
+ logger.debug("Failed to serialize failure_details for Cosmos", exc_info=True)
current_process.failure_step = failed_step or current_process.step
current_process.failure_timestamp = _get_utc_timestamp()