150 changes: 150 additions & 0 deletions ignore/file-operations.test.ts
@@ -0,0 +1,150 @@
// import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
// import { registerFileOperationsTool } from '../tools/file-operations.js';

// describe('File Operations Tool', () => {
// let mockServer: any;
// let mockFs: any;

// beforeEach(() => {
// mockServer = {
// registerTool: vi.fn()
// };

// // Mock fs/promises
// mockFs = {
// readFile: vi.fn().mockResolvedValue('test content'),
// writeFile: vi.fn().mockResolvedValue(undefined),
// mkdir: vi.fn().mockResolvedValue(undefined),
// unlink: vi.fn().mockResolvedValue(undefined),
// readdir: vi.fn().mockResolvedValue([
// { name: 'test.txt', isDirectory: () => false },
// { name: 'test-dir', isDirectory: () => true }
// ]),
// stat: vi.fn().mockResolvedValue({ isDirectory: () => true })
// };

// // Mock the fs/promises module
// vi.doMock('fs/promises', () => mockFs);
// vi.doMock('path', () => ({
// dirname: vi.fn().mockReturnValue('/test'),
// join: vi.fn().mockImplementation((...args) => args.join('/'))
// }));
// });

// afterEach(() => {
// vi.restoreAllMocks();
// });

// describe('Tool Registration', () => {
// it('should register with correct name and config', () => {
// registerFileOperationsTool(mockServer);

// expect(mockServer.registerTool).toHaveBeenCalledWith(
// 'file-operations',
// expect.objectContaining({
// title: 'File Operations Tool',
// description: 'Perform basic file operations with proper error handling and validation'
// }),
// expect.any(Function)
// );
// });
// });

// describe('Read Operation', () => {
// it('should read file successfully', async () => {
// const mockHandler = mockServer.registerTool.mock.calls[0][2];
// const result = await mockHandler({ operation: 'read', path: '/test.txt' });

// expect(result.content[0].text).toBe('test content');
// expect(result.isError).toBe(false);
// });

// it('should handle read errors', async () => {
// mockFs.readFile.mockRejectedValue(new Error('Permission denied'));

// const mockHandler = mockServer.registerTool.mock.calls[0][2];
// const result = await mockHandler({ operation: 'read', path: '/test.txt' });

// expect(result.isError).toBe(true);
// expect(result.content[0].text).toContain('Read error: Permission denied');
// });
// });

// describe('Write Operation', () => {
// it('should write file successfully', async () => {
// const mockHandler = mockServer.registerTool.mock.calls[0][2];
// const result = await mockHandler({
// operation: 'write',
// path: '/test.txt',
// content: 'hello world'
// });

// expect(result.content[0].text).toContain('Successfully wrote 11 characters');
// expect(result.isError).toBe(false);
// });

// it('should require content for write operation', async () => {
// const mockHandler = mockServer.registerTool.mock.calls[0][2];
// const result = await mockHandler({
// operation: 'write',
// path: '/test.txt'
// });

// expect(result.isError).toBe(true);
// expect(result.content[0].text).toContain('Content required for write operation');
// });
// });

// describe('List Operation', () => {
// it('should list directory successfully', async () => {
// const mockHandler = mockServer.registerTool.mock.calls[0][2];
// const result = await mockHandler({
// operation: 'list',
// path: '/test-dir'
// });

// expect(result.content[0].text).toContain('[FILE] test.txt');
// expect(result.content[0].text).toContain('[DIR] test-dir');
// expect(result.isError).toBe(false);
// });

// it('should handle non-directory path for list', async () => {
// mockFs.stat.mockResolvedValue({ isDirectory: () => false });

// const mockHandler = mockServer.registerTool.mock.calls[0][2];
// const result = await mockHandler({
// operation: 'list',
// path: '/test.txt'
// });

// expect(result.isError).toBe(true);
// expect(result.content[0].text).toContain('Path must be a directory');
// });
// });

// describe('Input Validation', () => {
// it('should reject empty path', async () => {
// registerFileOperationsTool(mockServer);
// const mockHandler = mockServer.registerTool.mock.calls[0][2];
// const result = await mockHandler({
// operation: 'read',
// path: ''
// });

// expect(result.isError).toBe(true);
// expect(result.content[0].text).toContain('Path cannot be empty');
// });

// it('should reject unknown operations', async () => {
// registerFileOperationsTool(mockServer);
// const mockHandler = mockServer.registerTool.mock.calls[0][2];
// const result = await mockHandler({
// operation: 'unknown',
// path: '/test.txt'
// });

// expect(result.isError).toBe(true);
// expect(result.content[0].text).toContain('Unknown operation');
// });
// });
// });
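The implementation these tests exercise is not part of this diff. For orientation, here is a minimal sketch of what `registerFileOperationsTool` might look like, reconstructed only from the assertions above; the `registerTool(name, config, handler)` signature, the `content`/`isError` result shape, and the exact message strings are assumptions inferred from the tests, not the code shipped in this PR:

```typescript
// Hypothetical sketch only -- inferred from the test expectations, not the actual module.
import { promises as fs } from 'fs';

type ToolResult = { content: { type: 'text'; text: string }[]; isError: boolean };

// Small helper that builds the result shape the tests assert on.
const text = (t: string, isError = false): ToolResult => ({
  content: [{ type: 'text', text: t }],
  isError,
});

export function registerFileOperationsTool(server: any): void {
  server.registerTool(
    'file-operations',
    {
      title: 'File Operations Tool',
      description: 'Perform basic file operations with proper error handling and validation',
    },
    async ({ operation, path, content }: { operation: string; path: string; content?: string }): Promise<ToolResult> => {
      if (!path) return text('Path cannot be empty', true);

      switch (operation) {
        case 'read':
          try {
            return text(await fs.readFile(path, 'utf-8'));
          } catch (err: any) {
            return text(`Read error: ${err.message}`, true);
          }
        case 'write':
          if (content === undefined) return text('Content required for write operation', true);
          await fs.writeFile(path, content, 'utf-8');
          return text(`Successfully wrote ${content.length} characters to ${path}`);
        case 'list': {
          const stats = await fs.stat(path);
          if (!stats.isDirectory()) return text('Path must be a directory', true);
          const entries = await fs.readdir(path, { withFileTypes: true });
          return text(entries.map((e) => `${e.isDirectory() ? '[DIR]' : '[FILE]'} ${e.name}`).join('\n'));
        }
        default:
          return text(`Unknown operation: ${operation}`, true);
      }
    }
  );
}
```

A production version would presumably wrap the write and list branches in the same try/catch pattern as the read branch; the tests above only assert error handling for reads.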
66 changes: 66 additions & 0 deletions ignore/server-logging.test.ts
@@ -0,0 +1,66 @@
// import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
// import { beginSimulatedLogging } from '../server/logging.js';
// // import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';

// describe('Logging Module', () => {
// let mockServer: any;
// let clearIntervalSpy: any;

// beforeEach(() => {
// mockServer = {
// sendLoggingMessage: vi.fn()
// };
// clearIntervalSpy = vi.spyOn(global, 'clearInterval').mockImplementation(() => { });
// vi.useFakeTimers();
// });

// afterEach(() => {
// clearIntervalSpy.mockRestore();
// vi.useRealTimers();
// });

// describe('beginSimulatedLogging', () => {
// it('should start logging without session ID', () => {
// beginSimulatedLogging(mockServer, undefined);

// vi.advanceTimersByTime(1000);

// expect(mockServer.sendLoggingMessage).toHaveBeenCalledWith({
// level: 'debug',
// data: 'Debug-level message'
// });
// });

// it('should start logging with session ID', () => {
// const sessionId = 'test-session-123';
// beginSimulatedLogging(mockServer, sessionId);

// vi.advanceTimersByTime(1000);

// expect(mockServer.sendLoggingMessage).toHaveBeenCalledWith({
// level: 'debug',
// data: 'Debug-level message - SessionId test-session-123'
// });
// });

// it('should send different log levels', () => {
// beginSimulatedLogging(mockServer, 'test');

// // Advance through multiple intervals to get different log levels
// for (let i = 0; i < 5; i++) {
// vi.advanceTimersByTime(5000);
// }

// expect(mockServer.sendLoggingMessage).toHaveBeenCalledTimes(6); // 1 initial + 5 intervals

// const calls = mockServer.sendLoggingMessage.mock.calls;
// const levels = calls.map((call: any) => call[0].level);

// expect(levels).toContain('debug');
// expect(levels).toContain('info');
// expect(levels).toContain('notice');
// expect(levels).toContain('warning');
// expect(levels).toContain('error');
// });
// });
// });
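`beginSimulatedLogging` is likewise outside this diff. A rough sketch consistent with the expectations above, assuming an immediate first message followed by a fixed timer that cycles through the five levels; the non-debug message strings, the 5-second cadence, and the returned timer handle are placeholders rather than confirmed behaviour:

```typescript
// Hypothetical sketch only -- inferred from the test expectations, not the actual module.
const LEVELS = ['debug', 'info', 'notice', 'warning', 'error'] as const;
const MESSAGES = [
  'Debug-level message',   // asserted by the tests
  'Info-level message',    // placeholder
  'Notice-level message',  // placeholder
  'Warning-level message', // placeholder
  'Error-level message',   // placeholder
] as const;

export function beginSimulatedLogging(server: any, sessionId?: string): NodeJS.Timeout {
  const suffix = sessionId ? ` - SessionId ${sessionId}` : '';
  let i = 0;

  const send = () => {
    server.sendLoggingMessage({
      level: LEVELS[i % LEVELS.length],
      data: `${MESSAGES[i % MESSAGES.length]}${suffix}`,
    });
    i += 1;
  };

  send();                         // the "1 initial" call counted in the tests
  return setInterval(send, 5000); // assumed cadence, matching advanceTimersByTime(5000)
}
```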
90 changes: 67 additions & 23 deletions scripts/release.py
@@ -16,6 +16,8 @@
import subprocess
from dataclasses import dataclass
from typing import Any, Iterator, NewType, Protocol
from concurrent.futures import ThreadPoolExecutor
from functools import lru_cache


Version = NewType("Version", str)
@@ -62,47 +64,61 @@ def update_version(self, version: Version) -> None: ...
@dataclass
class NpmPackage:
path: Path
_name_cache: str | None = None

def package_name(self) -> str:
with open(self.path / "package.json", "r") as f:
return json.load(f)["name"]
if self._name_cache is None:
with open(self.path / "package.json", "r", encoding="utf-8") as f:
self._name_cache = json.load(f)["name"]
return self._name_cache

def update_version(self, version: Version):
with open(self.path / "package.json", "r+") as f:
package_json_path = self.path / "package.json"
with open(package_json_path, "r+", encoding="utf-8") as f:
data = json.load(f)
data["version"] = version
f.seek(0)
json.dump(data, f, indent=2)
json.dump(data, f, indent=2, ensure_ascii=False)
f.truncate()


@dataclass
class PyPiPackage:
path: Path
_name_cache: str | None = None

def package_name(self) -> str:
with open(self.path / "pyproject.toml") as f:
toml_data = tomlkit.parse(f.read())
name = toml_data.get("project", {}).get("name")
if not name:
raise Exception("No name in pyproject.toml project section")
return str(name)
if self._name_cache is None:
pyproject_path = self.path / "pyproject.toml"
with open(pyproject_path, "r", encoding="utf-8") as f:
toml_data = tomlkit.parse(f.read())
name = toml_data.get("project", {}).get("name")
if not name:
raise ValueError(f"No name in pyproject.toml project section for {self.path}")
self._name_cache = str(name)
return self._name_cache

def update_version(self, version: Version):
pyproject_path = self.path / "pyproject.toml"

# Update version in pyproject.toml
with open(self.path / "pyproject.toml") as f:
with open(pyproject_path, "r", encoding="utf-8") as f:
data = tomlkit.parse(f.read())
data["project"]["version"] = version

with open(self.path / "pyproject.toml", "w") as f:
with open(pyproject_path, "w", encoding="utf-8") as f:
f.write(tomlkit.dumps(data))

# Regenerate uv.lock to match the updated pyproject.toml
subprocess.run(["uv", "lock"], cwd=self.path, check=True)
subprocess.run(["uv", "lock"], cwd=self.path, check=True, capture_output=True)


def has_changes(path: Path, git_hash: GitHash) -> bool:
@lru_cache(maxsize=128)
def has_changes(path_str: str, git_hash_str: str) -> bool:
"""Check if any files changed between current state and git hash"""
path = Path(path_str)
git_hash = GitHash(git_hash_str)

try:
output = subprocess.run(
["git", "diff", "--name-only", git_hash, "--", "."],
@@ -112,9 +128,9 @@ def has_changes(path: Path, git_hash: GitHash) -> bool:
text=True,
)

changed_files = [Path(f) for f in output.stdout.splitlines()]
relevant_files = [f for f in changed_files if f.suffix in [".py", ".ts"]]
return len(relevant_files) >= 1
changed_files = output.stdout.splitlines()
# Use any() for early exit
return any(f.endswith(('.py', '.ts')) for f in changed_files)
except subprocess.CalledProcessError:
return False

@@ -126,12 +142,34 @@ def gen_version() -> Version:


def find_changed_packages(directory: Path, git_hash: GitHash) -> Iterator[Package]:
git_hash_str = str(git_hash)

# Collect all potential packages first
potential_packages = []

for path in directory.glob("*/package.json"):
if has_changes(path.parent, git_hash):
yield NpmPackage(path.parent)
# if has_changes(path.parent, git_hash):
# yield NpmPackage(path.parent)
potential_packages.append((path.parent, NpmPackage))


for path in directory.glob("*/pyproject.toml"):
if has_changes(path.parent, git_hash):
yield PyPiPackage(path.parent)
# if has_changes(path.parent, git_hash):
# yield PyPiPackage(path.parent)
potential_packages.append((path.parent, PyPiPackage))

# Check changes in parallel for better performance
with ThreadPoolExecutor(max_workers=min(4, len(potential_packages))) as executor:
def check_and_create(pkg_path, pkg_class):
if has_changes(str(pkg_path), git_hash_str):
return pkg_class(pkg_path)
return None

results = executor.map(lambda args: check_and_create(*args), potential_packages)

for result in results:
if result is not None:
yield result


@click.group()
@@ -195,14 +233,20 @@ def generate_version() -> int:
def generate_matrix(directory: Path, git_hash: GitHash, pypi: bool, npm: bool) -> int:
# Detect package type
path = directory.resolve(strict=True)
version = gen_version()
# version = gen_version()

# Early exit if neither flag is set
if not npm and not pypi:
click.echo(json.dumps([]))
return 0

changes = []
for package in find_changed_packages(path, git_hash):
pkg = package.path.relative_to(path)
if npm and isinstance(package, NpmPackage):
changes.append(str(pkg))
if pypi and isinstance(package, PyPiPackage):
# if pypi and isinstance(package, PyPiPackage):
elif pypi and isinstance(package, PyPiPackage): # Use elif for efficiency
changes.append(str(pkg))

click.echo(json.dumps(changes))