Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
28 commits
Select a commit. Hold Shift + click to select a range.
e4eda16
feat: add parameter resolver for placeholder substitution
JE-Chen Apr 28, 2026
607fb54
feat: enhance HTTP/FastHttp tasks with headers, body, auth, assertions
JE-Chen Apr 28, 2026
790bd18
feat: add scenario modes (sequence, weighted, conditional)
JE-Chen Apr 28, 2026
1636c60
feat: add WebSocket user template
JE-Chen Apr 28, 2026
ccd95ac
feat: add gRPC user template
JE-Chen Apr 28, 2026
3d0866f
feat: add MQTT user template
JE-Chen Apr 28, 2026
670a7d1
feat: add raw TCP/UDP socket user template
JE-Chen Apr 28, 2026
a93ff1d
feat: support distributed master/worker runners
JE-Chen Apr 28, 2026
8a0e7b6
feat: add Prometheus metrics exporter
JE-Chen Apr 28, 2026
b4a2ea2
feat: add InfluxDB metrics sink
JE-Chen Apr 28, 2026
03077ce
feat: add OpenTelemetry OTLP metrics exporter
JE-Chen Apr 28, 2026
79f29d1
fix: harden TCP control socket with framing, token, and TLS
JE-Chen Apr 28, 2026
017a44a
refactor: move CLI to subcommands (run/run-dir/run-str/init/serve)
JE-Chen Apr 28, 2026
76b5100
feat: add HAR record/replay importer
JE-Chen Apr 28, 2026
8b8f541
feat: add CSV, JUnit XML, and percentile summary reports
JE-Chen Apr 28, 2026
3f64d4c
feat: persist test records to SQLite for cross-run comparison
JE-Chen Apr 28, 2026
8eb623a
feat: add live stats panel to GUI
JE-Chen Apr 28, 2026
0c65e70
feat: add Japanese and Korean GUI translations
JE-Chen Apr 28, 2026
4ab6efe
feat: add MCP server exposing LoadDensity tools to Claude
JE-Chen Apr 28, 2026
2be98b2
feat: register all new functions as LD_* executor commands
JE-Chen Apr 28, 2026
9a26900
build: add optional-dependencies for new feature modules
JE-Chen Apr 28, 2026
cd875b0
fix: address Codacy issues from PR #105
JE-Chen Apr 28, 2026
831da77
fix: address SonarCloud security hotspots on PR #105
JE-Chen Apr 28, 2026
9b48a4f
fix: suppress Codacy semgrep workflow_run checkout warning
JE-Chen Apr 28, 2026
732eca8
fix: place nosemgrep marker directly above the checkout step
JE-Chen Apr 28, 2026
417dbd0
fix: address SonarCloud quality issues on PR #105
JE-Chen Apr 28, 2026
fa7de7b
fix: use ssl.create_default_context for the TLS server
JE-Chen Apr 28, 2026
c58fbb3
fix: tag the TLS context line with NOSONAR S4423
JE-Chen Apr 28, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
107 changes: 107 additions & 0 deletions .github/workflows/publish-pypi.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,107 @@
name: Publish to PyPI

# Trigger only after the trusted "CI Stable" workflow has completed on main.
on:
  workflow_run:
    workflows: ["CI Stable"]
    types: [completed]
    branches: [main]

# The job pushes a version-bump commit and a tag back to main.
permissions:
  contents: write

# Serialise publish runs: if two "CI Stable" completions overlap, both jobs
# would bump the version and one push would be rejected as non-fast-forward.
# Queue them instead of cancelling so every passing commit still publishes.
concurrency:
  group: publish-pypi
  cancel-in-progress: false

jobs:
  publish:
    # Run only when the upstream CI on the trusted main branch finished
    # successfully. workflow_run carries head_branch + head_sha so we can
    # gate strictly on the main branch and check out the exact commit
    # that passed CI, avoiding any race with a later push to main and
    # ensuring no fork-originated ref is ever materialised here.
    if: >-
      ${{ github.event.workflow_run.conclusion == 'success'
      && github.event.workflow_run.head_branch == 'main'
      && github.event.workflow_run.event != 'pull_request' }}
    runs-on: ubuntu-latest

    steps:
      # The job's `if` already gates on workflow_run.head_branch == 'main'
      # and workflow_run.event != 'pull_request', so a fork PR head can
      # never reach this checkout. We pin to workflow_run.head_sha to
      # publish exactly the commit that passed CI on main.
      # nosemgrep: yaml.github-actions.security.workflow-run-target-code-checkout.workflow-run-target-code-checkout
      - name: Checkout the exact commit that passed CI
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.workflow_run.head_sha }}
          # Full history so the later push of the bump commit onto main
          # can be evaluated as a fast-forward against the remote ref.
          fetch-depth: 0
          # Keep the token in the git config: this job pushes a commit
          # and a tag back to the repository in a later step.
          persist-credentials: true
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"

      - name: Install build tools
        run: |
          python -m pip install --upgrade pip
          pip install build twine tomlkit

      # Bump the patch component of project.version in pyproject.toml
      # (tomlkit preserves formatting/comments) and expose old_version /
      # new_version as step outputs for the tagging and release steps.
      - name: Bump patch version in pyproject.toml
        id: bump
        run: |
          python <<'PYEOF'
          import os
          import tomlkit

          path = "pyproject.toml"
          with open(path, encoding="utf-8") as f:
              doc = tomlkit.parse(f.read())

          old = str(doc["project"]["version"])
          parts = old.split(".")
          parts[-1] = str(int(parts[-1]) + 1)
          new = ".".join(parts)
          doc["project"]["version"] = new

          with open(path, "w", encoding="utf-8") as f:
              f.write(tomlkit.dumps(doc))

          with open(os.environ["GITHUB_OUTPUT"], "a") as f:
              f.write(f"old_version={old}\n")
              f.write(f"new_version={new}\n")

          print(f"Bumped version: {old} -> {new}")
          PYEOF

      - name: Build distributions
        run: python -m build

      - name: Publish to PyPI
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
        run: twine upload dist/*

      - name: Commit and tag bumped version
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git add pyproject.toml
          git commit -m "chore: bump version to ${{ steps.bump.outputs.new_version }} [skip ci]"
          git tag "v${{ steps.bump.outputs.new_version }}"
          # Push the bump commit onto main directly. We checked out a
          # detached HEAD at workflow_run.head_sha, so push HEAD into
          # refs/heads/main. If main has moved since the CI run, this
          # rejects as non-fast-forward rather than overwriting history.
          git push origin "HEAD:refs/heads/main"
          git push origin "v${{ steps.bump.outputs.new_version }}"

      - name: Create GitHub Release
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          gh release create "v${{ steps.bump.outputs.new_version }}" \
            dist/* \
            --title "v${{ steps.bump.outputs.new_version }}" \
            --notes "Automated release for version ${{ steps.bump.outputs.new_version }}. Published to PyPI: https://pypi.org/project/je-load-density/${{ steps.bump.outputs.new_version }}/" \
            --target main
126 changes: 93 additions & 33 deletions je_load_density/__init__.py
Original file line number Diff line number Diff line change
@@ -1,46 +1,95 @@
# hook (side-effect import: registers Locust request hooks)
from je_load_density.wrapper.event.request_hook import request_hook # noqa: F401
# env
from je_load_density.utils.executor.action_executor import add_command_to_executor
# executor
from je_load_density.utils.executor.action_executor import execute_action
from je_load_density.utils.executor.action_executor import execute_files
from je_load_density.utils.executor.action_executor import executor
# file

# Executor + action plumbing
from je_load_density.utils.executor.action_executor import (
add_command_to_executor,
execute_action,
execute_files,
executor,
)
from je_load_density.utils.file_process.get_dir_file_list import get_dir_files_as_list
# html
from je_load_density.utils.generate_report.generate_html_report import generate_html
from je_load_density.utils.generate_report.generate_html_report import generate_html_report
# json
from je_load_density.utils.generate_report.generate_json_report import generate_json
from je_load_density.utils.generate_report.generate_json_report import generate_json_report

# Reports
from je_load_density.utils.generate_report.generate_csv_report import generate_csv_report
from je_load_density.utils.generate_report.generate_html_report import (
generate_html,
generate_html_report,
)
from je_load_density.utils.generate_report.generate_json_report import (
generate_json,
generate_json_report,
)
from je_load_density.utils.generate_report.generate_junit_report import generate_junit_report
from je_load_density.utils.generate_report.generate_summary_report import (
build_summary,
generate_summary_report,
)
from je_load_density.utils.generate_report.generate_xml_report import (
generate_xml,
generate_xml_report,
)

# JSON IO
from je_load_density.utils.json.json_file.json_file import read_action_json
# xml
from je_load_density.utils.generate_report.generate_xml_report import generate_xml
from je_load_density.utils.generate_report.generate_xml_report import generate_xml_report
# server
from je_load_density.utils.socket_server.load_density_socket_server import start_load_density_socket_server
# test record

# Metrics
from je_load_density.utils.metrics import (
start_influxdb_sink,
start_opentelemetry_exporter,
start_prometheus_exporter,
stop_influxdb_sink,
stop_opentelemetry_exporter,
stop_prometheus_exporter,
)

# Parameterisation
from je_load_density.utils.parameterization import (
parameter_resolver,
register_csv_source,
register_csv_sources,
register_variable,
register_variables,
resolve,
)

# Recording / replay
from je_load_density.utils.recording.har_importer import (
har_to_action_json,
har_to_tasks,
load_har,
)

# Project scaffolding
from je_load_density.utils.project.create_project_structure import create_project_dir

# Control socket
from je_load_density.utils.socket_server.load_density_socket_server import (
start_load_density_socket_server,
)

# Test records
from je_load_density.utils.test_record.sqlite_persistence import (
fetch_run_records,
list_runs,
persist_records,
)
from je_load_density.utils.test_record.test_record_class import test_record_instance
# start
from je_load_density.wrapper.create_locust_env.create_locust_env import prepare_env
from je_load_density.wrapper.create_locust_env.create_locust_env import create_env

# Proxy
# Locust environment + start
from je_load_density.wrapper.create_locust_env.create_locust_env import (
create_env,
prepare_env,
)
from je_load_density.wrapper.proxy.proxy_user import locust_wrapper_proxy

from je_load_density.wrapper.start_wrapper.start_test import start_test

# Locust
from locust import SequentialTaskSet
from locust import task
from locust import TaskSet
# Locust re-exports
from locust import SequentialTaskSet, TaskSet, task

# Callback
# Callback executor
from je_load_density.utils.callback.callback_function_executor import callback_executor

from je_load_density.utils.project.create_project_structure import create_project_dir

__all__ = [
"create_env", "start_test",
"locust_wrapper_proxy",
Expand All @@ -49,9 +98,20 @@
"execute_action", "execute_files", "executor", "add_command_to_executor",
"get_dir_files_as_list",
"generate_html", "generate_html_report",
"generate_json", "generate_json_report", "read_action_json",
"generate_json", "generate_json_report",
"generate_xml", "generate_xml_report",
"generate_csv_report", "generate_junit_report", "generate_summary_report",
"build_summary",
"read_action_json",
"start_load_density_socket_server",
"SequentialTaskSet", "task", "TaskSet",
"callback_executor", "create_project_dir"
"callback_executor", "create_project_dir",
"parameter_resolver", "resolve",
"register_variable", "register_variables",
"register_csv_source", "register_csv_sources",
"har_to_action_json", "har_to_tasks", "load_har",
"persist_records", "list_runs", "fetch_run_records",
"start_prometheus_exporter", "stop_prometheus_exporter",
"start_influxdb_sink", "stop_influxdb_sink",
"start_opentelemetry_exporter", "stop_opentelemetry_exporter",
]
Loading
Loading