diff --git a/.github/Workflow/build.yaml b/.github/Workflow/build.yaml
deleted file mode 100644
index df1c5630..00000000
--- a/.github/Workflow/build.yaml
+++ /dev/null
@@ -1,17 +0,0 @@
-name: Build Java SDK
-
-on: [push]
-
-jobs:
- build:
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v3
- - name: Set up JDK 8
- uses: actions/setup-java@v3
- with:
- java-version: '8'
- distribution: 'adopt'
- - name: Build with Maven
- run: mvn clean install
\ No newline at end of file
diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml
deleted file mode 100644
index 39a82ee9..00000000
--- a/.github/workflows/maven.yml
+++ /dev/null
@@ -1,39 +0,0 @@
-# This workflow will build a Java project with Maven, and cache/restore any dependencies to improve the workflow execution time
-# For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven
-
-name: JavaSDK build check
-
-on:
- push:
- branches: [ "main" ]
- pull_request:
- branches: [ "main" ]
-
-jobs:
- build:
-
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v3
- - name: Set up JDK 11
- uses: actions/setup-java@v3
- with:
- java-version: '11'
- distribution: 'temurin'
- cache: maven
- - name: Build with Maven
- run: mvn -B package --file pom.xml
-
- checkstyle:
- name: checkstyle linting
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - uses: dbelyaev/action-checkstyle@v0.6.1
- with:
- reporter: github-pr-review
- checkstyle_config: checkstyle-config.xml
- fail_on_error: true
-
-
diff --git a/.woodpecker/build.yml b/.woodpecker/build.yml
new file mode 100644
index 00000000..d9b863ab
--- /dev/null
+++ b/.woodpecker/build.yml
@@ -0,0 +1,30 @@
+when:
+ - event: pull_request
+ - event: push
+ branch: main
+
+labels:
+ platform: linux/amd64
+
+clone:
+ git:
+ image: woodpeckerci/plugin-git
+ settings:
+ lfs: false
+ depth: 1
+
+steps:
+ build-jdk-8:
+ image: maven:3.9-eclipse-temurin-8
+ commands:
+ - mvn -B -DskipTests clean verify
+
+ build-jdk-17:
+ image: maven:3.9-eclipse-temurin-17
+ commands:
+ - mvn -B -DskipTests clean verify
+
+ build-jdk-21:
+ image: maven:3.9-eclipse-temurin-21
+ commands:
+ - mvn -B -DskipTests clean verify
diff --git a/.woodpecker/release.yml b/.woodpecker/release.yml
new file mode 100644
index 00000000..9559526d
--- /dev/null
+++ b/.woodpecker/release.yml
@@ -0,0 +1,48 @@
+when:
+ - event: tag
+ ref: refs/tags/v*
+
+labels:
+ platform: linux/amd64
+
+clone:
+ git:
+ image: woodpeckerci/plugin-git
+ settings:
+ lfs: false
+ depth: 1
+
+steps:
+ release:
+ image: maven:3.9-eclipse-temurin-17
+ environment:
+ MAVEN_GPG_PRIVATE_KEY:
+ from_secret: maven_gpg_private_key
+ MAVEN_GPG_PASSPHRASE:
+ from_secret: maven_gpg_passphrase
+ CENTRAL_USERNAME:
+ from_secret: central_username
+ CENTRAL_PASSWORD:
+ from_secret: central_password
+ commands:
+ - apt-get update -qq && apt-get install -y -qq gnupg
+ - |
+ echo "$MAVEN_GPG_PRIVATE_KEY" | gpg --batch --pinentry-mode loopback \
+ --passphrase "$MAVEN_GPG_PASSPHRASE" --import
+ - mkdir -p ~/.m2
+ - |
+ cat > ~/.m2/settings.xml <<'XML'
+      <settings>
+        <servers>
+          <server>
+            <id>central</id>
+            <username>${env.CENTRAL_USERNAME}</username>
+            <password>${env.CENTRAL_PASSWORD}</password>
+          </server>
+        </servers>
+      </settings>
+      XML
+ - VERSION="${CI_COMMIT_TAG#v}"
+ - echo "Releasing version $VERSION"
+ - mvn -B versions:set -DnewVersion="$VERSION" -DprocessAllModules=true -DgenerateBackupPoms=false
+ - mvn -B -P release clean deploy -DskipTests -Dgpg.passphrase="$MAVEN_GPG_PASSPHRASE"
diff --git a/README.md b/README.md
index 8f86b726..70795a79 100644
--- a/README.md
+++ b/README.md
@@ -1,292 +1,141 @@
-[](CODE_OF_CONDUCT.md)
-[](https://join.slack.com/t/keploy/shared_invite/zt-12rfbvc01-o54cOG0X1G6eVJTuI_orSA)
-[](https://opensource.org/licenses/Apache-2.0)
-[](https://search.maven.org/search?q=g:%22io.keploy%22%20AND%20a:%22keploy-sdk%22)
+# Keploy Java Coverage Agent
-[//]: # ([](https://search.maven.org/search?q=g:%22io.keploy%22%20AND%20a:%22keploy-sdk%22%20AND%20v:%221.2.6%22))
+This repository contains the Java dynamic dedup coverage agent for Keploy Enterprise.
-# Keploy
+It collects per-testcase Java coverage during Keploy replay and sends that coverage back to Enterprise so duplicate testcases can be identified and removed.
-[Keploy](https://keploy.io) is a no-code testing platform that generates tests from API calls.
+The repository contains only the dedup-focused `keploy-sdk` module.
-This is the client SDK for Keploy API testing platform. There are 2 modes:
+Supported runtimes in CI today are Java 8, 17, and 21.
+
+## How It Works
+
+Keploy Enterprise drives dynamic dedup per testcase.
+
+1. Enterprise sends `START /` on `/tmp/coverage_control.sock`.
+2. The Java agent resets JaCoCo coverage counters for that testcase.
+3. Enterprise replays the testcase.
+4. Enterprise sends `END /` on `/tmp/coverage_control.sock`.
+5. The Java agent dumps JaCoCo execution data, resolves executed Java lines, and sends them as JSON on `/tmp/coverage_data.sock`.
+6. Enterprise writes the result to `dedupData.yaml` and uses it to identify duplicates.
+
+Coverage is collected at per-testcase granularity, not process granularity.
+
+## How to Use
+
+### 1. Add the SDK
+
+Add `keploy-sdk` to your application:
+
+```xml
+<dependency>
+    <groupId>io.keploy</groupId>
+    <artifactId>keploy-sdk</artifactId>
+    <version>2.0.0</version>
+</dependency>
+```
+
+### 2. Activate the Agent
+
+For Spring Boot, import the middleware in your application:
-1. **Record mode**
- 1. Record requests, response and all external calls and sends to Keploy server.
- 2. It runs the request on the API again to identify noisy fields.
- 3. Sends the noisy fields to the keploy server to be saved along with the testcase.
-2. **Test mode**
- 1. Fetches testcases for the app from keploy server.
- 2. Calls the API with same request payload in testcase.
- 3. Validates the respones and uploads results to the keploy server
-
-The Keploy Java SDK helps you to integrate keploy with java applications. (For other languages,
-see [KEPLOY SDKs](https://docs.keploy.io/application-development))
-
-## Contents
-
-1. [Requirements](#requirements)
-2. [Build configuration](#build-configuration)
-3. [Usage](#usage)
-4. [Community support](#community-support)
-5. [Documentation(WIP)](#documentationwip)
-
-## Requirements
-
-- Java 1.8+
-
-## Build configuration
-
-[Find the latest release](https://search.maven.org/artifact/io.keploy/keploy-sdk) of the Keploy Java SDK at maven
-central.
-
-Add *keploy-sdk* as a dependency to your *pom.xml*:
-
-
- io.keploy
- keploy-sdk
- N.N.N (eg: 1.2.9)
-
-
-or to *build.gradle*:
-
- implementation 'io.keploy:keploy-sdk:N.N.N' (eg: 1.2.9)
-
-### KEPLOY_MODE
-
-There are 3 modes:
-
-- **Record**: Sets to record mode.
-- **Test**: Sets to test mode.
-- **Off**: Turns off all the functionality provided by the API
-
-**Note:** `KEPLOY_MODE` value is case sensitive.
-
-
-## Usage
-
-- **Start keploy server [refer](https://github.com/keploy/keploy#start-keploy-server)**
-
-- **For Spring based application**
- - Add `@Import(KeployMiddleware.class)` below `@SpringBootApplication` in your main class.
-- **For Java EE application**
- - Specify the below filter above all other filters and servlets in the **web.xml** file.
- ```xml
-
- middleware
- io.keploy.servlet.KeployMiddleware
-
-
- middleware
- /*
-
- ```
-
-- **Run along with agent to mock external calls of your API 🤩🔥**
-
- - Download the latest - Download the latest agent jar
- from [here](https://repo1.maven.org/maven2/io/keploy/agent/1.2.9/) (eg: 1.2.9)
-
- - Prefix `-javaagent:` with absolute classpath of agent jar (
- eg: `-javaagent:/agent-1.2.9.jar`).
-
- 1. **Using Intellij:** Go to Edit Configuration-> add VM options -> paste _java agent_ edited above.
-
- 2. **Using command line:**
- - First add below plugins in your *pom.xml* file.
- ```xml
-
- org.apache.maven.plugins
- maven-dependency-plugin
- 3.1.1
-
-
- copy-dependencies
- package
-
- copy-dependencies
-
-
-
-
-
-
- org.apache.maven.plugins
- maven-jar-plugin
- 3.2.2
-
-
-
- true
- dependency/
- {your main class}
-
-
-
-
- ```
- - And then run this
- command:`java -javaagent:/agent-1.2.9.jar -jar .jar`
- . This command will attach agent jar and also run the application. You need to set some required env
- variables written below in order to generate test cases. So, run this command after setting the env
- variables.
-
-
-- **Configure Environment Variables**
- - `APP_NAME` (default APP_NAME = myApp)
- - `APP_PORT` (default APP_PORT = 8080)
- - `DELAY` (default DELAY = 5)(It is the estimate application startup time (in sec))
- - `KEPLOY_URL` (default KEPLOY_URL = http://localhost:6789/api)
- - `KEPLOY_MODE` (default KEPLOY_MODE = off)
- - `KEPLOY_TEST_PATH` (default **/src/test/e2e/keploy-tests** directory of your application)
- - `KEPLOY_MOCK_PATH` (default **/src/test/e2e/mocks** directory of your application)
- - `KEPLOY_ASSET_PATH` (default **/src/test/e2e/assets** directory of your application)
- - `DENOISE` (default DENOISE = false)
- **Note:** By enabling denoise, it will filter out noisy fields for the testcase.
- - `RUN_TEST_BEFORE_RECORD` (default RUN_TEST_BEFORE_RECORD = false)
- **Note:** It is used to maintain the same database state when mocking is disabled.
- - `ACCEPT_URL_REGEX_LIST` (default ACCEPT_URL_REGEX_LIST = [])
- **Note:** A list of regex which is used to filter out the urls which are required to be recorded and by default it is empty list. **e.g.,ACCEPT_URL_REGEX_LIST=^/api**
- - `ACCEPT_HEADER_REGEX_LIST` (default ACCEPT_HEADER_REGEX_LIST = [])
- **Note:** A list of regex which is used to filter out the headers which are required to be recorded and by default it is empty list. **e.g.,ACCEPT_HEADER_REGEX_LIST=token:bearer tgffd,content-type\s\*:\s\*application\\\/json**
- - `REJECT_URL_REGEX_LIST` (default REJECT_URL_REGEX_LIST = [])
- **Note:** A list of regex which is used to filter out the urls which are not required to be recorded and by default it is empty list. **e.g.,REJECT_URL_REGEX_LIST=^/assets**
- - `REJECT_HEADER_REGEX_LIST` (default REJECT_HEADER_REGEX_LIST = [])
- **Note:** A list of regex which is used to filter out the headers which are not required to be recorded and by default it is empty list. **e.g.,REJECT_HEADER_REGEX_LIST=token:token tgffd,content-length:\s\*:\s\*"100"**
- - `SKIP_MOCK_OKHTTP` for okhttp service to be mocked or not (default SKIP_MOCK_OKHTTP = false)
- - `SKIP_MOCK_APACHE` for apache service to be mocked or not (default SKIP_MOCK_APACHE = false)
- - `SKIP_MOCK_GOOGLE_MAPS` for google_maps service to be mocked or not (default SKIP_MOCK_GOOGLE_MAPS = false)
- - `SKIP_MOCK_SQL` for sql service to be mocked or not (default SKIP_MOCK_SQL = false)
- - `SKIP_MOCK_REDIS` for redis service to be mocked or not (default SKIP_MOCK_REDIS = false)
-
-
-- **Generate testcases**
- - To generate/capture TestCases set and run your application.
- 1. Set `KEPLOY_MODE = record` (default "off")
- 2. Run your application.
- 3. Make some API calls.
-
-- **Run the testcases**
- - **Note:** Before running tests stop the sample application.
-
- - Set `KEPLOY_MODE = test` (default "off")
- - **Using IDE:** _(for local use-case we prefer running tests via IDE)_
- 1. Run your application.
- 2. You can also run the application with coverage to see the test coverage.
-
- - If you want to run keploy tests along with other unit testcases. You will be required to set the `javaagent` again in your test profile just like below.
-
- 
-
- 1. Add below code in your testfile and run it with or without coverage.
-
- ```java
- @Test
- public void TestKeploy() throws InterruptedException {
-
- CountDownLatch countDownLatch = HaltThread.getInstance().getCountDownLatch();
- Mode.setTestMode();
-
- new Thread(() -> {
- .main(new String[]{""});
- countDownLatch.countDown();
- }).start();
-
- countDownLatch.await();
- assertTrue(AssertKTests.result(), "Keploy Test Result");
- }
- ```
-
- 2. **Using command line**
- - Add maven-surefire-plugin to your *pom.xml*. In ` ` don't add jacoco agent if you don't want coverage report.
-
- ```xml
-
- org.apache.maven.plugins
- maven-surefire-plugin
- 2.22.2
-
-
-
-
- -javaagent:.jar
- -javaagent:${settings.localRepository}/org/jacoco/org.jacoco.agent/0.8.7/org.jacoco.agent-0.8.7-runtime.jar=destfile=target/jacoco.exec-->
-
-
-
- target/jacoco.exec
-
-
-
-
- ```
- - If you want coverage report also add Jacoco plugin to your *pom.xml*.
- ```xml
-
- org.jacoco
- jacoco-maven-plugin
- 0.8.5
-
-
- prepare-agent
-
- prepare-agent
-
-
-
- report
- prepare-package
-
- report
-
-
-
- post-unit-test
- test
-
- report
-
-
-
-
- target/jacoco.exec
-
- target/my-reports
-
-
-
-
- ```
- - Run your tests using command : `mvn test`.
-
-## Want stubs for unit test cases as well?
-- Java-sdk also supports mocking feature for unit testcase, where you write your own unit test cases and use keploy generated mocks as stubs.
-
-### Usage
-- Set `javaagent` in your unit test file configuration.
-- You just need to set the name of the mock as shown below.
```java
- @Test
- public void testHttpCall() throws Exception {
- new MockLib("okhttpCall"); //setting name of the mock
- ``` your unit test case code goes here ```
- }
+import io.keploy.servlet.KeployMiddleware;
+import org.springframework.context.annotation.Import;
+
+@Import(KeployMiddleware.class)
+public class Application {
+}
+```
+
+For servlet-based applications, register the filter early in `web.xml`:
+
+```xml
+<filter>
+    <filter-name>middleware</filter-name>
+    <filter-class>io.keploy.servlet.KeployMiddleware</filter-class>
+</filter>
+<filter-mapping>
+    <filter-name>middleware</filter-name>
+    <url-pattern>/*</url-pattern>
+</filter-mapping>
+```
+
+The middleware starts the Java dedup control server automatically.
+
+For Jakarta Servlet stacks, non-servlet frameworks, or any application where the `javax.servlet` filter is not available, start the agent directly during application startup:
+
+```java
+import io.keploy.dedup.KeployDedupAgent;
+
+KeployDedupAgent.start();
+```
+
+### 3. Run the App with the JaCoCo Java Agent
+
+The dedup agent reads coverage in-process via JaCoCo's runtime API (`org.jacoco.agent.rt.RT.getAgent()`), so attaching the JaCoCo Java agent is the only runtime requirement in the common cases below:
+
+- Maven/Gradle dev runs where application classes are under `target/classes` or `build/classes/java/main`
+- packaged `java -jar` runs where the application classes live inside the executable jar
+
+```bash
+java -javaagent:/path/to/jacocoagent.jar -jar your-app.jar
+```
+
+If the in-process API is unavailable (for example because the JaCoCo agent is loaded into an isolated classloader), the SDK transparently falls back to JaCoCo's TCP server mode. To use the fallback explicitly, start JaCoCo in `tcpserver` mode and set `KEPLOY_JACOCO_HOST` / `KEPLOY_JACOCO_PORT`:
+
+```bash
+java -javaagent:/path/to/jacocoagent.jar=address=127.0.0.1,port=36320,output=tcpserver \
+ -jar your-app.jar
+```
+
+### 4. Replay with Keploy Enterprise
+
+Run replay with dynamic dedup enabled:
+
+```bash
+keploy test -c "java -javaagent:/path/to/jacocoagent.jar -jar your-app.jar" \
+ --dedup \
+ --language java
+```
+
+When using the TCP fallback, also pass `--pass-through-ports <port>` (e.g. `36320`, matching `KEPLOY_JACOCO_PORT`) so Keploy does not try to mock the JaCoCo control connection.
+
+After replay, run:
+
+```bash
+keploy dedup
+```
+
+To remove duplicates:
+
+```bash
+keploy dedup --rm
```
-- You can also provide location where your mocks can be stored using `KEPLOY_MOCK_PATH`. (default **/src/test/e2e/mocks** directory of your application)
+## Docker and Restricted Docker
+
+Java dedup works in native, Docker, and restricted Docker environments as long as `/tmp` is shared and writable between Keploy Enterprise and the Java process. In Docker Compose flows, Enterprise can inject that shared `/tmp` mount when it rewrites the Compose file for replay.
+
+Keploy Enterprise and the Java SDK communicate over these Unix sockets:
+
+- `/tmp/coverage_control.sock`
+- `/tmp/coverage_data.sock`
+
+Without a shared `/tmp`, dedup will not work inside containers because Enterprise and the Java process will be writing to different socket paths.
+
+## Configuration
-- **Generate mocks**
- 1. Record mocks by setting `KEPLOY_MODE=record` and run your test file.
- 2. You will be able to see _editable_ and _readable_ mocks at your provided location.
-- **Run your unit testcases**
- 1. Just set `KEPLOY_MODE=test`, run your test file and you are good to go.
+- `KEPLOY_JACOCO_HOST`: JaCoCo TCP host used when the in-process runtime API is unavailable. Default: `127.0.0.1`
+- `KEPLOY_JACOCO_PORT`: JaCoCo TCP port used when the in-process runtime API is unavailable. Default: `36320`
+- `KEPLOY_JAVA_CLASS_DIRS`: optional comma-separated class or jar locations to analyze for executed lines when your build output lives outside the standard locations
+- `KEPLOY_JAVA_CLASSPATH_FALLBACK`: scans the full classpath if standard class roots and the executable jar do not provide application classes. Default: `false`
+- `KEPLOY_JAVA_DEDUP_DISABLED`: disables the Java dedup agent when set to `true`, `1`, or `yes`
-#### 🤩 See, you didn't even need to create a stub for your unit test cases, it's all generated using the java-sdk mock library.
+## Sample
-## Community support
+For a working reference, see the Java dedup sample in `keploy/samples-java`:
-We'd love to collaborate with you to make Keploy great. To get started:
+- `samples-java/java-dedup`
-* [Slack](https://join.slack.com/t/keploy/shared_invite/zt-12rfbvc01-o54cOG0X1G6eVJTuI_orSA) - Discussions with the
- community and the team.
-* [GitHub](https://github.com/keploy/java-sdk/issues) - For bug reports and feature requests.
+That sample is used in CI to validate Java dynamic dedup for JDK 8, 17, and 21 across native, Docker, and restricted Docker runs.
diff --git a/agent/README.md b/agent/README.md
deleted file mode 100644
index 096193d1..00000000
--- a/agent/README.md
+++ /dev/null
@@ -1 +0,0 @@
-Module for java-agent.
\ No newline at end of file
diff --git a/agent/pom.xml b/agent/pom.xml
deleted file mode 100644
index e91b3c76..00000000
--- a/agent/pom.xml
+++ /dev/null
@@ -1,114 +0,0 @@
-
-
- java-sdk
- io.keploy
- 1.0.0-SNAPSHOT
-
- 4.0.0
-
- agent
-
-
- UTF-8
- 1.8
- 1.8
-
- Agent For Keploy SDK
- This agent is used to mock infra calls for an API
-
- https://github.com/keploy/java-sdk
-
-
- The Apache License, Version 2.0
- http://www.apache.org/licenses/LICENSE-2.0.txt
-
-
-
-
- sarthak160
- Sarthak
- sarthak@keploy.io
-
-
- gouravkrosx
- Gourav Kumar
- gourav.kumar@keploy.io
-
-
-
- scm:git@github.com:keploy/java-sdk.git
- scm:git@github.com:keploy/java-sdk.git
- https://github.com/keploy/java-sdk.git
-
-
-
-
- io.keploy
- integration
- 1.0.0-SNAPSHOT
-
-
- net.bytebuddy
- byte-buddy
- 1.12.14
-
-
- net.bytebuddy
- byte-buddy-agent
- 1.12.14
-
-
- org.apache.httpcomponents
- httpcore
- 4.4.13
- compile
-
-
- com.squareup.okhttp3
- okhttp
-
- 3.14.9
-
-
-
-
-
-
-
- org.apache.maven.plugins
- maven-shade-plugin
- 3.2.1
-
-
-
- package
-
- shade
-
-
-
-
-
- io.keploy.agent.KAgent
-
-
-
-
-
- *:*
-
- META-INF/*.SF
- META-INF/*.DSA
- META-INF/*.RSA
-
-
-
-
-
-
-
-
-
-
diff --git a/agent/src/main/java/io/keploy/advice/CustomGoogleResponseAdvice.java b/agent/src/main/java/io/keploy/advice/CustomGoogleResponseAdvice.java
deleted file mode 100644
index e7f50283..00000000
--- a/agent/src/main/java/io/keploy/advice/CustomGoogleResponseAdvice.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package io.keploy.advice;
-
-import io.keploy.regression.Mode;
-import io.keploy.regression.context.Context;
-import io.keploy.regression.context.Kcontext;
-import io.keploy.googleMaps.CustomHttpResponses;
-import net.bytebuddy.asm.Advice;
-import okhttp3.Response;
-import okio.BufferedSource;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.lang.reflect.Method;
-import java.util.Objects;
-
-/**
- * This class is used for intercepting method parseResponse of OkHttpPendingResult class and to record required data from
- * the method
- */
-public class CustomGoogleResponseAdvice {
-
- /**
- * This method gets executed before method parseResponse of OkHttpPendingResult class. In record mode it saves the
- * data from the parseResponse method arguments and help in recording mocks/tests.
- */
- @Advice.OnMethodEnter
- static void enterMethods(@Advice.Origin Method method, @Advice.AllArguments Object[] obj) throws Exception {
- final Logger logger = LogManager.getLogger(CustomGoogleResponseAdvice.class);
-
- logger.debug("inside OnMethodEnterAdvice of CustomGoogleResponseAdvice for method: {}", method);
- Response response = (Response) obj[1];
-
- Kcontext kctx = Context.getCtx();
-
- if (kctx == null) {
- logger.debug("[CustomGoogleResponseAdvice] keploy context is null");
- } else if (kctx.getMode().getModeFromContext().equals(Mode.ModeType.MODE_RECORD)) {
- logger.debug("[CustomGoogleResponseAdvice] keploy mode: " + kctx.getMode());
- CustomHttpResponses.googleMapResponse = response;
-
- if (response.body() != null) {
- final BufferedSource source = Objects.requireNonNull(response.body()).source();
- source.request(Integer.MAX_VALUE);
- okio.ByteString snapshot = source.buffer().snapshot();
- String body = "";
- if (!response.body().contentType().type().contains("image")) {
- logger.debug("not an image");
- body = snapshot.utf8();
- }
- CustomHttpResponses.googleMapResBody = body;
- }
- } else if (kctx.getMode().getModeFromContext().equals(Mode.ModeType.MODE_OFF) || kctx.getMode().getModeFromContext().equals(Mode.ModeType.MODE_TEST)) {
- logger.debug("[CustomGoogleResponseAdvice] keploy mode: " + kctx.getMode());
- }
- }
-}
diff --git a/agent/src/main/java/io/keploy/advice/OkHttpAdvice_Kotlin.java b/agent/src/main/java/io/keploy/advice/OkHttpAdvice_Kotlin.java
deleted file mode 100644
index de6e5e60..00000000
--- a/agent/src/main/java/io/keploy/advice/OkHttpAdvice_Kotlin.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package io.keploy.advice;
-
-import io.keploy.httpClients.OkHttpInterceptor_Kotlin;
-import net.bytebuddy.asm.Advice;
-import okhttp3.OkHttpClient;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.lang.reflect.Constructor;
-
-
-/**
- * This class is used for intercepting constructor of OkHttpClient$Builder class and to add an interceptor to its builder
- */
-public class OkHttpAdvice_Kotlin {
-
- /**
- * This method gets executed before the constructor of OkHttpClient$Builder class.This does nothing as we don't
- * want to change anything before the invocation of OkHttpClient$Builder constructor.
- */
- @Advice.OnMethodEnter
- static void enterMethods(@Advice.Origin Constructor constructor) throws Exception {
- }
-
- /**
- * This method gets executed after constructor of OkHttpClient$Builder class and Adds a interceptor to its builder
- *
- * @param builder - OkHttpClient.Builder
- */
- @Advice.OnMethodExit
- static void exitMethods(@Advice.Origin Constructor constructor, @Advice.This OkHttpClient.Builder builder) {
- final Logger logger = LogManager.getLogger(OkHttpAdvice_Kotlin.class);
-
- logger.debug("inside OnMethodExitAdvice of OkHttpAdvice_Kotlin for constructor: {}", constructor);
-
- OkHttpInterceptor_Kotlin okHttpInterceptor = new OkHttpInterceptor_Kotlin();
- builder.addInterceptor(okHttpInterceptor);
- }
-}
diff --git a/agent/src/main/java/io/keploy/advice/ksql/DataBaseMetaData_Advice.java b/agent/src/main/java/io/keploy/advice/ksql/DataBaseMetaData_Advice.java
deleted file mode 100644
index f01d76eb..00000000
--- a/agent/src/main/java/io/keploy/advice/ksql/DataBaseMetaData_Advice.java
+++ /dev/null
@@ -1,38 +0,0 @@
-package io.keploy.advice.ksql;
-
-import io.keploy.ksql.KDatabaseMetaData;
-import net.bytebuddy.asm.Advice;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.lang.reflect.Constructor;
-import java.sql.DatabaseMetaData;
-
-
-/**
- * This class is used for intercepting constructor of NewProxyDatabaseMetaData class and to replace value of a field to
- * a custom value i.e. KDatabaseMetaData on exit of that constructor method.
- */
-public class DataBaseMetaData_Advice {
-
- /**
- * This method gets executed before the constructor of NewProxyDatabaseMetaData class.This does nothing as we don't
- * want to change anything before the invocation of NewProxyDatabaseMetaData constructor.
- */
- @Advice.OnMethodEnter
- static void enterMethods(@Advice.Origin Constructor constructor) throws Exception {
- }
-
- /**
- * This method gets executed after constructor of NewProxyDatabaseMetaData class and replaces the value of field metaData
- * to KDatabaseMetaData .
- *
- * @param metaData - a field in NewProxyDatabaseMetaData class
- */
- @Advice.OnMethodExit
- static void exitMethods(@Advice.Origin Constructor constructor, @Advice.FieldValue(value = "inner", readOnly = false) DatabaseMetaData metaData) {
- final Logger logger = LogManager.getLogger(DataBaseMetaData_Advice.class);
- logger.debug("inside OnMethodExitAdvice for constructor: {}", constructor);
- metaData = new KDatabaseMetaData();
- }
-}
diff --git a/agent/src/main/java/io/keploy/advice/ksql/HealthCheckInterceptor.java b/agent/src/main/java/io/keploy/advice/ksql/HealthCheckInterceptor.java
deleted file mode 100644
index c030a0ce..00000000
--- a/agent/src/main/java/io/keploy/advice/ksql/HealthCheckInterceptor.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package io.keploy.advice.ksql;
-
-import net.bytebuddy.asm.Advice;
-
-import java.lang.reflect.Method;
-
-/**
- * This class is used for intercepting method withDetail of Health$Builder class and to replace an argument value to
- * a custom value on entry of that withDetail method.
- */
-public class HealthCheckInterceptor {
-
- /**
- * This method gets executed before the withDetail method of Health$Builder class.In test mode of keploy this method
- * will throw error as we don't get health check from database. so we mock the health check response and feed it to
- * the method. Whenever the value of the health check argument is NULL we replace it with "HI" so that the method
- * never fails
- */
- @Advice.OnMethodEnter
- public static void enterMethod(@Advice.Origin Method method, @Advice.Argument(value = 0, readOnly = false) String key, @Advice.Argument(value = 1, readOnly = false) Object value) {
- if (value == null) {
- value = "HI";
- }
- }
-
- /**
- * This method gets executed after the withDetail method of Health$Builder class.This does nothing as we don't
- * want to change anything after the completion of withDetail method of Health$Builder class.
- */
- @Advice.OnMethodExit
- public static void exitMethod(@Advice.Origin Method method) {
- }
-}
diff --git a/agent/src/main/java/io/keploy/advice/ksql/RegisterDialect.java b/agent/src/main/java/io/keploy/advice/ksql/RegisterDialect.java
deleted file mode 100644
index 709125c1..00000000
--- a/agent/src/main/java/io/keploy/advice/ksql/RegisterDialect.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package io.keploy.advice.ksql;
-
-
-import io.keploy.ksql.KDriver;
-import net.bytebuddy.asm.Advice;
-
-import java.lang.reflect.Constructor;
-import java.util.Map;
-
-/**
- * This class is used for intercepting constructor of JpaProperties class and to modify the value of a field value of
- * the class on exit of that constructor method.
- */
-public class RegisterDialect {
-
- /**
- * This method gets executed before the constructor of JpaProperties class.This does nothing as we don't
- * want to change anything before the invocation of JpaProperties constructor.
- */
- @Advice.OnMethodEnter
- static void enterMethods(@Advice.Origin Constructor constructor) throws Exception {
- }
-
- /**
- * This method gets executed after constructor of JpaProperties class and modifies the value of the field - properties.
- *
- * @param properties - a field in JpaProperties class
- */
- @Advice.OnMethodExit
- static void exitMethods(@Advice.Origin Constructor constructor, @Advice.FieldValue(readOnly = false, value = "properties") Map properties) throws Exception {
- properties.put("hibernate.dialect", KDriver.Dialect);
- }
-}
diff --git a/agent/src/main/java/io/keploy/advice/ksql/RegisterDriverAdvice.java b/agent/src/main/java/io/keploy/advice/ksql/RegisterDriverAdvice.java
deleted file mode 100644
index 2307be3c..00000000
--- a/agent/src/main/java/io/keploy/advice/ksql/RegisterDriverAdvice.java
+++ /dev/null
@@ -1,53 +0,0 @@
-package io.keploy.advice.ksql;
-
-import io.keploy.ksql.KDriver;
-import net.bytebuddy.asm.Advice;
-
-import java.lang.reflect.Method;
-
-/**
- * This class is used for intercepting method setDriverClassName of DataSourceProperties class and to replace an argument value to
- * a custom value on entry of setDriverClassName method.
- */
-public class RegisterDriverAdvice {
-
- /**
- * This method gets executed before the setDriverClassName method of DataSourceProperties class. According to the
- * driverClassName that is present, Dialect of KDriver is changed and driverClassName value is replaced with KDriver
- * path
- */
- @Advice.OnMethodEnter
- public static void enterMethod(@Advice.Origin Method method, @Advice.Argument(value = 0, readOnly = false) String driverClassName) {
-
- if (driverClassName != null && !driverClassName.equals("io.keploy.ksql.KDriver")) {
- KDriver.DriverName = driverClassName;
- switch (driverClassName) {
- case "org.postgresql.Driver":
- KDriver.Dialect = "org.hibernate.dialect.PostgreSQLDialect";
- break;
- case "com.mysql.cj.jdbc.Driver":
- case "com.mysql.jdbc.Driver":
- KDriver.Dialect = "org.hibernate.dialect.MySQLDialect";
- break;
- case "oracle.jdbc.driver.OracleDriver":
- case "oracle.jdbc.OracleDriver":
- KDriver.Dialect = "org.hibernate.dialect.Oracle10gDialect";
- break;
- case "org.h2.Driver":
- KDriver.Dialect = "org.hibernate.dialect.H2Dialect";
- break;
- default:
- System.out.println("Dialect for driver: " + driverClassName + " is not supported yet");
- }
- }
- driverClassName = "io.keploy.ksql.KDriver";
- }
-
- /**
- * This method gets executed after the setDriverClassName method of DataSourceProperties class.This does nothing as we don't
- * want to change anything after the completion of the setDriverClassName method of DataSourceProperties class.
- */
- @Advice.OnMethodExit
- public static void exitMethod(@Advice.Origin Method method) {
- }
-}
diff --git a/agent/src/main/java/io/keploy/advice/ksql/RegisterDriverAdvice_Interceptor.java b/agent/src/main/java/io/keploy/advice/ksql/RegisterDriverAdvice_Interceptor.java
deleted file mode 100644
index fe8d314d..00000000
--- a/agent/src/main/java/io/keploy/advice/ksql/RegisterDriverAdvice_Interceptor.java
+++ /dev/null
@@ -1,54 +0,0 @@
-package io.keploy.advice.ksql;
-
-import io.keploy.ksql.KDriver;
-import net.bytebuddy.implementation.bind.annotation.Origin;
-import net.bytebuddy.implementation.bind.annotation.SuperCall;
-
-import java.lang.reflect.Method;
-import java.util.concurrent.Callable;
-
-
-/**
- * This class is used for intercepting method determineDriverClassName of DataSourceProperties class and returns a
- * custom value instead of value returned by the determineDriverClassName method .
- */
-public class RegisterDriverAdvice_Interceptor {
-
- /**
- * This method will get called instead of determineDriverClassName
- * @param client - original method client
- * @param method - contains all the details regarding original method
- * @return - path to Driver class
- */
- public static String execute(@SuperCall Callable client, @Origin Method method) throws Exception {
-
- // Getting actual response from original method
- String s = client.call();
-
- // Changing KDriver Dialect according to the response from original method
- if (s != null && !s.equals("io.keploy.ksql.KDriver")) {
- KDriver.DriverName = s;
- switch (s) {
- case "org.postgresql.Driver":
- KDriver.Dialect = "org.hibernate.dialect.PostgreSQLDialect";
- break;
- case "com.mysql.cj.jdbc.Driver":
- case "com.mysql.jdbc.Driver":
- KDriver.Dialect = "org.hibernate.dialect.MySQLDialect";
- break;
- case "oracle.jdbc.driver.OracleDriver":
- case "oracle.jdbc.OracleDriver":
- KDriver.Dialect = "org.hibernate.dialect.Oracle10gDialect";
- break;
- case "org.h2.Driver":
- KDriver.Dialect = "org.hibernate.dialect.H2Dialect";
- break;
- default:
- System.out.println("Dialect for driver: " + s + " is not supported yet");
- }
- }
-
- // returning wrapped Driver class path
- return "io.keploy.ksql.KDriver";
- }
-}
\ No newline at end of file
diff --git a/agent/src/main/java/io/keploy/advice/ksql/SetDdlAuto_Advice.java b/agent/src/main/java/io/keploy/advice/ksql/SetDdlAuto_Advice.java
deleted file mode 100644
index c1b9695c..00000000
--- a/agent/src/main/java/io/keploy/advice/ksql/SetDdlAuto_Advice.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package io.keploy.advice.ksql;
-
-import net.bytebuddy.asm.Advice;
-
-import java.lang.reflect.Method;
-
-/**
- * This class is used for intercepting method setDdlAuto of HibernateProperties class and to replace an argument value to
- * a custom value on entry of setDdlAuto method.
- */
-public class SetDdlAuto_Advice {
-
- /**
- * This method gets executed before the setDdlAuto method of HibernateProperties class. According to the
- * Keploy mode that is present, The argument value will be changed.
- */
- @Advice.OnMethodEnter
- public static void enterMethod(@Advice.Origin Method method, @Advice.Argument(value = 0, readOnly = false) String ddlAuto) {
- ddlAuto = (System.getenv("KEPLOY_MODE").equals("test")) ? "none" : ddlAuto;
- }
-
- /**
- * This method gets executed after the setDdlAuto method of HibernateProperties class.This does nothing as we don't
- * want to change anything after the completion of the setDdlAuto method of HibernateProperties class.
- */
- @Advice.OnMethodExit
- public static void exitMethod(@Advice.Origin Method method) {
- }
-}
diff --git a/agent/src/main/java/io/keploy/advice/ksql/SetEnabled_Advice.java b/agent/src/main/java/io/keploy/advice/ksql/SetEnabled_Advice.java
deleted file mode 100644
index 3a460e7d..00000000
--- a/agent/src/main/java/io/keploy/advice/ksql/SetEnabled_Advice.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package io.keploy.advice.ksql;
-
-import net.bytebuddy.asm.Advice;
-
-import java.lang.reflect.Method;
-
-/**
- * This class is used for intercepting method setEnabled of LiquibaseProperties class and to replace an argument value to
- * a custom value on entry of setEnabled method.
- */
-public class SetEnabled_Advice {
-
- /**
- * This method gets executed before the setEnabled method of LiquibaseProperties class. According to the
- * Keploy mode that is present, The argument value will be changed
- */
- @Advice.OnMethodEnter
- public static void enterMethod(@Advice.Origin Method method, @Advice.Argument(value = 0, readOnly = false) boolean enabled) {
- enabled = (System.getenv("KEPLOY_MODE").equals("test")) ? false : enabled;
- }
-
- /**
- * This method gets executed after the setEnabled method of LiquibaseProperties class.This does nothing as we don't
- * want to change anything after the completion of the setEnabled method of LiquibaseProperties class.
- */
- @Advice.OnMethodExit
- public static void exitMethod(@Advice.Origin Method method) {
- }
-}
diff --git a/agent/src/main/java/io/keploy/agent/KAgent.java b/agent/src/main/java/io/keploy/agent/KAgent.java
deleted file mode 100644
index 711db04f..00000000
--- a/agent/src/main/java/io/keploy/agent/KAgent.java
+++ /dev/null
@@ -1,520 +0,0 @@
-package io.keploy.agent;
-
-import net.bytebuddy.ByteBuddy;
-import net.bytebuddy.agent.builder.AgentBuilder;
-import net.bytebuddy.asm.Advice;
-import net.bytebuddy.asm.AsmVisitorWrapper;
-import net.bytebuddy.description.field.FieldDescription;
-import net.bytebuddy.description.field.FieldList;
-import net.bytebuddy.description.method.MethodDescription;
-import net.bytebuddy.description.method.MethodList;
-import net.bytebuddy.description.type.TypeDescription;
-import net.bytebuddy.dynamic.ClassFileLocator;
-import net.bytebuddy.dynamic.scaffold.TypeValidation;
-import net.bytebuddy.implementation.MethodDelegation;
-import net.bytebuddy.jar.asm.ClassVisitor;
-import net.bytebuddy.jar.asm.MethodVisitor;
-import net.bytebuddy.jar.asm.signature.SignatureReader;
-import net.bytebuddy.jar.asm.signature.SignatureVisitor;
-import net.bytebuddy.jar.asm.signature.SignatureWriter;
-import net.bytebuddy.matcher.ElementMatcher;
-import net.bytebuddy.pool.TypePool;
-import net.bytebuddy.utility.OpenedClassReader;
-import org.apache.http.HttpResponse;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import net.bytebuddy.dynamic.DynamicType.Builder;
-
-import java.lang.instrument.Instrumentation;
-import java.lang.reflect.Field;
-import java.sql.DatabaseMetaData;
-import java.util.*;
-
-
-import static net.bytebuddy.matcher.ElementMatchers.*;
-
-/**
- * This KAgent is a specially crafted Class and entry point for instrumentation. It utilizes the Instrumentation API that
- * JVM provides to alter existing byte-code that is loaded in a JVM. This runs before main class. so whatever changes
- * that are needed for running keploy in different modes are done here.
- */
-public class KAgent {
-
- private static final Logger logger = LogManager.getLogger(KAgent.class);
-
- /**
- * premain is the method which runs before main. This will statically load the agent using -javaagent parameter at
- * JVM startup
- *
- * @param arg - It is a String that can be used to pass arguments to the agent. These arguments can be used to
- * configure the behavior of the agent.
- * @param instrumentation - It is an instance of the Instrumentation class, which provides a mechanism for the agent
- * to inspect and modify the byte code of classes that are loaded into the JVM.
- */
- public static void premain(String arg, Instrumentation instrumentation) {
-
- logger.debug("inside premain method");
- logger.debug("KeployMode:{}", System.getenv("KEPLOY_MODE"));
-
- if (System.getenv("KEPLOY_MODE") == null || Objects.equals(System.getenv("KEPLOY_MODE"), "off")) {
- return;
- }
-
- String apacheClient = "org.apache.http.impl.client.CloseableHttpClient";
- String okhttpClientBuilder = "okhttp3.OkHttpClient$Builder";
- String okhttp_java = "com.squareup.okhttp.OkHttpClient";
- String okHttpPendingResult = "com.google.maps.internal.OkHttpPendingResult";
- String jdbc = "org.springframework.boot.autoconfigure.jdbc.DataSourceProperties";
- String jpaHibernate= "org.springframework.boot.autoconfigure.orm.jpa.HibernateProperties";
- String liquibase = "org.springframework.boot.autoconfigure.liquibase.LiquibaseProperties";
- String jpaProperties ="org.springframework.boot.autoconfigure.orm.jpa.JpaProperties";
- String health = "org.springframework.boot.actuate.health.Health$Builder";
- String proxyDB = "com.mchange.v2.c3p0.impl.NewProxyDatabaseMetaData";
- String redisJedisPool = "redis.clients.jedis.JedisPool";
- String redisJedisBinary = "redis.clients.jedis.BinaryClient";
- // String mongo= "org.springframework.boot.autoconfigure.data.mongo.MongoDataProperties"; //TODO: add mongo support
-
- // String asyncApacheClient = "org.apache.http.impl.nio.client.CloseableHttpAsyncClient";
- // String internalhttpasyncClient = "org.apache.http.impl.nio.client.InternalHttpAsyncClient";
-
- // Using the AgentBuilder class we can define a Java agent.
- new AgentBuilder.Default(new ByteBuddy().with(TypeValidation.DISABLED))
-
- // to see the full logs in case of debugging, comment out the below line.
- // .with(AgentBuilder.Listener.StreamWriting.toSystemOut())
- // .with(AgentBuilder.Listener.StreamWriting.toSystemOut().withErrorsOnly())
-
- /*
- Transformer for okhttp client up to version 2.7.5. This transformer intercepts and runs
- OkHttpAdvice_Java Advice before and after the execution of OkHttpClient class constructor.
- OkHttpAdvice_Java Advice will modify things according to the Keploy mode and allows to record tests,
- mocks and test them.
- */
- .type(named(okhttp_java))
- .transform(((builder, typeDescription, classLoader, javaModule, protectionDomain) -> {
- logger.debug("inside OkHttpInterceptor_Java transformer");
-
- // if the service (for e.g.: OKHTTP) is not set or set to true, then mock it
- if (System.getenv("SKIP_MOCK_OKHTTP") == null || !Boolean.parseBoolean(System.getenv("SKIP_MOCK_OKHTTP")) )
- {
- logger.debug("mocking OKHTTP");
- return builder
- .constructor(isDefaultConstructor()).intercept(Advice.to(TypePool.Default.ofSystemLoader().describe("io.keploy.advice.OkHttpAdvice_Java").resolve(), ClassFileLocator.ForClassLoader.ofSystemLoader()));
- }
- logger.debug("skip mocking OKHTTP");
- return builder;
- }))
-
- /*
- Transformer for okhttp client for version 3.0+. This transformer intercepts and runs
- OkHttpAdvice_Kotlin Advice before and after the execution of OkHttpClient$Builder class constructor.
- OkHttpAdvice_Kotlin Advice will modify things according to the Keploy mode and allows to record tests,
- mocks and test them.
- */
- .type(named(okhttpClientBuilder))
- .transform(((builder, typeDescription, classLoader, javaModule, protectionDomain) -> {
-
- logger.debug("inside OkHttpInterceptor_Kotlin transformer");
-
- if (System.getenv("SKIP_MOCK_OKHTTP") == null || !Boolean.parseBoolean(System.getenv("SKIP_MOCK_OKHTTP")) )
- {
- logger.debug("mocking OKHTTP");
- return builder.constructor(isDefaultConstructor())
- .intercept(Advice.to(TypePool.Default.ofSystemLoader().describe("io.keploy.advice.OkHttpAdvice_Kotlin").resolve(), ClassFileLocator.ForClassLoader.ofSystemLoader()));
- }
- logger.debug("skip mocking OKHTTP");
- return builder;
- }))
-
- /*
- Transformer for apache client. This transformer runs methods of ApacheInterceptor instead of method
- execute of CloseableHttpClient class. ApacheInterceptor methods modify things according to the Keploy
- mode and allows to record tests, mocks and test them.
- */
- .type(named(apacheClient))
- .transform(((builder, typeDescription, classLoader, javaModule, protectionDomain) -> {
-
- logger.debug("inside ApacheInterceptor transformer");
-
- if (System.getenv("SKIP_MOCK_APACHE") == null || !Boolean.parseBoolean(System.getenv("SKIP_MOCK_APACHE")) )
- {
- logger.debug("mocking APACHE");
- try {
- String apacheInterceptor = "io.keploy.httpClients.ApacheInterceptor";
-
- ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
- new ClassFileLocator.Compound(ClassFileLocator.ForClassLoader.of(contextClassLoader),
- ClassFileLocator.ForClassLoader.ofSystemLoader());
- TypePool.Resolution resolution = TypePool.Default.of(ClassFileLocator.ForClassLoader.of(contextClassLoader)).describe(apacheInterceptor);
-
- String request = "org.apache.http.client.methods.HttpUriRequest";
- String context = "org.apache.http.protocol.HttpContext";
- String host = "org.apache.http.HttpHost";
-
- String response = "org.apache.http.client.methods.CloseableHttpResponse";
-
- ElementMatcher.Junction md1 = takesArgument(0, named(request)).and(takesArgument(1, named(context)));
- ElementMatcher.Junction md2 = takesArgument(0, named(host)).and(takesArgument(1, named(request)));
- ElementMatcher.Junction md3 = takesArgument(0, named(host)).and(takesArgument(1, named(request))).and(takesArgument(2, named(context)));
-
- return builder.method(named("execute").and(md1.or(md2).or(md3))
- .and(returns(isSubTypeOf(HttpResponse.class))))
- // .intercept(MethodDelegation.to(resolution.resolve())); // contains spring class loader also.
- .intercept(MethodDelegation.to(TypePool.Default.ofSystemLoader().describe(apacheInterceptor).resolve()));
- } catch (Exception e) {
- logger.error("unable to intercept apache client");
- e.printStackTrace();
- return builder;
- }
- }
- logger.debug("skip mocking APACHE");
- return builder;
- }))
-
- /*
- Transformer for google-maps-services.This transformer intercepts and runs CustomGoogleResponseAdvice
- before parseResponse method of OkHttpPendingResult class and runs GoogleMapsInterceptor methods
- instead of await method of OkHttpPendingResult. These will modify things according to the Keploy mode
- and allows to record tests, mocks and test them.
- */
- .type(named(okHttpPendingResult))
- .transform((builder, typeDescription, classLoader, javaModule, protectionDomain) -> {
-
- logger.debug("inside GoogleMapsInterceptor transformer");
-
- if (System.getenv("SKIP_MOCK_GOOGLE_MAPS") == null || !Boolean.parseBoolean(System.getenv("SKIP_MOCK_GOOGLE_MAPS")) )
- {
- logger.debug("mocking google maps");
- return builder
- .method(named("await")).intercept(MethodDelegation.to(TypePool.Default.ofSystemLoader().describe("io.keploy.googleMaps.GoogleMapsInterceptor").resolve()))
- .method(named("parseResponse")).intercept(Advice.to(TypePool.Default.ofSystemLoader().describe("io.keploy.advice.CustomGoogleResponseAdvice").resolve(), ClassFileLocator.ForClassLoader.ofSystemLoader()));
- }
- logger.debug("skip mocking google maps");
- return builder;
- })
-
- // Following Transformers are for sql mocking.
- .type(named(jdbc))
- .transform((builder, typeDescription, classLoader, javaModule, protectionDomain) -> {
-
- logger.debug("Inside RegisterDriverAdvice1 Transformer");
-
- if (System.getenv("SKIP_MOCK_SQL") == null || !Boolean.parseBoolean(System.getenv("SKIP_MOCK_SQL")) )
- {
- logger.debug("mocking sql RegisterDriverAdvice1");
- return builder.method(named("setDriverClassName"))
- .intercept(Advice.to(TypePool.Default.ofSystemLoader().describe("io.keploy.advice.ksql.RegisterDriverAdvice").resolve(), ClassFileLocator.ForClassLoader.ofSystemLoader()));
- }
- logger.debug("skip mocking sql");
- return builder;
- })
- .type(named(jdbc))
- .transform((builder, typeDescription, classLoader, javaModule, protectionDomain) -> {
-
- logger.debug("Inside RegisterDriverAdvice2 Transformer");
-
- if (System.getenv("SKIP_MOCK_SQL") == null || !Boolean.parseBoolean(System.getenv("SKIP_MOCK_SQL")) )
- {
- logger.debug("mocking sql RegisterDriverAdvice2");
- return builder.method(named("determineDriverClassName"))
- .intercept(MethodDelegation.to(TypePool.Default.ofSystemLoader().describe("io.keploy.advice.ksql.RegisterDriverAdvice_Interceptor").resolve()));
- }
- logger.debug("skip mocking sql");
- return builder;
- })
- .type(named(jpaHibernate))
- .transform((builder, typeDescription, classLoader, javaModule, protectionDomain) -> {
- logger.debug("Inside HibernateProperties Transformer for setDdlAuto");
- if (System.getenv("SKIP_MOCK_SQL") == null || !Boolean.parseBoolean(System.getenv("SKIP_MOCK_SQL")) )
- {
- logger.debug("mocking sql HibernateProperties");
- return builder.method(named("setDdlAuto").and(takesArgument(0, String.class)))
- .intercept(Advice.to(TypePool.Default.ofSystemLoader().describe("io.keploy.advice.ksql.SetDdlAuto_Advice").resolve(), ClassFileLocator.ForClassLoader.ofSystemLoader()));
- }
- logger.debug("skip mocking sql");
- return builder;
- })
- .type(named(liquibase))
- .transform((builder, typeDescription, classLoader, javaModule, protectionDomain) -> {
-
- logger.debug("Inside LiquibaseProperties Transformer for setEnabled");
-
- if (System.getenv("SKIP_MOCK_SQL") == null || !Boolean.parseBoolean(System.getenv("SKIP_MOCK_SQL")) )
- {
- logger.debug("mocking sql LiquibaseProperties");
- return builder.method(named("setEnabled")
- .and(takesArgument(0, Boolean.class)))
- .intercept(Advice.to(TypePool.Default.ofSystemLoader().describe("io.keploy.advice.ksql.SetEnabled_Advice").resolve(), ClassFileLocator.ForClassLoader.ofSystemLoader()));
- }
- logger.debug("skip mocking sql");
- return builder;
- })
- .type(named(jpaProperties))
- .transform(((builder, typeDescription, classLoader, module, protectionDomain) -> {
-
- logger.debug("Inside RegisterDialect Transformer");
-
- if (System.getenv("SKIP_MOCK_SQL") == null || !Boolean.parseBoolean(System.getenv("SKIP_MOCK_SQL")) )
- {
- logger.debug("mocking sql RegisterDialect");
- return builder.constructor(isDefaultConstructor())
- .intercept(Advice.to(TypePool.Default.ofSystemLoader().describe("io.keploy.advice.ksql.RegisterDialect").resolve(), ClassFileLocator.ForClassLoader.ofSystemLoader()));
- }
- logger.debug("skip mocking sql");
- return builder;
- }))
- .type(named(health))
- .transform(((builder, typeDescription, classLoader, javaModule, protectionDomain) -> {
-
- logger.debug("Inside HealthEndpoint Transformer");
-
- if (System.getenv("SKIP_MOCK_SQL") == null || !Boolean.parseBoolean(System.getenv("SKIP_MOCK_SQL")) )
- {
- logger.debug("mocking sql HealthEndpoint");
- return builder.method(named("withDetail"))
- .intercept(Advice.to(TypePool.Default.ofSystemLoader().describe("io.keploy.advice.ksql.HealthCheckInterceptor").resolve(), ClassFileLocator.ForClassLoader.ofSystemLoader()));
- }
- logger.debug("skip mocking sql");
- return builder;
- }))
- .type(named(proxyDB))
- .transform(((builder, typeDescription, classLoader, module, protectionDomain) -> {
-
- logger.debug("Inside DatabaseMetaData transformer");
-
- if (System.getenv("SKIP_MOCK_SQL") == null || !Boolean.parseBoolean(System.getenv("SKIP_MOCK_SQL")) )
- {
- logger.debug("mocking sql DatabaseMetaData");
- return builder.constructor(takesArgument(0, DatabaseMetaData.class))
- .intercept(Advice.to(TypePool.Default.ofSystemLoader().describe("io.keploy.advice.ksql.DataBaseMetaData_Advice").resolve(), ClassFileLocator.ForClassLoader.ofSystemLoader()));
- }
- logger.debug("skip mocking sql");
- return builder;
- }))
-
- /*
- Intercepting getResource method of JedisPool. getResource is a method where the redis client(Jedis)
- returns a Jedis object and also checks the connection with the server. As connection should not be
- established when Keploy is in TEST_MODE this method should be intercepted and return a Jedis object
- without checking connection.
- */
- .type(named(redisJedisPool))
- .transform(((builder, typeDescription, classLoader, module, protectionDomain) -> {
- if (System.getenv("SKIP_MOCK_REDIS") == null || !Boolean.parseBoolean(System.getenv("SKIP_MOCK_REDIS")) )
- {
- logger.debug("mocking redis");
- return builder.method(named("getResource"))
- .intercept(Advice.to(TypePool.Default.ofSystemLoader().describe("io.keploy.advice.redis.jedis.JedisPoolResource_Advice").resolve(), ClassFileLocator.ForClassLoader.ofSystemLoader()));
- }
- logger.debug("skip mocking redis");
- return builder;
- }))
-
- /*
- The whole logic and connection with Redis Server boils down to one Class that is Connection. But
- Connection is not directly used rather used as a super class for a Class BinaryClient. This
- interceptor wraps the super class of BinaryClient i.e. Connection . As a final result BinaryClient
- will be extended to a wrapped class of Connection.
- */
- .type(named(redisJedisBinary))
- .transform(((builder, typeDescription, classLoader, module, protectionDomain) -> {
- if (System.getenv("SKIP_MOCK_REDIS") == null || !Boolean.parseBoolean(System.getenv("SKIP_MOCK_REDIS")) )
- {
- logger.debug("mocking redis");
- return getBuilderForClassWrapper(builder, "redis/clients/jedis/Connection", "io/keploy/redis/jedis/KConnection");
- }
- logger.debug("skip mocking redis");
- return builder;
- }))
-
- // Interceptor for apache async-client
- // .type(named(asyncApacheClient))
- // .transform(new AgentBuilder.Transformer() {
- // @Override
- // public DynamicType.Builder> transform(DynamicType.Builder> builder, TypeDescription typeDescription, ClassLoader classLoader, JavaModule javaModule, ProtectionDomain protectionDomain) {
- // logger.debug("inside Async-ApacheInterceptor");
- // System.out.println("inside Async-ApacheInterceptor");
- //
- // String context = "org.apache.http.protocol.HttpContext";
- // String host = "org.apache.http.HttpHost";
- // String response = "org.apache.http.HttpResponse";
- //
- // String asyncApacheInterceptor = "io.keploy.httpClients.AsyncApacheInterceptor";
- //
- // ElementMatcher.Junction futureResponse = returnsGeneric(type -> type.asErasure().represents(Future.class)
- // && type.getSort().isParameterized()
- // && type.getTypeArguments().get(0).represents(HttpResponse.class));
- //
- //
- // ElementMatcher.Junction args = takesArgument(0, named(host)).and(takesArgument(1, isSubTypeOf(HttpRequest.class))).and(takesArgument(2, named(context))).and(takesGenericArgument(3, type ->
- // type.asErasure().represents(FutureCallback.class)
- // && type.getSort().isParameterized()
- // && type.getTypeArguments().get(0).represents(HttpResponse.class)
- // ));
- //
- // return builder.method(named("execute").and(args)
- // .and(futureResponse))
- // .intercept(MethodDelegation.to(TypePool.Default.ofSystemLoader().describe(asyncApacheInterceptor).resolve()));
- // }
- // })
-
- // Interceptor for elastic search -> apache InternalHttpAsyncClient
- // .type(named(internalhttpasyncClient))
- // .transform((builder, typeDescription, classLoader, javaModule, protectionDomain) -> {
- // System.out.println("Inside InternalHttpAsyncClient transformer");
- // logger.debug("inside InternalHttpAsyncClient transformer");
- //
- // System.out.println("Methods inside internalhttpAsync: " + typeDescription.getDeclaredMethods());
- //
- // String requestProducer = "org.apache.http.nio.protocol.HttpAsyncRequestProducer";
- // String context = "org.apache.http.protocol.HttpContext";
- // String internalAsyncInterceptor = "io.keploy.httpClients.ElasticSearchInterceptor";
- //
- //
- // ElementMatcher.Junction args = takesArgument(0, named(requestProducer))
- // .and(takesGenericArgument(1, type -> type.asErasure().represents(HttpAsyncResponseConsumer.class)))
- // .and(takesArgument(2, named(context)))
- // .and(takesGenericArgument(3, type -> type.asErasure().represents(FutureCallback.class)));
- //
- // return builder.method(named("execute").and(returnsGeneric(type -> type.asErasure().represents(Future.class))).and(args))
- // .intercept(MethodDelegation.to(TypePool.Default.ofSystemLoader().describe(internalAsyncInterceptor).resolve()));
- //
- // })
-
- .installOn(instrumentation);
- }
-
- // TODO Add Java Doc
- // private static boolean isJUnitTest() {
- // for (StackTraceElement element : Thread.currentThread().getStackTrace()) {
- // if (element.getClassName().startsWith("org.junit.")) {
- // return true;
- // }
- // }
- // return false;
- // }
-
- // TODO Add Java Doc
- // protected static void setEnv(Map newenv) throws Exception {
- // try {
- // Class> processEnvironmentClass = Class.forName("java.lang.ProcessEnvironment");
- // Field theEnvironmentField = processEnvironmentClass.getDeclaredField("theEnvironment");
- // theEnvironmentField.setAccessible(true);
- // Map env = (Map) theEnvironmentField.get(null);
- // env.putAll(newenv);
- // Field theCaseInsensitiveEnvironmentField = processEnvironmentClass.getDeclaredField("theCaseInsensitiveEnvironment");
- // theCaseInsensitiveEnvironmentField.setAccessible(true);
- // Map cienv = (Map) theCaseInsensitiveEnvironmentField.get(null);
- // cienv.putAll(newenv);
- // } catch (NoSuchFieldException e) {
- // Class[] classes = Collections.class.getDeclaredClasses();
- // Map env = System.getenv();
- // for (Class cl : classes) {
- // if ("java.util.Collections$UnmodifiableMap".equals(cl.getName())) {
- // Field field = cl.getDeclaredField("m");
- // field.setAccessible(true);
- // Object obj = field.get(env);
- // Map map = (Map) obj;
- // map.clear();
- // map.putAll(newenv);
- // }
- // }
- // }
- // }
-
- // A class will be replaced by another class in run time using this builder
- private static Builder getBuilderForClassWrapper(Builder builder, String host, String guest) {
- return builder.visit(
- new AsmVisitorWrapper() {
- @Override
- public int mergeWriter(int arg0) {
- return arg0;
- }
-
- @Override
- public int mergeReader(int arg0) {
- return arg0;
- }
-
- @Override
- public ClassVisitor wrap(TypeDescription instrumentedType,
- ClassVisitor classVisitor,
- net.bytebuddy.implementation.Implementation.Context implementationContext,
- TypePool typePool,
- FieldList fields,
- MethodList> methods,
- int writerFlags,
- int readerFlags) {
- return new ClassVisitor(OpenedClassReader.ASM_API, classVisitor) {
- private boolean wasMarked = false;
-
- @Override
- public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
- if (host.equals(superName)) {
- superName = guest;
- if (signature != null) {
- SignatureWriter sw = new SignatureWriter() {
- private boolean superclass = false;
-
- @Override
- public void visitFormalTypeParameter(String name) {
- superclass = false;
- super.visitFormalTypeParameter(name);
- }
-
- @Override
- public SignatureVisitor visitSuperclass() {
- superclass = true;
- return super.visitSuperclass();
- }
-
- @Override
- public void visitEnd() {
- superclass = false;
- super.visitEnd();
- }
-
- @Override
- public SignatureVisitor visitInterface() {
- superclass = false;
- return super.visitInterface();
- }
-
- @Override
- public void visitClassType(String name) {
- if (superclass && host.equals(name)) {
- name = guest;
- }
- super.visitClassType(name);
- }
- };
- new SignatureReader(signature).accept(sw);
- signature = sw.toString();
- }
- wasMarked = true;
- }
- super.visit(version, access, name, signature, superName, interfaces);
- }
-
- @Override
- public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) {
- if (wasMarked && "".equals(name)) {
- return new MethodVisitor(OpenedClassReader.ASM_API, super.visitMethod(access, name, descriptor, signature, exceptions)) {
- @Override
- public void visitMethodInsn(int opcode, String owner, String name, String descriptor, boolean isInterface) {
- if (host.equals(owner)) {
- owner = guest;
- }
- super.visitMethodInsn(opcode, owner, name, descriptor, isInterface);
- }
- };
- }
- return super.visitMethod(access, name, descriptor, signature, exceptions);
- }
- };
- }
- }
- );
- }
-
-}
\ No newline at end of file
diff --git a/agent/src/main/resources/log4j2.properties b/agent/src/main/resources/log4j2.properties
deleted file mode 100644
index 9127ebba..00000000
--- a/agent/src/main/resources/log4j2.properties
+++ /dev/null
@@ -1,9 +0,0 @@
-appenders=console
-appender.console.type=Console
-appender.console.name=STDOUT
-appender.console.layout.type=PatternLayout
-#appender.console.layout.pattern=%highlight{%d{HH:mm:ss.SSS}} [%thread] %-5level %logger{36} - %msg %n
-appender.console.layout.pattern=%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} %highlight{${LOG_LEVEL_PATTERN:-%5p}}{FATAL=red blink, ERROR=red, WARN=yellow bold, INFO=green, DEBUG=green bold, TRACE=blue} %style{}{magenta} [%M] %style{%40C}{cyan} : %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%xEx}
-rootLogger.level=info
-rootLogger.appenderRefs=stdout
-rootLogger.appenderRef.stdout.ref=STDOUT
\ No newline at end of file
diff --git a/api/README.md b/api/README.md
deleted file mode 100644
index 9c2ff6b5..00000000
--- a/api/README.md
+++ /dev/null
@@ -1 +0,0 @@
-Module for GRPC or anyother Client to send data to the keploy server.
\ No newline at end of file
diff --git a/api/pom.xml b/api/pom.xml
deleted file mode 100644
index f940d8d6..00000000
--- a/api/pom.xml
+++ /dev/null
@@ -1,55 +0,0 @@
-
-
-
- java-sdk
- io.keploy
- 1.0.0-SNAPSHOT
-
- 4.0.0
-
- api
-
- 1.8
- 1.8
-
-
-
- io.keploy
- core
- 1.0.0-SNAPSHOT
- compile
-
-
- io.keploy
- models
- 1.0.0-SNAPSHOT
- compile
-
-
- io.keploy
- common
- 1.0.0-SNAPSHOT
- compile
-
-
-
- com.google.protobuf
- protobuf-java
- 3.21.7
-
-
-
- io.grpc
- grpc-stub
- 1.49.2
-
-
-
- io.grpc
- grpc-core
- 1.49.2
-
-
-
\ No newline at end of file
diff --git a/api/src/main/java/io/keploy/service/GrpcService.java b/api/src/main/java/io/keploy/service/GrpcService.java
deleted file mode 100644
index 176c81aa..00000000
--- a/api/src/main/java/io/keploy/service/GrpcService.java
+++ /dev/null
@@ -1,968 +0,0 @@
-package io.keploy.service;
-
-import com.google.protobuf.ProtocolStringList;
-import io.grpc.ManagedChannel;
-import io.grpc.ManagedChannelBuilder;
-import io.keploy.grpc.stubs.RegressionServiceGrpc;
-import io.keploy.grpc.stubs.Service;
-import io.keploy.regression.KeployInstance;
-import io.keploy.regression.Mode;
-import io.keploy.regression.context.Context;
-import io.keploy.regression.context.Kcontext;
-import io.keploy.regression.keploy.Filter;
-import io.keploy.regression.keploy.Keploy;
-import io.keploy.utils.AssertKTests;
-import io.keploy.utils.MultipartContent;
-import io.keploy.utils.Utility;
-import me.tongfei.progressbar.ProgressBar;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import javax.servlet.http.HttpServletRequest;
-import java.io.*;
-import java.net.HttpURLConnection;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.nio.charset.StandardCharsets;
-import java.time.Duration;
-import java.time.Instant;
-import java.util.*;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.regex.Pattern;
-
-import static io.keploy.regression.Mock.Kind.HTTP_EXPORT;
-import static io.keploy.utils.Utility.createFolder;
-
-/**
- * This is a service class. This is the class where Java-sdk communicates with Keploy server to
- * record/store tests and to perform testing.
- */
-public class GrpcService {
-
- private static final Logger logger = LogManager.getLogger(GrpcService.class);
-
- private static final String CROSS = new String(Character.toChars(0x274C));
- public static RegressionServiceGrpc.RegressionServiceBlockingStub blockingStub = null;
- private static Keploy k = null;
- public static ManagedChannel channel;
-
- private static final String SET_PLAIN_TEXT = "\033[0;0m";
-
- private static final String SET_BOLD_TEXT = "\033[0;1m";
- public static String AppPath = "";
-
- /**
- * Initialising GRPC server ang Keploy instance
- */
- public GrpcService() {
- // Channels are secure by default (via SSL/TLS). For the example we disable TLS to avoid
- // needing certificates.
- k = KeployInstance.getInstance().getKeploy();
- channel = ManagedChannelBuilder.forTarget(getTarget())
- .usePlaintext()
- .build();
- blockingStub = RegressionServiceGrpc.newBlockingStub(channel);
- AppPath = System.getProperty("user.dir");
- }
-
- /**
- * Get the url to connect to the server
- *
- * @return String which contains host and port of the server
- */
- private String getTarget() {
- String target;
- URL url;
- try {
- url = new URL(k.getCfg().getServer().getURL());
- } catch (MalformedURLException e) {
- logger.error(CROSS + " unable to make GrpcConnection", e);
- return "localhost:6789";
- }
-
- return url.getAuthority();
- }
-
- /**
- * Modifies all the test cases and mocks that are present into the format which GRPC accepts and send request to GRPC
- * to save tests and mocks that are recorded
- *
- * @param reqBody - http request body recorded from the filter
- * @param params - http query params recorded from the filter
- * @param httpResp - http response body recorded from the filter
- * @param protocolType - http protocolType recorded from the filter
- * @param formData - http form data
- */
- public static void CaptureTestCases(String reqBody, Map params, Service.HttpResp httpResp, String protocolType, Map> formData) {
- logger.debug("inside CaptureTestCases");
-
- Kcontext kctx = Context.getCtx();
- HttpServletRequest ctxReq = kctx.getRequest();
- if (ctxReq == null) {
- logger.error(CROSS + " failed to get keploy context");
- return;
- }
-
- Service.TestCaseReq.Builder testCaseReqBuilder = Service.TestCaseReq.newBuilder();
-
- Service.HttpReq.Builder httpReqBuilder = Service.HttpReq.newBuilder();
- String url = ctxReq.getQueryString() == null ? ctxReq.getRequestURI() :
- ctxReq.getRequestURI() + "?" + ctxReq.getQueryString();
-
- httpReqBuilder.setMethod(ctxReq.getMethod()).setURL(url);
- httpReqBuilder.putAllURLParams(params);
- Map headerMap = getRequestHeaderMap(ctxReq);
- httpReqBuilder.putAllHeader(headerMap);
- httpReqBuilder.setBody(reqBody);
- httpReqBuilder.setProtoMajor(Character.getNumericValue(protocolType.charAt(protocolType.length() - 3)));
- httpReqBuilder.setProtoMinor(Character.getNumericValue(protocolType.charAt(protocolType.length() - 1)));
-
- testCaseReqBuilder.setAppID(k.getCfg().getApp().getName()).setAppPath(AppPath);
- testCaseReqBuilder.setCaptured(Instant.now().getEpochSecond());
-
- /*
- * The order of path parameters, we are getting from request is not proper.
- * Storing in different order will not block the existing functionality.
- * It's only for grouping the testcases.
- * Below code gives unordered mapping of path variables or path parameters
- * Map pathVariables = ((Map) request.getAttribute(HandlerMapping.URI_TEMPLATE_VARIABLES_ATTRIBUTE));
- * Hence we are storing the actual Uri not according to the routing pattern.
- * */
-
- testCaseReqBuilder.setURI(ctxReq.getRequestURI());
- testCaseReqBuilder.setHttpResp(httpResp);
- testCaseReqBuilder.setTestCasePath(k.getCfg().getApp().getTestPath());
- testCaseReqBuilder.setMockPath(k.getCfg().getApp().getMockPath());
- testCaseReqBuilder.addAllMocks(kctx.getMock());
-
- Capture(testCaseReqBuilder, formData, httpReqBuilder);
- }
-
- /**
- * This method runs in threads asynchronously and sends request to Server to capture tests and mocks
- *
- * @param testCaseReqBuilder - test case object builder
- * @param formData - form data from http request
- * @param httpReqBuilder - http request builder
- */
- public static void Capture(Service.TestCaseReq.Builder testCaseReqBuilder, Map> formData, Service.HttpReq.Builder httpReqBuilder) {
- new Thread(() -> {
- try {
-
- // for multipart-request
- List form = saveFiles(formData);
- Service.HttpReq httpReq = httpReqBuilder.addAllForm(form).build();
- Service.TestCaseReq testCaseReq = testCaseReqBuilder.setHttpReq(httpReq).setType(HTTP_EXPORT.value).build();
-
- put(testCaseReq);
- } catch (Exception e) {
- logger.error(CROSS + " failed to send test case to backend", e);
- }
- }).start();
- }
-
- /**
- * This method sends the testcases to the server
- *
- * @param testCaseReq - test case object
- */
- public static void put(Service.TestCaseReq testCaseReq) {
- Service.postTCResponse postTCResponse;
- try {
- // if no filter is added or the test request should be excluded then return
- if (k.getCfg().getApp().getFilter() != null && !isValidTestCaseToBeRecorded(testCaseReq)) return;
- logger.debug("record test case");
- postTCResponse = blockingStub.postTC(testCaseReq);
- } catch (Exception e) {
- logger.error(CROSS + " failed to send testcase to backend, please ensure keploy server is up!", e);
- logger.error(CROSS + " please check keploy server logs if server is up");
- return;
- }
- Map tcsId = postTCResponse.getTcsIdMap();
- String id = tcsId.get("id");
- if (id == null) return;
-
- boolean noise = k.getCfg().getServer().getDenoise();
- if (noise) {
- denoise(id, testCaseReq);
- }
- }
-
- /**
- * Is valid test case to be recorded boolean.
- * This method is used to check if the test case should be recorded or discarded.
- *
- * @param testCaseReq the test case req object which contains all the information about the test case
- * @return the boolean value which indicates if the test case should be recorded or discarded
- */
- public static boolean isValidTestCaseToBeRecorded(Service.TestCaseReq testCaseReq) {
- // acc rej record/discard
- // true true => discard
- // true false => record
- // false true => discard
- // false false => discard
- return doesFollowAcceptanceRegex(testCaseReq) && !doesFollowRejectionRegex(testCaseReq);
-
- }
-
- /**
- * Does follow acceptance regex boolean.
- * This method is used to check if the test case url and header matches the acceptance regex.
- *
- * @param testCaseReq the test case req object which contains all the information about the test case
- * @return the boolean value which indicates if the test case url and header matches the acceptance regex
- */
- public static boolean doesFollowAcceptanceRegex(Service.TestCaseReq testCaseReq) {
- Filter filter = k.getCfg().getApp().getFilter();
-
- //if user doesn't provide any accept regex or provide both empty, then it's valid to be recorded.
- if ((filter.getAcceptHeaderRegex() == null || filter.getAcceptHeaderRegex().length == 0)
- && (filter.getAcceptUrlRegex() == null || filter.getAcceptUrlRegex().length == 0)) {
- return true;
- }
-
- boolean isIncludedUrl = false;
- boolean isIncludedHeader = false;
-
- // get test case url regex
- if (filter.getAcceptUrlRegex() != null && filter.getAcceptUrlRegex().length > 0) {
- // check if test case url match the regex
- String testCaseUrl = testCaseReq.getHttpReq().getURL();
- for (String value : filter.getAcceptUrlRegex()) {
- Pattern pattern = Pattern.compile(value);
- logger.debug("accept url regex: " + value);
- logger.debug("test case url: " + testCaseUrl);
- if (pattern.matcher(testCaseUrl).find()) {
- isIncludedUrl = true;
- break;
- }
- }
- }
-
- // get test case header regex
- if (filter.getAcceptHeaderRegex() != null && filter.getAcceptHeaderRegex().length > 0) {
- // check if test case header match the regex
- Map headerMap = testCaseReq.getHttpReq().getHeaderMap();
- for (Map.Entry entry : headerMap.entrySet()) {
- String key = entry.getKey();
- Service.StrArr value = entry.getValue();
- String header = key + ": " + value.getValueList().get(0);
- for (String regex : filter.getAcceptHeaderRegex()) {
- Pattern pattern = Pattern.compile(regex);
- logger.debug("accept header regex: " + regex);
- logger.debug("test case header: " + header);
- if (pattern.matcher(header).find()) {
- isIncludedHeader = true;
- break;
- }
- if (isIncludedHeader) break;
- }
- }
- }
-
- logger.debug("isIncludedUrl: " + isIncludedUrl);
- logger.debug("isIncludedHeader: " + isIncludedHeader);
- return isIncludedUrl || isIncludedHeader;
- }
-
- /**
- * Does follow rejection regex boolean.
- * This method is used to check if the test case url and header matches the rejection regex.
- *
- * @param testCaseReq the test case req object which contains all the information about the test case.
- * @return the boolean value which indicates if the test case url and header matches the rejection regex
- */
- public static boolean doesFollowRejectionRegex(Service.TestCaseReq testCaseReq) {
- Filter filter = k.getCfg().getApp().getFilter();
-
-
- //if user doesn't provide any reject regex or provide both empty, then it's valid to be recorded and do not reject it.
- if ((filter.getAcceptHeaderRegex() == null || filter.getAcceptHeaderRegex().length == 0)
- && (filter.getAcceptUrlRegex() == null || filter.getAcceptUrlRegex().length == 0)) {
- return false;
- }
-
- boolean isExcludedHeader = false;
- boolean isExcludedUrl = false;
-
- // get test case url regex
- if (filter.getRejectUrlRegex() != null && filter.getRejectUrlRegex().length > 0) {
- // check if test case url match the regex
- String testCaseUrl = testCaseReq.getHttpReq().getURL();
- for (String value : filter.getRejectUrlRegex()) {
- Pattern pattern = Pattern.compile(value);
- logger.debug("reject url regex: " + value);
- logger.debug("test case url: " + testCaseUrl);
- if (pattern.matcher(testCaseUrl).find()) {
- isExcludedUrl = true;
- break;
- }
- }
- }
-
- // get test case header regex
- if (filter.getRejectHeaderRegex() != null && filter.getRejectHeaderRegex().length > 0) {
- // check if test case header match the regex
- Map headerMap = testCaseReq.getHttpReq().getHeaderMap();
- for (Map.Entry entry : headerMap.entrySet()) {
- String key = entry.getKey();
- Service.StrArr value = entry.getValue();
- String header = key + ": " + value.getValueList().get(0);
- for (String regex : filter.getRejectHeaderRegex()) {
- Pattern pattern = Pattern.compile(regex);
- logger.debug("reject header regex: " + regex);
- logger.debug("test case header: " + header);
- if (pattern.matcher(header).find()) {
- isExcludedHeader = true;
- break;
- }
- }
-
- if (isExcludedHeader) break;
- }
- }
- logger.debug("isExcludedUrl: " + isExcludedUrl);
- logger.debug("isExcludedHeader: " + isExcludedHeader);
- return isExcludedHeader || isExcludedUrl;
- }
-
- /**
- * Denoising while recording test cases
- *
- * @param id - test case id
- * @param testCaseReq - test case object
- */
- public static void denoise(String id, Service.TestCaseReq testCaseReq) {
- // run the request again to find noisy fields
- try {
- TimeUnit.SECONDS.sleep(3);
- } catch (InterruptedException e) {
- logger.error(CROSS + " (denoise): unable to sleep", e);
- }
-
- Service.TestCase.Builder testCaseBuilder = Service.TestCase.newBuilder();
- testCaseBuilder.setId(id);
- testCaseBuilder.setCaptured(testCaseReq.getCaptured());
- testCaseBuilder.setURI(testCaseReq.getURI());
- testCaseBuilder.setHttpReq(testCaseReq.getHttpReq());
- testCaseBuilder.addAllMocks(testCaseReq.getMocksList());
- Service.TestCase testCase = testCaseBuilder.build();
-
- Service.HttpResp resp2 = simulate(testCase);
-
- logger.debug("response got from simulate request: {}", resp2);
-
- Service.TestReq.Builder testReqBuilder = Service.TestReq.newBuilder();
- testReqBuilder.setID(id);
- testReqBuilder.setResp(resp2);
- testReqBuilder.setAppID(k.getCfg().getApp().getName());
- testReqBuilder.setTestCasePath(k.getCfg().getApp().getTestPath());
- testReqBuilder.setMockPath(k.getCfg().getApp().getMockPath());
- testReqBuilder.setType(HTTP_EXPORT.value);
- Service.TestReq bin2 = testReqBuilder.build();
-
- // send de-noise request to server
- try {
- Service.deNoiseResponse deNoiseResponse = blockingStub.deNoise(bin2);
- logger.debug("denoise message received from server: {}", deNoiseResponse.getMessage());
- } catch (Exception e) {
- logger.error(CROSS + " failed to send de-noise request to backend, please check keploy server logs", e);
- }
-
- }
-
- /**
- * This method runs each test on client application in test mode by setting mocks and data in context.
- *
- * @param testCase - test case object
- * @return - response for the test
- */
- public static Service.HttpResp simulate(Service.TestCase testCase) {
- logger.debug("inside simulate");
-
- //add mocks to shared context
- k.getMocks().put(testCase.getId(), new ArrayList<>(testCase.getMocksList()));
- k.getMocktime().put(testCase.getId(), testCase.getCaptured());
-
- //add dependency to shared context
- k.getDeps().put(testCase.getId(), new ArrayList<>(testCase.getDepsList()));
-
- // execute request on client application
- executeSimulateRequest(testCase);
-
- Service.HttpResp.Builder resp = GetResp(testCase.getId());
-
- k.getDeps().remove(testCase.getId());
- k.getMocks().remove(testCase.getId());
- k.getMocktime().remove(testCase.getId());
-
- return resp.build();
- }
-
- /**
- * Executes http request on client application in test mode
- *
- * @param testCase - test case object
- */
- private static void executeSimulateRequest(Service.TestCase testCase) {
- String url = testCase.getHttpReq().getURL();
- String host = k.getCfg().getApp().getHost();
- String port = k.getCfg().getApp().getPort();
- String method = testCase.getHttpReq().getMethod();
- String body = testCase.getHttpReq().getBody();
- String targetUrl = "http://" + host + ":" + port + url;
- String testId = testCase.getId();
- Map headerMap = testCase.getHttpReq().getHeaderMap();
-
- logger.debug("simulate request's url: {}", targetUrl);
- logger.debug("simulate request's method: {}", method);
- logger.debug("simulate request's headers: {}", headerMap);
-
- String contentType = headerMap.containsKey("content-type") ? headerMap.get("content-type").getValue(0) : "application/json; charset=utf-8";
-
-
- try {
- URL obj = new URL(targetUrl);
- HttpURLConnection conn = (HttpURLConnection) obj.openConnection();
-// conn.setReadTimeout(60000);
-// conn.setConnectTimeout(60000);
- conn.setRequestMethod(method);
- conn.setInstanceFollowRedirects(false);
-
- setCustomRequestHeaderMap(conn, headerMap);
- conn.setRequestProperty("KEPLOY_TEST_ID", testId);
-
- if (contentType.contains("multipart")) {
- HttpPostMultipart multipart = new HttpPostMultipart("utf-8", conn);
-
- List formList = testCase.getHttpReq().getFormList();
- for (Service.FormData part : formList) {
- List vals = new ArrayList<>(part.getValuesList());
- List paths = new ArrayList<>(part.getPathsList());
-
- if (!paths.isEmpty()) {
- for (String path : paths) {
- File file = new File(path);
- multipart.addFilePart(part.getKey(), file);
- }
- } else if (!vals.isEmpty()) {
- for (String val : vals) {
- multipart.addFormField(part.getKey(), val);
- }
- }
- }
- //execute multipart request
- multipart.finish();
- conn.disconnect();
- return;
- }
-
- if ((method.equals("GET") || method.equals("DELETE")) && !body.isEmpty()) {
- logger.warn("keploy doesn't support {} request with body", method);
- }
-
-
- //POST, PUT, PATCH <- requests containing body
- if (method.equals("POST") || method.equals("PUT") || method.equals("PATCH")) {
- conn.setDoOutput(true);
- OutputStream os = conn.getOutputStream();
- os.write(body.getBytes());
- os.flush();
- os.close();
- logger.debug("simulate request body set");
- }
-
- final int responseCode = conn.getResponseCode();
- logger.debug("status code got from simulate request: {}", responseCode);
-
- final Map> responseHeaders = conn.getHeaderFields();
- logger.debug("response headers got from simulate request: {}", responseHeaders);
-
- if (isSuccessfulResponse(conn)) {
- String resBody = getSimulateResponseBody(conn);
- logger.debug("response body got from simulate request: {}", resBody);
- }
-
- conn.disconnect();
- } catch (IOException e) {
- logger.error(CROSS + " failed sending testcase request to app", e);
- }
- }
-
- public static boolean isSuccessfulResponse(HttpURLConnection connection) {
- try {
- int responseCode = connection.getResponseCode();
- return responseCode >= 200 && responseCode < 300;
- } catch (IOException e) {
- return false;
- }
- }
-
- public static String getSimulateResponseBody(HttpURLConnection httpConn) throws IOException {
- String responseBody;
- BufferedReader in = new BufferedReader(new InputStreamReader(httpConn.getInputStream()));
- String inputLine;
- StringBuilder response = new StringBuilder();
-
- while ((inputLine = in.readLine()) != null) {
- response.append(inputLine);
- }
- in.close();
-
- responseBody = response.toString();
- return responseBody;
- }
-
- public static Service.HttpResp.Builder GetResp(String id) {
-
- logger.debug("inside GetResp");
- Service.HttpResp httpResp = k.getResp().get(id);
- if (httpResp == null) {
- logger.debug("response is not present in keploy resp map");
- return Service.HttpResp.newBuilder();
- }
-
- Service.HttpResp.Builder respBuilder = Service.HttpResp.newBuilder();
-
- try {
- respBuilder.setBody(httpResp.getBody())
- .setStatusCode(httpResp.getStatusCode())
- .setStatusMessage(httpResp.getStatusMessage())
- .setProtoMajor(httpResp.getProtoMajor())
- .setProtoMinor(httpResp.getProtoMinor())
- .putAllHeader(httpResp.getHeaderMap());
- } catch (Exception e) {
- logger.error(CROSS + " failed getting response for http request", e);
- return Service.HttpResp.newBuilder();
- }
-
- logger.debug("response from keploy resp map");
- return respBuilder;
- }
-
- /**
- * Starts testing of the recorded test cases
- */
- public static void Test() {
- String delay = "5";
- try {
- delay = System.getenv("DELAY");
- if (delay != null) {
- k.getCfg().getApp().setDelay(Duration.ofSeconds(Long.parseLong(delay)));
- } else {
- delay = "5";
- }
- TimeUnit.SECONDS.sleep(k.getCfg().getApp().getDelay().getSeconds());
- } catch (InterruptedException e) {
- logger.error(CROSS + " (Test): unable to sleep", e);
- }
- logger.debug("entering test mode");
- logger.info("test starting in " + delay + " sec");
-
- List tcs = fetch();
-
- final String RED_CIRCLE = "\uD83D\uDD34";
-
- if (Mode.getMode().equals(Mode.ModeType.MODE_RECORD) && tcs == null) {
- logger.info("No existing tests found at {} directory", k.getCfg().getApp().getTestPath());
- System.out.println("--------------------------------------------------------------------------------------------\n");
- String endTest = RED_CIRCLE + " You can record your new test cases now.";
- System.out.println(bold(endTest));
- System.out.println("\n--------------------------------------------------------------------------------------------");
- return;
- }
-
- int total = tcs.size();
- String id;
- try {
- id = start(String.valueOf(total));
- } catch (Exception e) {
- logger.error(CROSS + " failed to start test run", e);
- return;
- }
- logger.info("starting test execution id: {} total tests: {}", id, total);
-
- AtomicBoolean ok = new AtomicBoolean(true);
- AtomicInteger failedtestCount = new AtomicInteger(0);
- CountDownLatch wg = new CountDownLatch(tcs.size());
-
- String async_test = System.getenv("ASYNC_TESTING");
- int nThreads = (Boolean.parseBoolean(async_test)) ? 10 : 1;
-
- ExecutorService service = Executors.newFixedThreadPool(nThreads);
- // call the service for each test case
-
- String runTestBeforeRecord = System.getenv("RUN_TEST_BEFORE_RECORD");
- boolean runExistingTests = true;
- if (runTestBeforeRecord != null) {
- runExistingTests = Boolean.parseBoolean(runTestBeforeRecord);
- }
-
- //running tests in record mode in order to maintain the same state of database.
- if (Mode.getMode().equals(Mode.ModeType.MODE_RECORD) && runExistingTests) {
- try (ProgressBar pb = new ProgressBar("KEPLOY-TESTS", total)) {
- runTests(service, pb, ok, wg, total, tcs, id, failedtestCount);
- pb.setExtraMessage("Tests Completed");
- }
- } else if (Mode.getMode().equals(Mode.ModeType.MODE_TEST)) {
- runTests(service, null, ok, wg, total, tcs, id, failedtestCount);
- }
-
- // wait for all tests to get completed.
- try {
- wg.await();
- } catch (InterruptedException e) {
- logger.error(CROSS + " (Test): unable to wait for tests to get completed", e);
- AssertKTests.finalTestResult.set(false);
- }
-
- Boolean finalResult = ok.get();
- AssertKTests.finalTestResult.set(finalResult);
- end(id, finalResult);
-
- logger.info("test run completed with run id [{}]", id);
- logger.info("|| passed overall: {} ||", String.valueOf(finalResult).toUpperCase());
-
- if (Mode.getMode().equals(Mode.ModeType.MODE_RECORD) && runExistingTests) {
- if (!finalResult) {
- final String test = (failedtestCount.get() > 1) ? "tests" : "test";
- String WARN = "\u26A0\uFE0F";
- String inconsistentState = WARN + " " + bold(failedtestCount.get() + " " + test + " failed, Please make sure your database state is consistent.");
- System.out.println(inconsistentState);
- }
- System.out.println("--------------------------------------------------------------------------------------------\n");
- String endTest = RED_CIRCLE + " Tests have been completed, You can record your new test cases now.";
- System.out.println(bold(endTest));
- System.out.println("\n--------------------------------------------------------------------------------------------");
- }
- }
-
- /**
- * Runs all tests and shows the progress at last
- */
- private static void runTests(ExecutorService service, ProgressBar pb, AtomicBoolean ok, CountDownLatch wg, int total, List tcs, String id, AtomicInteger failedtestCount) {
- for (int i = 0; i < tcs.size(); i++) {
- Service.TestCase tc = tcs.get(i);
- logger.info("testing {} of {} testcase id: [{}]", (i + 1), total, tc.getId());
- service.submit(() -> {
- boolean pass = check(id, tc);
- if (!pass) {
- failedtestCount.getAndIncrement();
- ok.set(false);
- }
-
- logger.info("result : testcase id: [{}] passed: {}", tc.getId(), pass);
- wg.countDown();
- });
- if (Mode.getMode().equals(Mode.ModeType.MODE_RECORD)) {
- pb.step(); // for progress bar
- }
- }
- }
-
- private static String bold(String str) {
- return (SET_BOLD_TEXT + str + SET_PLAIN_TEXT);
- }
-
- /**
- * Provides the test run id
- *
- * @param total - total no of test cases
- * @return - test run id
- */
- public static String start(String total) {
- logger.debug("inside start function");
- Service.startRequest startRequest = Service.startRequest.newBuilder()
- .setApp(k.getCfg().getApp().getName())
- .setTestCasePath(k.getCfg().getApp().getTestPath())
- .setMockPath(k.getCfg().getApp().getMockPath())
- .setAppPath(AppPath)
- .setTotal(total).build();
-
- Service.startResponse startResponse = null;
-
- try {
- startResponse = blockingStub.start(startRequest);
- } catch (Exception e) {
- logger.error(CROSS + " failed to start test run, please check keploy server logs", e);
- AssertKTests.finalTestResult.set(false);
- System.exit(1);
- }
-
- return (startResponse != null) ? startResponse.getId() : "";
- }
-
- /**
- * Send request to server that test run is done. So that post-processing will be done
- *
- * @param id - test run id
- * @param status - status of the test run
- */
- public static void end(String id, boolean status) {
- logger.debug("inside end function");
- Service.endRequest endRequest = Service.endRequest.newBuilder().setId(id).setStatus(String.valueOf(status)).build();
- Service.endResponse endResponse;
- try {
- endResponse = blockingStub.end(endRequest);
- logger.debug("response after ending test run: {}", endResponse);
- } catch (Exception e) {
- logger.error(CROSS + " failed to complete test runs, please check keploy server logs", e);
- AssertKTests.finalTestResult.set(false);
- System.exit(1);
- }
- }
-
- /**
- * Fetch all the test cases and mocks that are recorded
- *
- * @return - list if testcase objects
- */
- public static List fetch() {
- logger.debug("inside fetch function");
-
- List testCases = new ArrayList<>();
- int i = 0;
- while (true) {
- Service.getTCSRequest tcsRequest = Service.getTCSRequest.newBuilder()
- .setApp(k.getCfg().getApp().getName())
- .setLimit("25")
- .setOffset(String.valueOf(i))
- .setTestCasePath(k.getCfg().getApp().getTestPath())
- .setMockPath(k.getCfg().getApp().getMockPath())
- .build();
-
- Service.getTCSResponse tcs = null;
-
- try {
- tcs = blockingStub.getTCS(tcsRequest);
- } catch (Exception e) {
- if (e.getMessage().contains("no such file or directory")) {
- if (Mode.getMode().equals(Mode.ModeType.MODE_RECORD)) {
- return null;
- } else if (Mode.getMode().equals(Mode.ModeType.MODE_TEST)) {
- logger.info("No existing tests found at {} directory", k.getCfg().getApp().getTestPath());
- }
- } else {
- logger.error(CROSS + " failed to fetch testcases from keploy cloud, please ensure keploy server is up!", e);
- }
- AssertKTests.finalTestResult.set(false);
- System.exit(1);
- }
-
- if (tcs == null) {
- break;
- }
-
- int cnt = tcs.getTcsCount();
- if (cnt == 0) {
- break;
- }
- List tc = tcs.getTcsList();
- testCases.addAll(tc);
-
- boolean eof = tcs.getEof();
- if (eof) {
- break;
- }
-
- i += 25;
- }
-
- //reverse in order to get testcases in which they were recorded.
- Collections.reverse(testCases);
- return testCases;
- }
-
- /**
- * Starts the simulate for every test case and compared with the response recorded before
- *
- * @param testrunId - test run id
- * @param tc - test case object
- * @return - Boolean whether pass or fail
- */
- public static boolean check(String testrunId, Service.TestCase tc) {
- logger.debug("running test case with [{}] testrunId", testrunId);
-
- Service.HttpResp resp;
- try {
- resp = simulate(tc);
- logger.debug("response got from simulate request: {}", resp);
- } catch (Exception e) {
- logger.error(CROSS + " failed to simulate request on local server", e);
- AssertKTests.finalTestResult.set(false);
- return false;
- }
- Service.TestReq testReq = Service.TestReq.newBuilder()
- .setID(tc.getId())
- .setAppID(k.getCfg().getApp().getName())
- .setRunID(testrunId)
- .setResp(resp)
- .setTestCasePath(k.getCfg().getApp().getTestPath())
- .setMockPath(k.getCfg().getApp().getMockPath())
- .setType(HTTP_EXPORT.value)
- .build();
-
- Service.testResponse testResponse;
- try {
- testResponse = blockingStub.test(testReq);
- } catch (Exception e) {
- logger.error(CROSS + " failed to send test request to backend, please check keploy server logs", e);
- return false;
- }
-
- if (testResponse == null) {
- return false;
- }
-
- Map res = testResponse.getPassMap();
- logger.debug("(check): test result of testrunId [{}]: {}", testrunId, res.get("pass"));
- return res.getOrDefault("pass", false);
- }
-
- private static void setCustomRequestHeaderMap(HttpURLConnection conn, Map srcMap) {
-
- Map> headerMap = new HashMap<>();
-
- for (String key : srcMap.keySet()) {
- Service.StrArr values = srcMap.get(key);
- ProtocolStringList valueList = values.getValueList();
- List headerValues = new ArrayList<>(valueList);
- headerMap.put(key, headerValues);
- }
-
- for (String key : headerMap.keySet()) {
- if (isModifiable(key)) {
- List values = headerMap.get(key);
- for (String value : values) {
- conn.addRequestProperty(key, value);
- }
- }
- }
- }
-
- private static boolean isModifiable(String key) {
- switch (key) {
- case "connection":
- return false;
- case "content-length":
- return false;
- case "date":
- return false;
- case "expect":
- return false;
- case "from":
- return false;
- case "host":
- return false;
- case "upgrade":
- return false;
- case "via":
- return false;
- case "warning":
- return false;
- }
- return true;
- }
-
- private static Map getRequestHeaderMap(HttpServletRequest httpServletRequest) {
-
- Map map = new HashMap<>();
-
- List headerNames = Collections.list(httpServletRequest.getHeaderNames());
- for (String name : headerNames) {
-
- List values = Collections.list(httpServletRequest.getHeaders(name));
- Service.StrArr.Builder builder = Service.StrArr.newBuilder();
-
- for (String s : values) {
- builder.addValue(s);
- }
- Service.StrArr value = builder.build();
-
- map.put(name, value);
- }
- return map;
- }
-
- private static List saveFiles(Map> multipartData) {
- //TODO: same file but different size, handle that case also, just override the file and give a warning for the same.
- List data = new ArrayList<>(multipartData.size());
-
- for (String partName : multipartData.keySet()) {
-
- List contents = multipartData.get(partName);
- Service.FormData.Builder formDataBuilder = Service.FormData.newBuilder().setKey(partName);
-
- List values = new ArrayList<>();
-
- boolean isFile = false;
-
- for (MultipartContent content : contents) {
- String fileName = content.getFileName();
- byte[] body = content.getBody();
- if (fileName != null) {
- isFile = true;
- String filePath = determineFilePath(fileName);
- saveFile(filePath, body);
- values.add(filePath);
- } else {
- isFile = false;
- String nonFileBody = getStringValue(body, String.valueOf(StandardCharsets.UTF_8));
- values.add(nonFileBody);
- }
- }
-
- if (isFile) {
- formDataBuilder.addAllPaths(values);
- } else {
- formDataBuilder.addAllValues(values);
- }
-
- Service.FormData formData = formDataBuilder.build();
- data.add(formData);
- }
- return data;
- }
-
- public static String saveFile(String filePath, byte[] body) {
-
- File file = new File(filePath);
- String fileName = Utility.getFileNameFromPath(filePath);
- if (file.exists()) {
- logger.warn("file {} already exist at location {}", fileName, k.getCfg().getApp().getAssetPath());
- }
-
- FileOutputStream fos;
- try {
- fos = new FileOutputStream(filePath);
- fos.write(body);
- fos.close();
- logger.debug("saved file at location {}", filePath);
- } catch (
- IOException e) {
- logger.error(CROSS + " location not found", e);
- }
- return filePath;
- }
-
- public static String determineFilePath(String fileName) {
- String folderPath = k.getCfg().getApp().getAssetPath();
- createFolder(folderPath);
- String filePath = folderPath + "/" + fileName;
- return filePath;
- }
-
- private static String getStringValue(byte[] contentAsByteArray, String characterEncoding) {
- try {
- return new String(contentAsByteArray, 0, contentAsByteArray.length, characterEncoding);
- } catch (UnsupportedEncodingException e) {
- e.printStackTrace();
- }
- return "";
- }
-}
diff --git a/api/src/main/java/io/keploy/service/HttpPostMultipart.java b/api/src/main/java/io/keploy/service/HttpPostMultipart.java
deleted file mode 100644
index ac0ff82a..00000000
--- a/api/src/main/java/io/keploy/service/HttpPostMultipart.java
+++ /dev/null
@@ -1,92 +0,0 @@
-package io.keploy.service;
-
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.io.*;
-import java.net.HttpURLConnection;
-
-import java.net.URLConnection;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-
-public class HttpPostMultipart {
- // reference: https://blog.cpming.top/p/httpurlconnection-multipart-form-data
- private static final Logger logger = LogManager.getLogger(HttpPostMultipart.class);
-
- private static final String CROSS = new String(Character.toChars(0x274C));
-
- private final String boundary;
- private static final String LINE = "\r\n";
- private final HttpURLConnection httpConn;
- private final String charset;
- private final OutputStream outputStream;
- private final PrintWriter writer;
-
- public HttpPostMultipart(String charset, HttpURLConnection httpConn) throws IOException {
- boundary = UUID.randomUUID().toString();
- this.charset = charset;
- this.httpConn = httpConn;
- this.httpConn.setUseCaches(false);
- this.httpConn.setDoOutput(true); // indicates POST method
- this.httpConn.setDoInput(true);
- this.httpConn.setRequestProperty("Content-Type", "multipart/form-data; boundary=" + boundary);
- outputStream = this.httpConn.getOutputStream();
- writer = new PrintWriter(new OutputStreamWriter(outputStream, charset), true);
- }
-
- public void addFormField(String name, String value) {
- writer.append("--" + boundary).append(LINE);
- writer.append("Content-Disposition: form-data; name=\"" + name + "\"").append(LINE);
- writer.append("Content-Type: text/plain; charset=" + charset).append(LINE);
- writer.append(LINE);
- writer.append(value).append(LINE);
- writer.flush();
- }
-
-
- public void addFilePart(String fieldName, File uploadFile)
- throws IOException {
- String fileName = uploadFile.getName();
- writer.append("--" + boundary).append(LINE);
- writer.append("Content-Disposition: form-data; name=\"" + fieldName + "\"; filename=\"" + fileName + "\"").append(LINE);
- writer.append("Content-Type: " + URLConnection.guessContentTypeFromName(fileName)).append(LINE);
- writer.append("Content-Transfer-Encoding: binary").append(LINE);
- writer.append(LINE);
- writer.flush();
-
- FileInputStream inputStream = new FileInputStream(uploadFile);
- byte[] buffer = new byte[4096];
- int bytesRead = -1;
- while ((bytesRead = inputStream.read(buffer)) != -1) {
- outputStream.write(buffer, 0, bytesRead);
- }
- outputStream.flush();
- inputStream.close();
- writer.append(LINE);
- writer.flush();
- }
-
-
- public void finish() throws IOException {
- String responseBody = "";
- writer.flush();
- writer.append("--" + boundary + "--").append(LINE);
- writer.close();
-
- // checks server's status code first
- final int status = this.httpConn.getResponseCode();
- logger.debug("status code got from simulate request: {}", status);
-
- final Map> responseHeaders = httpConn.getHeaderFields();
- logger.debug("response headers got from simulate request: {}", responseHeaders);
-
- if (GrpcService.isSuccessfulResponse(httpConn)) {
- responseBody = GrpcService.getSimulateResponseBody(httpConn);
- logger.debug("response body got from multipart simulate request: {}", responseBody);
- } else {
- throw new IOException("Server returned non-OK status: " + status);
- }
- }
-}
\ No newline at end of file
diff --git a/api/src/main/java/io/keploy/service/mock/Config.java b/api/src/main/java/io/keploy/service/mock/Config.java
deleted file mode 100644
index d9ea1332..00000000
--- a/api/src/main/java/io/keploy/service/mock/Config.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package io.keploy.service.mock;
-
-import io.keploy.regression.Mode;
-import io.keploy.regression.context.Context;
-import io.keploy.regression.context.Kcontext;
-
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * This is a config class for mocking feature which provides all the configurations required for mocking feature.
- */
-public class Config {
-
- public static Mode.ModeType mode;
- public static String Name = "";
- public static Kcontext CTX = Context.getCtx();
- public static String Path = "";
- public static Boolean Overwrite = false;
- public static Map MockId = Collections.synchronizedMap(new HashMap<>());
- public static String MockPath = "";
-
-}
diff --git a/api/src/main/java/io/keploy/service/mock/MockLib.java b/api/src/main/java/io/keploy/service/mock/MockLib.java
deleted file mode 100644
index e1b159b2..00000000
--- a/api/src/main/java/io/keploy/service/mock/MockLib.java
+++ /dev/null
@@ -1,181 +0,0 @@
-package io.keploy.service.mock;
-
-import io.keploy.grpc.stubs.Service;
-import io.keploy.regression.KeployInstance;
-import io.keploy.regression.Mode;
-import io.keploy.regression.context.Context;
-import io.keploy.regression.context.Kcontext;
-import io.keploy.regression.keploy.AppConfig;
-import io.keploy.regression.keploy.Keploy;
-import io.keploy.regression.keploy.ServerConfig;
-import io.keploy.service.GrpcService;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.List;
-
-import static io.keploy.service.GrpcService.blockingStub;
-import static io.keploy.service.mock.Config.*;
-
-/**
- * This is a service class used by Mocking feature. This is the class where Java-sdk communicates with Keploy server to
- * record/store mocks and to perform mocking.
- */
-public class MockLib {
- private static final Logger logger = LogManager.getLogger(MockLib.class);
- static Keploy k = null;
- AppConfig appConfig = new AppConfig();
-
- /**
- * Initialising Keploy instance and GRPC server
- * @param name - App name
- */
- public MockLib(String name) {
- KeployInstance ki = KeployInstance.getInstance();
- k = ki.getKeploy();
- io.keploy.regression.keploy.Config cfg = new io.keploy.regression.keploy.Config();
- Name = name;
- appConfig.setName(Name);
- cfg.setApp(appConfig);
-
- ServerConfig serverConfig = new ServerConfig();
-
- if (System.getenv("DENOISE") != null) {
- serverConfig.setDenoise(Boolean.parseBoolean(System.getenv("DENOISE")));
- }
-
- if (System.getenv("KEPLOY_URL") != null) {
- serverConfig.setURL(System.getenv("KEPLOY_URL"));
- }
-
- cfg.setApp(appConfig);
- cfg.setServer(serverConfig);
- k.setCfg(cfg);
- new GrpcService();
-
- Kcontext ctx = NewContext();
- System.out.println(ctx);
- }
-
- /**
- * Set the context according to the Keploy mode i.e. if it is in test mode add all the mock data into the context
- * @return - Kcontext
- */
- public Kcontext NewContext() {
- mode = Mode.ModeType.MODE_TEST;
-
- String mpath = System.getenv("KEPLOY_MOCK_PATH");
- Path path = Paths.get("");
-// AppConfig appConfig = new AppConfig();
- if (mpath != null && mpath.length() > 0 && !Paths.get(mpath).isAbsolute()) {
- Path effectivePath = path.resolve(mpath).toAbsolutePath();
- String absolutePath = effectivePath.normalize().toString();
- appConfig.setMockPath(absolutePath);
- } else if (mpath == null || mpath.length() == 0) {
- String currDir = System.getProperty("user.dir") + "/src/test/e2e/mocks";
- mpath = currDir;
- appConfig.setMockPath(currDir);
- } else {
- //if user gives the path
- appConfig.setMockPath(mpath);
- }
- MockPath = appConfig.getMockPath();
- logger.debug("mock path: {}", appConfig.getMockPath());
-
- mode = System.getenv().getOrDefault("KEPLOY_MODE", "test").equals("record") ? Mode.ModeType.MODE_RECORD : Mode.ModeType.MODE_TEST;
- ArrayList mocks = new ArrayList<>();
- if (mode == Mode.ModeType.MODE_TEST) {
- if (k.getCfg().getApp().getName() == null || k.getCfg().getApp().getName().length() == 0) {
- logger.error("Please enter the auto generated name to mock the dependencies using Keploy !");
-// return;
- }
- Service.GetMockReq request = Service.GetMockReq.newBuilder().setName(k.getCfg().getApp().getName()).setPath(mpath).build();
-
- mocks = GetAllMocks(request);
- if (mocks == null) {
- logger.error("No mocks found for the given name: {}", k.getCfg().getApp().getName());
- logger.error("Failed to get the mocks from keploy server. Please ensure that keploy server is running.");
- }
- }
- Kcontext kctx = new Kcontext();
- Context.setCtx(kctx);
- kctx.setMock(mocks);
- kctx.setMode(mode);
- kctx.setTestId(appConfig.getName());
- kctx.setFileExport(true);
- String name = "";
- if (k.getCfg().getApp().getName() != null) {
- name = " for " + k.getCfg().getApp().getName();
- }
- System.out.println(name + " -=-==-=-=-= " + mode.value);
- logger.info("Keploy created new mocking context in {} mode {}.If you dont see any logs about your dependencies below, your dependency/s are NOT wrapped.", mode, name);
- boolean exists = StartRecordingMocks(mpath + "/" + name + ".yaml", mode.value, name, Config.Overwrite);
- if (exists && !Config.Overwrite) {
- logger.error(" Keploy failed to record dependencies because yaml file already exists {} in directory: {}.", name, mpath);
- Config.MockId.put(name, true);
- }
-
- return kctx;
- }
-
- public static boolean StartRecordingMocks(String path, String mode, String name, Boolean overWrite) {
- Service.StartMockReq startMockReq = Service.StartMockReq.newBuilder().setMode(mode).setPath(path).setName(name).setOverWrite(overWrite).build();
- Service.StartMockResp startMockResp = blockingStub.startMocking(startMockReq);
- if (startMockResp == null) { // TODO - check how to handle this error
- logger.error("Failed to make StartMocking grpc call to keploy server" + name + " mock");
- return false;
- }
- return startMockResp.getExists();
- }
-
- /**
- * Gets all the mocks in the test mode from the server
- *
- * @param getMockReq - contains mock path and app name
- * @return - all the mocks that are recorded
- */
- public static ArrayList GetAllMocks(Service.GetMockReq getMockReq) {
- final Service.getMockResp resp = blockingStub.getMocks(getMockReq);
- if (resp != null) {
- if (resp.getMocksList().size() == 0) {
- logger.info("Mocklist size is zero !!");
- return null;
- }
- return getM(resp.getMocksList());
- }
-
- logger.error("returned nil as array mocks from keploy server");
- return null;
- }
-
- private static ArrayList getM(List mocksList) {
- ArrayList mockArrayList = new ArrayList<>();
- for (int i = 0; i < mocksList.size(); i++) {
- mockArrayList.add(mocksList.get(0));
- }
- return mockArrayList;
- }
-
- /**
- * Send recorded mocks to the server
- *
- * @param path - folder path where mock should be stored
- * @param mock - mock object
- * @return - Boolean that determines whether mocks are stored or not
- */
- public static boolean PutMock(String path, Service.Mock mock) {
-
- Service.PutMockReq putMockReq = Service.PutMockReq.newBuilder().setMock(mock).setPath(path).build();
- Service.PutMockResp putMockResp = blockingStub.putMock(putMockReq);
- if (putMockResp == null) { // check iska error handle
- logger.error("Failed to call the putMock method");
- return false;
- }
- return true;
- }
-
-
-}
diff --git a/api/src/main/resources/log4j2.properties b/api/src/main/resources/log4j2.properties
deleted file mode 100644
index 9127ebba..00000000
--- a/api/src/main/resources/log4j2.properties
+++ /dev/null
@@ -1,9 +0,0 @@
-appenders=console
-appender.console.type=Console
-appender.console.name=STDOUT
-appender.console.layout.type=PatternLayout
-#appender.console.layout.pattern=%highlight{%d{HH:mm:ss.SSS}} [%thread] %-5level %logger{36} - %msg %n
-appender.console.layout.pattern=%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} %highlight{${LOG_LEVEL_PATTERN:-%5p}}{FATAL=red blink, ERROR=red, WARN=yellow bold, INFO=green, DEBUG=green bold, TRACE=blue} %style{}{magenta} [%M] %style{%40C}{cyan} : %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%xEx}
-rootLogger.level=info
-rootLogger.appenderRefs=stdout
-rootLogger.appenderRef.stdout.ref=STDOUT
\ No newline at end of file
diff --git a/checkstyle-config.xml b/checkstyle-config.xml
deleted file mode 100644
index 7f714125..00000000
--- a/checkstyle-config.xml
+++ /dev/null
@@ -1,182 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/common/README.md b/common/README.md
deleted file mode 100644
index 4d86c09b..00000000
--- a/common/README.md
+++ /dev/null
@@ -1 +0,0 @@
-Module for common classes like Exception,Serializer/Deserializer,Models, etc.
\ No newline at end of file
diff --git a/common/pom.xml b/common/pom.xml
deleted file mode 100644
index b9a3250b..00000000
--- a/common/pom.xml
+++ /dev/null
@@ -1,25 +0,0 @@
-
-
-
- java-sdk
- io.keploy
- 1.0.0-SNAPSHOT
-
- 4.0.0
-
- common
-
- 1.8
- 1.8
-
-
-
- io.keploy
- core
- 1.0.0-SNAPSHOT
- compile
-
-
-
\ No newline at end of file
diff --git a/common/src/main/java/io/keploy/utils/AssertKTests.java b/common/src/main/java/io/keploy/utils/AssertKTests.java
deleted file mode 100644
index 854ffb87..00000000
--- a/common/src/main/java/io/keploy/utils/AssertKTests.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package io.keploy.utils;
-
-import java.util.concurrent.atomic.AtomicBoolean;
-
-/**
- * This class help in providing the result of test recorded by keploy when run along the unit tests
- */
-public class AssertKTests {
- public static final AtomicBoolean finalTestResult = new AtomicBoolean(false);
-
- public static boolean result() {
- return finalTestResult.get();
- }
-}
\ No newline at end of file
diff --git a/common/src/main/java/io/keploy/utils/GenericRequestWrapper.java b/common/src/main/java/io/keploy/utils/GenericRequestWrapper.java
deleted file mode 100644
index 0044826e..00000000
--- a/common/src/main/java/io/keploy/utils/GenericRequestWrapper.java
+++ /dev/null
@@ -1,208 +0,0 @@
-package io.keploy.utils;
-
-import java.io.BufferedReader;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.URLEncoder;
-import java.util.Arrays;
-import java.util.Enumeration;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import javax.servlet.ReadListener;
-import javax.servlet.ServletInputStream;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-
-/**
- * GenericRequestWrapper is a wrapper over the request sent to the filter. So that the request data is cached/stored and
- * not lost
- */
-public class GenericRequestWrapper extends HttpServletRequestWrapper {
- private static final String FORM_CONTENT_TYPE = "application/x-www-form-urlencoded";
- private final ByteArrayOutputStream cachedContent;
- private final Integer contentCacheLimit;
- private ServletInputStream inputStream;
- private BufferedReader reader;
-
- public GenericRequestWrapper(HttpServletRequest request) {
- super(request);
- int contentLength = request.getContentLength();
- this.cachedContent = new ByteArrayOutputStream(contentLength >= 0 ? contentLength : 1024);
- this.contentCacheLimit = null;
- }
-
- public GenericRequestWrapper(HttpServletRequest request, int contentCacheLimit) {
- super(request);
- this.cachedContent = new ByteArrayOutputStream(contentCacheLimit);
- this.contentCacheLimit = contentCacheLimit;
- }
-
- public ServletInputStream getInputStream() throws IOException {
- if (this.inputStream == null) {
- this.inputStream = new ContentCachingInputStream(this.getRequest().getInputStream());
- }
-
- return this.inputStream;
- }
-
- public String getCharacterEncoding() {
- String enc = super.getCharacterEncoding();
- return enc != null ? enc : "ISO-8859-1";
- }
-
- public BufferedReader getReader() throws IOException {
- if (this.reader == null) {
- this.reader = new BufferedReader(new InputStreamReader(this.getInputStream(), this.getCharacterEncoding()));
- }
-
- return this.reader;
- }
-
- public String getParameter(String name) {
- if (this.cachedContent.size() == 0 && this.isFormPost()) {
- this.writeRequestParametersToCachedContent();
- }
-
- return super.getParameter(name);
- }
-
- public Map getParameterMap() {
- if (this.cachedContent.size() == 0 && this.isFormPost()) {
- this.writeRequestParametersToCachedContent();
- }
-
- return super.getParameterMap();
- }
-
- public Enumeration getParameterNames() {
- if (this.cachedContent.size() == 0 && this.isFormPost()) {
- this.writeRequestParametersToCachedContent();
- }
-
- return super.getParameterNames();
- }
-
- public String[] getParameterValues(String name) {
- if (this.cachedContent.size() == 0 && this.isFormPost()) {
- this.writeRequestParametersToCachedContent();
- }
-
- return super.getParameterValues(name);
- }
-
- private boolean isFormPost() {
- String contentType = this.getContentType();
- return contentType != null && contentType.contains("application/x-www-form-urlencoded") && (this.getMethod().equals("POST"));
- }
-
- private void writeRequestParametersToCachedContent() {
- try {
- if (this.cachedContent.size() == 0) {
- String requestEncoding = this.getCharacterEncoding();
- Map form = super.getParameterMap();
- Iterator nameIterator = form.keySet().iterator();
-
- while (nameIterator.hasNext()) {
- String name = (String) nameIterator.next();
- List values = Arrays.asList(form.get(name));
- Iterator valueIterator = values.iterator();
-
- while (valueIterator.hasNext()) {
- String value = (String) valueIterator.next();
- this.cachedContent.write(URLEncoder.encode(name, requestEncoding).getBytes());
- if (value != null) {
- this.cachedContent.write(61);
- this.cachedContent.write(URLEncoder.encode(value, requestEncoding).getBytes());
- if (valueIterator.hasNext()) {
- this.cachedContent.write(38);
- }
- }
- }
-
- if (nameIterator.hasNext()) {
- this.cachedContent.write(38);
- }
- }
- }
-
- } catch (IOException var8) {
- throw new IllegalStateException("Failed to write request parameters to cached content", var8);
- }
- }
-
- public byte[] getData() {
- return this.cachedContent.toByteArray();
- }
-
- protected void handleContentOverflow(int contentCacheLimit) {
- }
-
- private class ContentCachingInputStream extends ServletInputStream {
- private final ServletInputStream is;
- private boolean overflow = false;
-
- public ContentCachingInputStream(ServletInputStream is) {
- this.is = is;
- }
-
- public int read() throws IOException {
- int ch = this.is.read();
- if (ch != -1 && !this.overflow) {
- if (GenericRequestWrapper.this.contentCacheLimit != null && GenericRequestWrapper.this.cachedContent.size() == GenericRequestWrapper.this.contentCacheLimit) {
- this.overflow = true;
- GenericRequestWrapper.this.handleContentOverflow(GenericRequestWrapper.this.contentCacheLimit);
- } else {
- GenericRequestWrapper.this.cachedContent.write(ch);
- }
- }
-
- return ch;
- }
-
- public int read(byte[] b) throws IOException {
- int count = this.is.read(b);
- this.writeToCache(b, 0, count);
- return count;
- }
-
- private void writeToCache(final byte[] b, final int off, int count) {
- if (!this.overflow && count > 0) {
- if (GenericRequestWrapper.this.contentCacheLimit != null && count + GenericRequestWrapper.this.cachedContent.size() > GenericRequestWrapper.this.contentCacheLimit) {
- this.overflow = true;
- GenericRequestWrapper.this.cachedContent.write(b, off, GenericRequestWrapper.this.contentCacheLimit - GenericRequestWrapper.this.cachedContent.size());
- GenericRequestWrapper.this.handleContentOverflow(GenericRequestWrapper.this.contentCacheLimit);
- return;
- }
-
- GenericRequestWrapper.this.cachedContent.write(b, off, count);
- }
-
- }
-
- public int read(final byte[] b, final int off, final int len) throws IOException {
- int count = this.is.read(b, off, len);
- this.writeToCache(b, off, count);
- return count;
- }
-
- public int readLine(final byte[] b, final int off, final int len) throws IOException {
- int count = this.is.readLine(b, off, len);
- this.writeToCache(b, off, count);
- return count;
- }
-
- public boolean isFinished() {
- return this.is.isFinished();
- }
-
- public boolean isReady() {
- return this.is.isReady();
- }
-
- public void setReadListener(ReadListener readListener) {
- this.is.setReadListener(readListener);
- }
- }
-}
diff --git a/common/src/main/java/io/keploy/utils/GenericResponseWrapper.java b/common/src/main/java/io/keploy/utils/GenericResponseWrapper.java
deleted file mode 100644
index fa1e4207..00000000
--- a/common/src/main/java/io/keploy/utils/GenericResponseWrapper.java
+++ /dev/null
@@ -1,96 +0,0 @@
-package io.keploy.utils;
-
-import javax.servlet.ServletOutputStream;
-import javax.servlet.WriteListener;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpServletResponseWrapper;
-import java.io.*;
-
-/**
- * GenericResponseWrapper is a wrapper over the Response sent to the filter. So that the response data is cached/stored and
- * not lost
- */
-public class GenericResponseWrapper extends HttpServletResponseWrapper {
- private ServletOutputStream outputStream;
- private PrintWriter writer;
- private FilterServletOutputStream copier;
-
- public GenericResponseWrapper(HttpServletResponse response) throws IOException {
- super(response);
- }
-
- @Override
- public ServletOutputStream getOutputStream() throws IOException {
- if (writer != null) {
- throw new IllegalStateException("getWriter() has already been called on this response.");
- }
-
- if (outputStream == null) {
- outputStream = getResponse().getOutputStream();
- copier = new FilterServletOutputStream(outputStream);
- }
-
- return copier;
- }
-
- @Override
- public PrintWriter getWriter() throws IOException {
- if (outputStream != null) {
- throw new IllegalStateException("getOutputStream() has already been called on this response.");
- }
-
- if (writer == null) {
- copier = new FilterServletOutputStream(getResponse().getOutputStream());
- writer = new PrintWriter(new OutputStreamWriter(copier, getResponse().getCharacterEncoding()), true);
- }
-
- return writer;
- }
-
- @Override
- public void flushBuffer() throws IOException {
- if (writer != null) {
- writer.flush();
- } else if (outputStream != null) {
- copier.flush();
- }
- }
-
- public byte[] getData() {
- if (copier != null) {
- return copier.getData();
- } else {
- return new byte[0];
- }
- }
-}
-
-class FilterServletOutputStream extends ServletOutputStream {
- private final OutputStream outputStream;
- private final ByteArrayOutputStream copy;
-
- public FilterServletOutputStream(OutputStream outputStream) {
- this.outputStream = outputStream;
- this.copy = new ByteArrayOutputStream(1024);
- }
-
- @Override
- public void write(int b) throws IOException {
- outputStream.write(b);
- copy.write(b);
- }
-
- public byte[] getData() {
- return copy.toByteArray();
- }
-
- @Override
- public boolean isReady() {
- return false;
- }
-
- @Override
- public void setWriteListener(WriteListener writeListener) {
-
- }
-}
\ No newline at end of file
diff --git a/common/src/main/java/io/keploy/utils/HaltThread.java b/common/src/main/java/io/keploy/utils/HaltThread.java
deleted file mode 100644
index 1f844fb6..00000000
--- a/common/src/main/java/io/keploy/utils/HaltThread.java
+++ /dev/null
@@ -1,32 +0,0 @@
-package io.keploy.utils;
-
-import java.util.concurrent.CountDownLatch;
-
-/**
- * While Keploy is running tests in unit test file all the test recorded will be running in separate threads. Threads
- * are needed to be halted at the end as we need to capture the progress at once . HaltThread class is used for halting
- * threads
- */
-public class HaltThread {
- private static volatile HaltThread haltThread;
- private final CountDownLatch countDownLatch;
-
- private HaltThread() {
- countDownLatch = new CountDownLatch(2);
- }
-
- public static HaltThread getInstance() {
- if (haltThread == null) {
- synchronized (HaltThread.class) { //thread safe.
- if (haltThread == null) {
- haltThread = new HaltThread();
- }
- }
- }
- return haltThread;
- }
-
- public CountDownLatch getCountDownLatch() {
- return countDownLatch;
- }
-}
\ No newline at end of file
diff --git a/common/src/main/java/io/keploy/utils/HttpStatusReasons.java b/common/src/main/java/io/keploy/utils/HttpStatusReasons.java
deleted file mode 100644
index 5c3463f3..00000000
--- a/common/src/main/java/io/keploy/utils/HttpStatusReasons.java
+++ /dev/null
@@ -1,92 +0,0 @@
-package io.keploy.utils;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class HttpStatusReasons {
-
- private static final String UNKNOWN_STATUS = "Unknown Status";
-
- private static final Map REASONS = new HashMap<>();
-
-
- static {
- //informational
- REASONS.put(100, "Continue");
- REASONS.put(101, "Switching Protocols");
- REASONS.put(102, "Processing");
- REASONS.put(103, "Checkpoint");
-
- // successful
- REASONS.put(200, "OK");
- REASONS.put(201, "Created");
- REASONS.put(202, "Accepted");
- REASONS.put(203, "Non-Authoritative Information");
- REASONS.put(204, "No Content");
- REASONS.put(205, "Reset Content");
- REASONS.put(206, "Partial Content");
- REASONS.put(207, "Multi-Status");
- REASONS.put(208, "Already Reported");
- REASONS.put(209, "IM Used");
-
- // redirection
- REASONS.put(300, "Multiple Choices");
- REASONS.put(301, "Moved Permanently");
- REASONS.put(302, "Found");
- REASONS.put(303, "See Other");
- REASONS.put(304, "Not Modified");
- REASONS.put(305, "Use Proxy");
- REASONS.put(307, "Temporary Redirect");
- REASONS.put(308, "Permanent Redirect");
-
- // client error
- REASONS.put(400, "Bad Request");
- REASONS.put(401, "Unauthorized");
- REASONS.put(402, "Payment Required");
- REASONS.put(403, "Forbidden");
- REASONS.put(404, "Not Found");
- REASONS.put(405, "Method Not Allowed");
- REASONS.put(406, "Not Acceptable");
- REASONS.put(407, "Proxy Authentication Required");
- REASONS.put(408, "Request Timeout");
- REASONS.put(409, "Conflict");
- REASONS.put(410, "Gone");
- REASONS.put(411, "Length Required");
- REASONS.put(412, "Precondition Failed");
- REASONS.put(413, "Payload Too Large");
- REASONS.put(414, "URI Too Long");
- REASONS.put(415, "Unsupported Media Type");
- REASONS.put(416, "Requested range not satisfiable");
- REASONS.put(417, "Expectation Failed");
- REASONS.put(418, "I'm a teapot");
- REASONS.put(421, "Destination Locked");
- REASONS.put(422, "Unprocessable Entity");
- REASONS.put(423, "Locked");
- REASONS.put(424, "Failed Dependency");
- REASONS.put(425, "Too Early");
- REASONS.put(426, "Upgrade Required");
- REASONS.put(428, "Precondition Required");
- REASONS.put(429, "Too Many Requests");
- REASONS.put(431, "Request Header Fields Too Large");
- REASONS.put(451, "Unavailable For Legal Reasons");
-
- //server error
- REASONS.put(500, "Internal Server Error");
- REASONS.put(501, "Not Implemented");
- REASONS.put(502, "Bad Gateway");
- REASONS.put(503, "Service Unavailable");
- REASONS.put(504, "Gateway Timeout");
- REASONS.put(505, "HTTP Version not supported");
- REASONS.put(506, "Variant Also Negotiates");
- REASONS.put(507, "Insufficient Storage");
- REASONS.put(508, "Loop Detected");
- REASONS.put(509, "Bandwidth Limit Exceeded");
- REASONS.put(510, "Not Extended");
- REASONS.put(511, "Network Authentication Required");
- }
-
- public static String getStatusMsg(Integer val) {
- return REASONS.getOrDefault(val, UNKNOWN_STATUS);
- }
-}
-
diff --git a/common/src/main/java/io/keploy/utils/MagicBytes.java b/common/src/main/java/io/keploy/utils/MagicBytes.java
deleted file mode 100644
index be4074a7..00000000
--- a/common/src/main/java/io/keploy/utils/MagicBytes.java
+++ /dev/null
@@ -1,277 +0,0 @@
-package io.keploy.utils;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * Magic bytes can be checked at : https://en.wikipedia.org/wiki/List_of_file_signatures
- * And converted to integer hex params using JS in the browser console, for example :
- * "78 01 73 0D 62 ?? 60".split(" ").map(str => str == "??" ? "MagicBytes.ANY" : "0x"+str).join(", ")
- * prints => "0x78, 0x01, 0x73, 0x0D, 0x62, MagicBytes.ANY, 0x60"
- */
-public enum MagicBytes {
- // Executables
- EXE(Header.builder()
- .add("EXE (includes PE32 + DOS)", 0x4D, 0x5A)),
- MACH_O(Header.builder()
- .add("MACH-O 32bit", 0xFE, 0xED, 0xFA, 0xCE)
- .add("MACH-O 64bit", 0xFE, 0xED, 0xFA, 0xCF)),
- SHEBANG(Header.builder()
- .add("SHEBANG (#!) script", 0x23, 0x21)),
- ELF(Header.builder()
- .add("ELF", 0x7F, 0x45, 0x4C, 0x46)),
- COM(Header.builder()
- .add("COM", 0xC9)),
- DALVIK(Header.builder()
- .add("DEX", 0x64, 0x65, 0x78, 0x0A, 0x30, 0x33, 0x35, 0x00)),
- DMG(Header.builder()
- .add("DMG", 0x78, 0x01, 0x73, 0x0D, 0x62, 0x62, 0x60)),
- // Archives
- SQLITE(Header.builder()
- .add("SQLITE3", 0x53, 0x51, 0x4c, 0x69, 0x74, 0x65, 0x20, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x20, 0x33, 0x00)),
- TAR_LZW(Header.builder()
- .add("TAR LZW", 0x1F, 0x9D)
- .add("TAR LZ", 0x1F, 0xA0)),
- BZIP2(Header.builder()
- .add("BZ2", 0x42, 0x5A, 0x68)),
- LZIP(Header.builder()
- .add("LZIP", 0x4C, 0x5A, 0x49, 0x50)),
- ZIP(Header.builder()
- .add("ZIP", 0x50, 0x4B, 0x03, 0x04)
- .add("ZIP (empty)", 0x50, 0x4B, 0x05, 0x06)
- .add("ZIP (spanned)", 0x50, 0x4B, 0x07, 0x08)),
- RAR(Header.builder()
- .add("RAR v1.5+", 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x00)
- .add("RAR v5+", 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x01, 0x00)),
- ISO(Header.builder()
- .add("ISO9660 CD/DVD Image File", 0x43, 0x44, 0x30, 0x30, 0x31)),
- VMDK(Header.builder()
- .add("VMDK", 0x4B, 0x44, 0x4D)),
- VDI(Header.builder()
- .add("VDI (VirtualBox)", 0x3C, 0x3C, 0x3C, 0x20, 0x4F, 0x72, 0x61, 0x63, 0x6C, 0x65, 0x20, 0x56, 0x4D, 0x20, 0x56, 0x69, 0x72, 0x74, 0x75, 0x61, 0x6C, 0x42, 0x6F, 0x78, 0x20, 0x44, 0x69, 0x73, 0x6B, 0x20, 0x49, 0x6D, 0x61, 0x67, 0x65, 0x20, 0x3E, 0x3E, 0x3E)),
- VHD(Header.builder()
- .add("VHD (Win)", 0x63, 0x6F, 0x6E, 0x6E, 0x65, 0x63, 0x74, 0x69, 0x78)),
- VHDX(Header.builder()
- .add("VHDX (Win8)", 0x76, 0x68, 0x64, 0x78, 0x66, 0x69, 0x6C, 0x65)),
- ISZ(Header.builder()
- .add("ISZ (compressed ISO)", 0x49, 0x73, 0x5A, 0x21)),
- EVT(Header.builder()
- .add("Windows Event Viewer", 0x4C, 0x66, 0x4C, 0x65)),
- XAR(Header.builder()
- .add("eXtensible ARchive", 0x78, 0x61, 0x72, 0x21)),
- TAR(Header.builder()
- .add("TAR (subpackage)", 0x75, 0x73, 0x74, 0x61, 0x72, 0x00, 0x30, 0x30)
- .add("TAR", 0x75, 0x73, 0x74, 0x61, 0x72, 0x20, 0x20, 0x00)),
- SEVEN_ZIP(Header.builder()
- .add("7Z", 0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C)),
- GZIP(Header.builder()
- .add("GZ", 0x1F, 0x8B)),
- MATROSKA(Header.builder()
- .add("MKV/WebM", 0x1A, 0x45, 0xDF, 0xA3)),
- DICOM(Header.builder()
- .add("DICOM", 0x44, 0x49, 0x43, 0x4D)),
- ZLIB(Header.builder()
- .add("ZLIB (No compression - no preset dictionary)", 0x78, 0x01)
- .add("ZLIB (Best speed - no preset dictionary)", 0x78, 0x5E)
- .add("ZLIB (Default compression - no preset dictionary)", 0x78, 0x9C)
- .add("ZLIB (Best compression - no preset dictionary)", 0x78, 0xDA)
- .add("ZLIB (No compression - with preset dictionary)", 0x78, 0x20)
- .add("ZLIB (Best speed - with preset dictionary)", 0x78, 0x7D)
- .add("ZLIB (Default compression - with preset dictionary)", 0x78, 0xBB)
- .add("ZLIB (Best compression - with preset dictionary)", 0x78, 0xF9)),
- LZFSE(Header.builder()
- .add("LZFSE (Apple)", 0x62, 0x76, 0x78, 0x32)),
- PST(Header.builder()
- .add("Microsoft Outlook", 0x21, 0x42, 0x44, 0x4E)),
- // Text
- REG(Header.builder()
- .add("Windows Registry File/DAT", 0x72, 0x65, 0x67, 0x66)),
- DAT(Header.builder()
- .add("DAT/USMT 3+", 0x50, 0x4D, 0x4F, 0x43, 0x43, 0x4D, 0x4F, 0x43)),
- OFFICE_OLD(Header.builder()
- .add("Compound File Binary Format (MS-Office)", 0xD0, 0xCF, 0x11, 0xE0, 0xA1, 0xB1, 0x1A, 0xE1)),
- PDF(Header.builder()
- .add("PDF", 0x25, 0x50, 0x44, 0x46, 0x2d)),
- XML(Header.builder()
- .add("XML", 0x3c, 0x3f, 0x78, 0x6d, 0x6c, 0x20)),
- RTT(Header.builder()
- .add("RTT", 0x7B, 0x5C, 0x72, 0x74, 0x66, 0x31)),
- // Images
- PNG(Header.builder()
- .add("PNG", 0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A)),
- PBM(Header.builder()
- .add("PBM", 0x50, 0x31, 0x0A)),
- PGM(Header.builder()
- .add("PGM", 0x50, 0x32, 0x0A)),
- PPM(Header.builder()
- .add("PPM", 0x50, 0x33, 0x0A)),
- JPG(Header.builder()
- .add("JPG Raw", 0xFF, 0xD8, 0xFF, 0xDB)
- .add("JPG Raw 2", 0xFF, 0xD8, 0xFF, 0xEE)
- .add("JPG JFIF", 0xFF, 0xD8, 0xFF, 0xE0, 0x00, 0x10, 0x4A, 0x46, 0x49, 0x46, 0x00, 0x01)
- .add("JPG EXIF", 0xFF, 0xD8, 0xFF, 0xE1, MagicBytes.ANY, MagicBytes.ANY, 0x45, 0x78, 0x69, 0x66, 0x00, 0x00)),
- GIF(Header.builder()
- .add("GIF87a", 0x47, 0x49, 0x46, 0x38, 0x37, 0x61)
- .add("GIF89a", 0x47, 0x49, 0x46, 0x38, 0x39, 0x61)),
- TIFF(Header.builder()
- .add("TIFF LE", 0x49, 0x49, 0x2A, 0x00)
- .add("TIFF BE", 0x4D, 0x4D, 0x00, 0x2A)),
- BMP(Header.builder()
- .add("BMP", 0x42, 0x4D)),
- // Audio
- WAV(Header.builder()
- .add("WAV", 0x52, 0x49, 0x46, 0x46, MagicBytes.ANY, MagicBytes.ANY, MagicBytes.ANY, MagicBytes.ANY, 0x57, 0x41, 0x56, 0x45)),
- MP3(Header.builder()
- .add("MP3", 0x49, 0x44, 0x33)),
- FLAC(Header.builder()
- .add("FLAC", 0x66, 0x4C, 0x61, 0x43)),
- MIDI(Header.builder()
- .add("MIDI", 0x4D, 0x54, 0x68, 0x64)),
- // Video
- AVI(Header.builder()
- .add("AVI", 0x52, 0x49, 0x46, 0x46, MagicBytes.ANY, MagicBytes.ANY, MagicBytes.ANY, MagicBytes.ANY, 0x41, 0x56, 0x49, 0x20)),
- MP4(Header.builder()
- .add("MP4", 0x00, 0x00, 0x00, 0x18, 0x66, 0x74, 0x79, 0x70, 0x69, 0x73, 0x6F, 0x6D)),
- FLV(Header.builder()
- .add("FLV", 0x46, 0x4C, 0x56)),
- ;
-
- private static final int ANY = -1;
- private final Header[] headers;
-
- private MagicBytes(Header.Builder builder) {
- this.headers = builder.build();
- }
-
- public Header[] getHeaders() {
- return headers;
- }
-
- /* Checks if bytes match a specific magic bytes sequence.
- * Tries to match each header sequentially, the code
- * short-circuits on match found. */
- public Header is(byte[] bytes) {
- boolean matches;
- for (Header header : headers) {
- matches = true;
- for (int i = 0; i < header.bytes.length; i++) {
- if (header.bytes[i] != ANY && header.bytes[i] != Byte.toUnsignedInt(bytes[i])) {
- matches = false;
- break;
- }
- }
- if (matches)
- return header;
- }
- return null;
- }
-
- // Extracts head bytes from any stream
- public static byte[] extract(InputStream is, int length) throws IOException {
- try {
- byte[] buffer = new byte[length];
- is.read(buffer, 0, length);
- return buffer;
- } finally {
- is.close();
- }
- }
-
- public static Header matches(byte[] bytes) {
- Header header;
- for (MagicBytes magic : MagicBytes.values()) {
- header = magic.is(bytes);
- if (header != null)
- return header;
- }
- return null;
- }
-
- //TODO: Add support for more content type.
- public static String getContentType(Header ct) {
- String contentType = ct.getName();
- if (contentType.contains("PNG")) {
- return "png";
- } else if (contentType.contains("JPG")) {
- return "jpg";
- } else if (contentType.contains("PDF")) {
- return "pdf";
- } else if (contentType.contains("XML")) {
- return "xml";
- }
- return "";
- }
-
- /* Convenience methods */
-
- public Header is(String name) throws FileNotFoundException, IOException {
- return is(new File(name));
- }
-
- public Header is(File file) throws FileNotFoundException, IOException {
- return is(new FileInputStream(file));
- }
-
- public Header is(InputStream is) throws IOException {
- return is(extract(is, 50));
- }
-
- public static Header matching(String name) throws FileNotFoundException, IOException {
- return matching(new File(name));
- }
-
- public static Header matching(File file) throws FileNotFoundException, IOException {
- return matching(new FileInputStream(file));
- }
-
- public static Header matching(InputStream is) throws IOException {
- return matches(extract(is, 50));
- }
-
- public static final class Header {
- private final String name;
- private final int[] bytes;
-
- public Header(String name, int[] bytes) {
- this.name = name;
- this.bytes = bytes;
- }
-
- public String getName() {
- return name;
- }
-
- public int[] getBytes() {
- return bytes;
- }
-
- @Override
- public String toString() {
- return String.format("%s: %s", name, Arrays.toString(bytes));
- }
-
- private static Builder builder() {
- return new Builder();
- }
-
- private static final class Builder {
- private final List headers = new ArrayList<>();
-
-
- public Builder add(String name, int... bytes) {
- headers.add(new Header(name, bytes));
- return this;
- }
-
- public Header[] build() {
- return headers.toArray(new Header[0]);
- }
- }
- }
-}
\ No newline at end of file
diff --git a/common/src/main/java/io/keploy/utils/MultipartContent.java b/common/src/main/java/io/keploy/utils/MultipartContent.java
deleted file mode 100644
index 0f6c1747..00000000
--- a/common/src/main/java/io/keploy/utils/MultipartContent.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package io.keploy.utils;
-
-
-public class MultipartContent {
-
- private final String fileName;
- private final byte[] body;
-
- public MultipartContent(String fileName, byte[] body) {
- this.fileName = fileName;
- this.body = body;
- }
-
- public String getFileName() {
- return fileName;
- }
-
- public byte[] getBody() {
- return body;
- }
-
-}
diff --git a/common/src/main/java/io/keploy/utils/ProcessSQL.java b/common/src/main/java/io/keploy/utils/ProcessSQL.java
deleted file mode 100644
index 14f201cb..00000000
--- a/common/src/main/java/io/keploy/utils/ProcessSQL.java
+++ /dev/null
@@ -1,96 +0,0 @@
-package io.keploy.utils;
-
-import com.google.protobuf.InvalidProtocolBufferException;
-import io.keploy.grpc.stubs.Service;
-import io.keploy.regression.Mock;
-import io.keploy.regression.context.Context;
-import io.keploy.regression.context.Kcontext;
-import lombok.NoArgsConstructor;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-
-@NoArgsConstructor
-
-/// this is sql specific process dep
-public class ProcessSQL {
-
- private static final Logger logger = LogManager.getLogger(ProcessSQL.class);
-
- // @SafeVarargs
- public static Service.Table ProcessDep(Map meta, Service.Table table, int id) throws InvalidProtocolBufferException {
-
- Kcontext kctx = Context.getCtx();
- if (kctx == null) {
- logger.error("dependency mocking failed: failed to get Keploy context");
- return null;
- }
- switch (kctx.getMode()) {
- case MODE_TEST:
- if (kctx.getMock().size() > 0 && kctx.getMock().get(0).getKind().equals("SQL")) {
- List mocks = kctx.getMock();
- if (mocks.size() > 0) {
- final Service.Table ttable = mocks.get(0).getSpec().getTable();
- mocks.remove(0);
- return ttable;
- }
- // for int
- }
-
- break;
- case MODE_RECORD:
-
- Service.Mock.SpecSchema specSchema = null;
-
- specSchema = Service.Mock.SpecSchema.newBuilder().putAllMetadata(meta).setInt(id).setTable(table).setType("TABLE").build();
-
- Service.Mock mock = Service.Mock.newBuilder()
- .setVersion(Mock.Version.V1_BETA1.value)
- .setName("")
- .setKind(Mock.Kind.SQL.value)
- .setSpec(specSchema)
- .build();
-
- kctx.getMock().add(mock);
- break;
- }
- return null;
- }
-
- public static List toRowList(List