java springboot

import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferUtils;
import org.springframework.stereotype.Service;
import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.web.reactive.function.client.WebClientResponseException;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import java.io.File;
import java.io.FileOutputStream;
import java.nio.channels.FileChannel;

@Service
public class GitDownloadService {

    private final WebClient webClient;

    public GitDownloadService() {
        this.webClient = WebClient.builder()
                .codecs(configurer -> configurer.defaultCodecs().maxInMemorySize(16 * 1024 * 1024)) // 16 MB buffer
                .build();
    }

    public Mono<File> downloadZip(String repoUrl, File downloadDir, String token) {
        Flux<DataBuffer> body = webClient.get()
                .uri(repoUrl)
                .header("Authorization", "token " + token) // Use "Bearer" for OAuth tokens if needed
                .header("Accept", "application/vnd.github.v3+json")
                .header("User-Agent", "Java-WebClient")
                .retrieve()
                .bodyToFlux(DataBuffer.class); // Stream the file content as DataBuffers

        return DataBufferUtils.join(body) // Combine the buffers into a single DataBuffer
                .flatMap(dataBuffer -> saveToFile(dataBuffer, downloadDir))
                .onErrorMap(WebClientResponseException.class,
                        ex -> new RuntimeException("Error downloading file: " + ex.getStatusCode() + " " + ex.getResponseBodyAsString(), ex))
                .onErrorMap(ex -> !(ex instanceof RuntimeException),
                        ex -> new RuntimeException("Failed to download ZIP file", ex));
    }

    private Mono<File> saveToFile(DataBuffer dataBuffer, File downloadDir) {
        try {
            if (!downloadDir.exists() && !downloadDir.mkdirs()) {
                throw new RuntimeException("Failed to create directory: " + downloadDir.getAbsolutePath());
            }
            File zipFile = new File(downloadDir, "repo.zip");
            try (FileChannel channel = new FileOutputStream(zipFile).getChannel()) {
                channel.write(dataBuffer.asByteBuffer());
            } finally {
                DataBufferUtils.release(dataBuffer); // Release the buffer
            }
            return Mono.just(zipFile);
        } catch (Exception e) {
            return Mono.error(new RuntimeException("Error saving ZIP file: " + e.getMessage(), e));
        }
    }
}

download service

import org.springframework.web.reactive.function.client.ExchangeStrategies;
import org.springframework.web.reactive.function.client.WebClient;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

private final WebClient webClient = WebClient.builder()
        .exchangeStrategies(ExchangeStrategies.builder()
                .codecs(configurer -> configurer.defaultCodecs().maxInMemorySize(16 * 1024 * 1024))
                .build())
        .build();

private File downloadZip(String repoUrl, File downloadDir, String token) throws IOException {
    byte[] zipBytes = webClient.get()
            .uri(repoUrl)
            .header("Authorization", "Bearer " + token)
            .retrieve()
            .bodyToMono(byte[].class)
            .block();

    File zipFile = new File(downloadDir, "repo.zip");
    try (FileOutputStream out = new FileOutputStream(zipFile)) {
        out.write(zipBytes);
    } catch (IOException e) {
        throw new IOException("Failed to download the zip file. Error: " + e.getMessage(), e);
    }
    return zipFile;
}

basic auth

import org.springframework.web.reactive.function.client.WebClient;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class BitbucketService {

private final WebClient webClient;

public BitbucketService() {
    this.webClient = WebClient.builder().build();
}

public String fetchDataWithBasicAuth(String apiUrl, String username, String password) {
    String authHeader = generateBasicAuthHeader(username, password);

    return webClient.get()
            .uri(apiUrl)
            .header("Authorization", authHeader)
            .retrieve()
            .bodyToMono(String.class)
            .block();
}

private String generateBasicAuthHeader(String username, String password) {
    String credentials = username + ":" + password;
    return "Basic " + Base64.getEncoder().encodeToString(credentials.getBytes(StandardCharsets.UTF_8));
}

}
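
For reference, a hypothetical call to this service could look like the following (the Bitbucket URL and credentials are placeholders, not from the original notes):

// Hypothetical usage of BitbucketService; URL and credentials are placeholders.
BitbucketService bitbucketService = new BitbucketService();
String projects = bitbucketService.fetchDataWithBasicAuth(
        "https://bitbucket.example.com/rest/api/1.0/projects",
        "my-username",
        "my-app-password");
System.out.println(projects);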

webclient config

import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslContextBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.client.reactive.ReactorClientHttpConnector;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.netty.http.client.HttpClient;

import javax.net.ssl.TrustManagerFactory;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.KeyStore;

@Configuration
public class WebClientConfig {

    @Bean
    public WebClient webClient() {
        try {
            // Load the custom JKS truststore
            Path jksPath = Path.of("/path/to/cert1.jks");
            KeyStore trustStore = KeyStore.getInstance("JKS");
            try (InputStream in = Files.newInputStream(jksPath)) {
                trustStore.load(in, "your_jks_password".toCharArray());
            }

            // Build a Netty SslContext that trusts the certificates in the JKS file
            TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
            tmf.init(trustStore);
            SslContext sslContext = SslContextBuilder.forClient()
                    .trustManager(tmf)
                    .build();

            // Configure the Reactor Netty HttpClient with the custom SslContext
            HttpClient httpClient = HttpClient.create()
                    .secure(spec -> spec.sslContext(sslContext));

            return WebClient.builder()
                    .clientConnector(new ReactorClientHttpConnector(httpClient))
                    .build();

        } catch (Exception e) {
            throw new RuntimeException("Error setting up WebClient with custom SSL", e);
        }
    }

}
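
The keystore path and password are hardcoded above; one option is to inject them from application properties instead. A minimal sketch, assuming standard Spring @Value injection (the property names below are made up for illustration):

import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;

// Sketch: externalize the truststore location and password instead of hardcoding them.
// The property names (custom.ssl.trust-store, custom.ssl.trust-store-password) are illustrative.
@Configuration
public class WebClientSslProperties {

    @Value("${custom.ssl.trust-store}")
    private String trustStorePath;

    @Value("${custom.ssl.trust-store-password}")
    private String trustStorePassword;

    public String getTrustStorePath() { return trustStorePath; }

    public String getTrustStorePassword() { return trustStorePassword; }
}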

=====

import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Mono;

import java.io.File;
import java.io.FileOutputStream;
import java.nio.file.Path;

@Service
public class DownloadService {

private final WebClient webClient;

@Autowired
public DownloadService(WebClient webClient) {
    this.webClient = webClient;
}

public Mono<File> downloadZip(String repoUrl, Path downloadDir) {
    Path zipFilePath = downloadDir.resolve("repo.zip");
    return webClient
            .get()
            .uri(repoUrl)
            .retrieve()
            .bodyToMono(byte[].class)
            .map(bytes -> {
                try (FileOutputStream out = new FileOutputStream(zipFilePath.toFile())) {
                    out.write(bytes);
                } catch (Exception e) {
                    throw new RuntimeException("Failed to write zip file", e);
                }
                return zipFilePath.toFile();
            });
}

}

=====

import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import org.apache.hc.client5.http.ssl.TrustSelfSignedStrategy;
import org.apache.hc.core5.ssl.SSLContextBuilder;
import java.net.URL;

public class SSLConnectionUtil {

public void connectWithCustomSSL(String repoUrl) {
    try {
        // Create SSLContext allowing self-signed certificates
        SSLContext sslContext = SSLContextBuilder.create()
                .loadTrustMaterial(new TrustSelfSignedStrategy())
                .build();

        // Open a connection and set the SSL factory
        URL url = new URL(repoUrl);
        HttpsURLConnection connection = (HttpsURLConnection) url.openConnection();
        connection.setSSLSocketFactory(sslContext.getSocketFactory());

        // Proceed with your connection logic here
        connection.connect();

        // Handle the connection (read response, handle data, etc.)
        // ...

    } catch (Exception e) {
        throw new RuntimeException("Error connecting with custom SSL", e);
    }
}

}

====

import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import org.apache.hc.client5.http.ssl.TrustSelfSignedStrategy;
import org.apache.hc.core5.ssl.SSLContextBuilder;

public class SSLConfigUtil {

static {
    try {
        SSLContext sslContext = SSLContextBuilder.create()
                .loadTrustMaterial(new TrustSelfSignedStrategy()) // Allows self-signed certificates
                .build();
        HttpsURLConnection.setDefaultSSLSocketFactory(sslContext.getSocketFactory());
    } catch (Exception e) {
        throw new RuntimeException("Failed to initialize SSL context", e);
    }
}

}

===== resolve cert error 3

import org.apache.hc.client5.http.classic.methods.HttpGet;
import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
import org.apache.hc.client5.http.impl.classic.HttpClients;
import org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManagerBuilder;
import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactoryBuilder;
import org.apache.hc.core5.ssl.SSLContextBuilder;
import org.apache.hc.core5.ssl.TrustStrategy;
import org.springframework.stereotype.Service;

import javax.net.ssl.SSLContext;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.security.KeyStore;

@Service
public class GitDownloadService {

private static final String TRUSTSTORE_PATH = "/path/to/your-keystore.jks";
private static final String TRUSTSTORE_PASSWORD = "your_password";

public void downloadZip(String repoUrl, File downloadDir) {
    try {
        // Load the JKS file
        KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
        try (FileInputStream trustStoreStream = new FileInputStream(TRUSTSTORE_PATH)) {
            trustStore.load(trustStoreStream, TRUSTSTORE_PASSWORD.toCharArray());
        }

        // Build SSL context with the loaded trust store
        SSLContext sslContext = SSLContextBuilder
            .create()
            .loadTrustMaterial(trustStore, (TrustStrategy) null)  // null -> trust all certificates in trust store
            .build();

        // Configure HttpClient with the custom SSL context
        try (CloseableHttpClient httpClient = HttpClients.custom()
                .setConnectionManager(PoolingHttpClientConnectionManagerBuilder.create()
                        .setSSLSocketFactory(SSLConnectionSocketFactoryBuilder.create()
                                .setSslContext(sslContext)
                                .build())
                        .build())
                .build()) {

            // Use the custom HttpClient (and its trust store) to download the file
            File zipFile = new File(downloadDir, "repo.zip");
            httpClient.execute(new HttpGet(repoUrl), response -> {
                try (InputStream in = response.getEntity().getContent()) {
                    Files.copy(in, zipFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
                }
                return null;
            });
        }
    } catch (Exception e) {
        throw new RuntimeException("Error downloading zip file", e);
    }
}

}

==== run gradle

package com.example.demo.service;

import org.springframework.stereotype.Service;
import reactor.core.publisher.Mono;

import java.io.*;
import java.net.URL;
import java.nio.file.Paths;
import java.time.Instant;
import java.util.Enumeration;
import java.util.Random;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

@Service
public class GitDownloadService {

private static final String BASE_DIR = "C:/path/to/selenium";

public Mono<String> downloadAndExtract(String repoUrl, String branch) {
    return Mono.fromCallable(() -> {
        System.out.println("Attempting to resolve path: " + new File(BASE_DIR).getAbsolutePath());
        String branchRepoUrl = String.format("%s/archive/refs/heads/%s.zip", repoUrl, branch);
        File baseDir = new File(BASE_DIR);
        if (!baseDir.exists() || !baseDir.isDirectory() || !baseDir.canWrite()) {
            throw new IOException("Base directory is invalid or not writable: " + BASE_DIR);
        }

        String uniqueDirName = "extracted_repo_" + Instant.now().getEpochSecond() + "_" + (new Random().nextInt(900) + 100);
        File extractDir = new File(baseDir, uniqueDirName);
        extractDir.mkdirs();

        File zipFilePath = downloadZip(branchRepoUrl, extractDir);
        unzip(zipFilePath, extractDir);
        zipFilePath.delete();

        return runGradleTest(extractDir.getAbsolutePath() + "/folder1/folder2");
    });
}

private File downloadZip(String repoUrl, File downloadDir) throws IOException {
    URL url = new URL(repoUrl);
    File zipFile = new File(downloadDir, "repo.zip");

    try (InputStream in = url.openStream(); FileOutputStream out = new FileOutputStream(zipFile)) {
        byte[] buffer = new byte[1024];
        int bytesRead;
        while ((bytesRead = in.read(buffer)) != -1) {
            out.write(buffer, 0, bytesRead);
        }
    }
    return zipFile;
}

private void unzip(File zipFile, File extractTo) throws IOException {
    try (ZipFile zip = new ZipFile(zipFile)) {
        Enumeration<? extends ZipEntry> entries = zip.entries();
        while (entries.hasMoreElements()) {
            ZipEntry entry = entries.nextElement();
            File entryDestination = new File(extractTo, entry.getName());

            if (entry.isDirectory()) {
                entryDestination.mkdirs();
            } else {
                entryDestination.getParentFile().mkdirs();
                try (InputStream in = zip.getInputStream(entry); OutputStream out = new FileOutputStream(entryDestination)) {
                    byte[] buffer = new byte[1024];
                    int length;
                    while ((length = in.read(buffer)) > 0) {
                        out.write(buffer, 0, length);
                    }
                }
            }
        }
    }
}

private String runGradleTest(String directoryPath) {
    StringBuilder output = new StringBuilder();
    try {
        // Build the command to run in the specified directory
        ProcessBuilder processBuilder = new ProcessBuilder("./gradlew", "clean", "test", "--info");
        processBuilder.directory(new File(directoryPath)); // Set working directory to the specific path
        processBuilder.redirectErrorStream(true);

        Process process = processBuilder.start();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                output.append(line).append("\n");
            }
        }

        int exitCode = process.waitFor();
        if (exitCode != 0) {
            throw new RuntimeException("Gradle command execution failed with exit code: " + exitCode);
        }

        return "Gradle command executed successfully:\n" + output.toString();
    } catch (Exception e) {
        throw new RuntimeException("Error executing Gradle command", e);
    }
}

}

===/

package com.example.demo.controller;

import com.example.demo.service.GitDownloadService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Mono;

@RestController
@RequestMapping("/api/git")
public class GitDownloadController {

    private final GitDownloadService gitDownloadService;

    @Autowired
    public GitDownloadController(GitDownloadService gitDownloadService) {
        this.gitDownloadService = gitDownloadService;
    }

    @PostMapping("/download")
    public Mono<ResponseEntity<String>> downloadGitRepo(
            @RequestParam String repoUrl,
            @RequestParam String branch) {
        return gitDownloadService.downloadAndExtract(repoUrl, branch)
                .map(extractedPath -> ResponseEntity.ok("Repository extracted to: " + extractedPath))
                .onErrorResume(e -> Mono.just(ResponseEntity.status(500).body("Error: " + e.getMessage())));
    }
}

====== service

package com.example.demo.service;

import org.springframework.stereotype.Service;
import reactor.core.publisher.Mono;

import java.io.*;
import java.net.URL;
import java.nio.file.*;
import java.time.Instant;
import java.util.Enumeration;
import java.util.Random;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

@Service
public class GitDownloadService {

private static final String BASE_DIR = "/root/extracted-repos/";

public Mono<String> downloadAndExtract(String repoUrl, String branch) {
    return Mono.fromCallable(() -> {
        String branchRepoUrl = String.format("%s/archive/refs/heads/%s.zip", repoUrl, branch);

        // Create unique directory name based on timestamp and random number
        String uniqueDirName = "extracted_repo_" + Instant.now().getEpochSecond() + "_" + (new Random().nextInt(900) + 100);
        Path extractDir = Paths.get(BASE_DIR, uniqueDirName);

        // Ensure the directory exists
        Files.createDirectories(extractDir);

        // Download and save zip file
        Path zipFilePath = downloadZip(branchRepoUrl, extractDir);

        // Extract the downloaded zip file
        unzip(zipFilePath.toFile(), extractDir.toFile());

        return extractDir.toString();
    });
}

private Path downloadZip(String repoUrl, Path downloadDir) throws IOException {
    URL url = new URL(repoUrl);
    Path zipFilePath = downloadDir.resolve("repo.zip");

    try (InputStream in = url.openStream()) {
        Files.copy(in, zipFilePath, StandardCopyOption.REPLACE_EXISTING);
    }
    return zipFilePath;
}

private void unzip(File zipFile, File extractTo) throws IOException {
    try (ZipFile zip = new ZipFile(zipFile)) {
        Enumeration<? extends ZipEntry> entries = zip.entries();
        while (entries.hasMoreElements()) {
            ZipEntry entry = entries.nextElement();
            File entryDestination = new File(extractTo, entry.getName());

            if (entry.isDirectory()) {
                entryDestination.mkdirs();
            } else {
                entryDestination.getParentFile().mkdirs();
                try (InputStream in = zip.getInputStream(entry);
                     OutputStream out = new FileOutputStream(entryDestination)) {
                    in.transferTo(out);
                }
            }
        }
    } finally {
        Files.deleteIfExists(zipFile.toPath()); // Optional: Clean up the zip file after extraction
    }
}

}

==========

download and zip code

package com.example.demo.service;

import org.springframework.stereotype.Service;

import java.io.*;
import java.net.URL;
import java.nio.file.*;
import java.time.Instant;
import java.util.Enumeration;
import java.util.Random;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

@Service
public class GitDownloadService {

private static final String BASE_DIR = "/root/extracted-repos/";

public String downloadAndExtract(String repoUrl, String branch) throws IOException {
    String branchRepoUrl = String.format("%s/archive/refs/heads/%s.zip", repoUrl, branch);

    // Create unique directory name based on timestamp and random number
    String uniqueDirName = "extracted_repo_" + Instant.now().getEpochSecond() + "_" + (new Random().nextInt(900) + 100);
    Path extractDir = Paths.get(BASE_DIR, uniqueDirName);

    // Ensure the directory exists
    Files.createDirectories(extractDir);

    // Download and save zip file
    Path zipFilePath = downloadZip(branchRepoUrl, extractDir);

    // Extract the downloaded zip file
    unzip(zipFilePath.toFile(), extractDir.toFile());

    return extractDir.toString();
}

private Path downloadZip(String repoUrl, Path downloadDir) throws IOException {
    URL url = new URL(repoUrl);
    Path zipFilePath = downloadDir.resolve("repo.zip");

    try (InputStream in = url.openStream()) {
        Files.copy(in, zipFilePath, StandardCopyOption.REPLACE_EXISTING);
    }
    return zipFilePath;
}

private void unzip(File zipFile, File extractTo) throws IOException {
    try (ZipFile zip = new ZipFile(zipFile)) {
        Enumeration<? extends ZipEntry> entries = zip.entries();
        while (entries.hasMoreElements()) {
            ZipEntry entry = entries.nextElement();
            File entryDestination = new File(extractTo, entry.getName());

            if (entry.isDirectory()) {
                entryDestination.mkdirs();
            } else {
                entryDestination.getParentFile().mkdirs();
                try (InputStream in = zip.getInputStream(entry);
                     OutputStream out = new FileOutputStream(entryDestination)) {
                    in.transferTo(out);
                }
            }
        }
    } finally {
        Files.deleteIfExists(zipFile.toPath()); // Optional: Clean up the zip file after extraction
    }
}

}

============ Utils

import org.springframework.stereotype.Component;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileWriter;
import java.io.InputStreamReader;

@Component
public class ShellCommandUtil {

private static final String SCRIPT_DIRECTORY = "/root/opt/selenium/";

public void createShellFile(String gitRepo, String branch, String frameworkType) {
    try {
        File scriptDirectory = new File(SCRIPT_DIRECTORY);
        if (!scriptDirectory.exists()) {
            throw new RuntimeException("Directory does not exist: " + SCRIPT_DIRECTORY);
        }

        File file = new File(scriptDirectory, "script.sh");

        try (var writer = new FileWriter(file)) {
            writer.write("echo " + gitRepo + "\n");
            writer.write("echo " + branch + "\n");
            writer.write("echo " + frameworkType + "\n");
        }

        file.setExecutable(true);
    } catch (Exception e) {
        throw new RuntimeException("Error creating shell file", e);
    }
}

public String runShellScript() {
    StringBuilder output = new StringBuilder();
    try {
        ProcessBuilder processBuilder = new ProcessBuilder("/bin/bash", "script.sh");
        processBuilder.directory(new File(SCRIPT_DIRECTORY));
        processBuilder.redirectErrorStream(true);

        Process process = processBuilder.start();

        try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                output.append(line).append("\n");
            }
        }

        int exitCode = process.waitFor();
        if (exitCode != 0) {
            throw new RuntimeException("Script execution failed with exit code: " + exitCode);
        }

        return "Script executed successfully:\n" + output.toString();
    } catch (Exception e) {
        throw new RuntimeException("Error executing shell script", e);
    }
}

public String runShellCommandDirectly(String gitRepo, String branch, String frameworkType) {
    StringBuilder output = new StringBuilder();
    try {
        String command = String.format("git clone -b %s %s && cd %s && ./gradlew clean test", branch, gitRepo, frameworkType);

        ProcessBuilder processBuilder = new ProcessBuilder("/bin/bash", "-c", command);
        processBuilder.redirectErrorStream(true);

        Process process = processBuilder.start();

        try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                output.append(line).append("\n");
            }
        }

        int exitCode = process.waitFor();
        if (exitCode != 0) {
            throw new RuntimeException("Shell command execution failed with exit code: " + exitCode);
        }

        return "Shell command executed successfully:\n" + output.toString();
    } catch (Exception e) {
        throw new RuntimeException("Error executing shell command", e);
    }
}

}

===== WebClientConfig

import io.netty.handler.timeout.ReadTimeoutHandler;
import io.netty.handler.timeout.WriteTimeoutHandler;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.client.reactive.ReactorClientHttpConnector;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.netty.http.client.HttpClient;

import java.time.Duration;
import java.util.concurrent.TimeUnit;

@Configuration
public class WebClientConfig {

private static final int TIMEOUT_MILLIS = 5000;

@Bean
public WebClient webClientWithTimeout() {
    HttpClient httpClient = HttpClient.create()
            .responseTimeout(Duration.ofMillis(TIMEOUT_MILLIS))
            .doOnConnected(conn -> conn
                    .addHandlerLast(new ReadTimeoutHandler(TIMEOUT_MILLIS, TimeUnit.MILLISECONDS))
                    .addHandlerLast(new WriteTimeoutHandler(TIMEOUT_MILLIS, TimeUnit.MILLISECONDS)));

    return WebClient.builder()
            .clientConnector(new ReactorClientHttpConnector(httpClient))
            .build();
}

}

========= git service

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Mono;

import java.time.Duration;

@Service
public class GitService {

    private final ShellCommandUtil shellCommandUtil;
    private final WebClient webClient;

    @Autowired
    public GitService(ShellCommandUtil shellCommandUtil, WebClient webClient) {
        this.shellCommandUtil = shellCommandUtil;
        this.webClient = webClient;
    }

    public Mono<String> createAndRunShellFileReactive(String gitRepo, String branch, String frameworkType) {
        return Mono.fromCallable(() -> {
            shellCommandUtil.createShellFile(gitRepo, branch, frameworkType);
            return shellCommandUtil.runShellScript();
        });
    }

    public Mono<String> runShellCommandDirectlyReactive(String gitRepo, String branch, String frameworkType) {
        return Mono.fromCallable(() -> shellCommandUtil.runShellCommandDirectly(gitRepo, branch, frameworkType));
    }

    // Example of a potential external HTTP request using WebClient
    public Mono<String> externalServiceCall() {
        return webClient.get()
                .uri("http://some-external-service")
                .retrieve()
                .bodyToMono(String.class)
                .timeout(Duration.ofSeconds(3));
    }
}

===== git controller

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Mono;

@RestController
@RequestMapping("/api/get")
public class GitController {

private final GitService gitService;

@Autowired
public GitController(GitService gitService) {
    this.gitService = gitService;
}

// Endpoint to create and run shell script (reactive)
@PostMapping("/execute-workflow/reactive")
public Mono<ResponseEntity<String>> executeWorkflowReactive(@RequestBody GitRequest gitRequest) {
    return gitService.createAndRunShellFileReactive(gitRequest.getGitRepo(), gitRequest.getBranch(), gitRequest.getFrameworkType())
            .map(ResponseEntity::ok);
}

// Endpoint to run shell command directly (reactive)
@PostMapping("/execute-command/reactive")
public Mono<ResponseEntity<String>> executeCommandReactive(@RequestBody GitRequest gitRequest) {
    return gitService.runShellCommandDirectlyReactive(gitRequest.getGitRepo(), gitRequest.getBranch(), gitRequest.getFrameworkType())
            .map(ResponseEntity::ok);
}

}
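
Note: GitController and GitService above bind to a GitRequest object that is not included in these notes. A minimal sketch of such a DTO, with field names assumed from the getters used above, might be:

package com.example.demo.model;

// Hypothetical request DTO matching the getters used in GitController/GitService.
public class GitRequest {

    private String gitRepo;
    private String branch;
    private String frameworkType;

    public String getGitRepo() { return gitRepo; }
    public void setGitRepo(String gitRepo) { this.gitRepo = gitRepo; }

    public String getBranch() { return branch; }
    public void setBranch(String branch) { this.branch = branch; }

    public String getFrameworkType() { return frameworkType; }
    public void setFrameworkType(String frameworkType) { this.frameworkType = frameworkType; }
}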

========util config

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;

@Configuration
public class ShellCommandConfig {

    @Bean
    @Primary
    public ShellCommandUtil primaryShellCommandUtil() {
        return new ShellCommandUtil(); // Default/primary bean for shell commands
    }

    @Bean
    public ShellCommandUtil secondaryShellCommandUtil() {
        return new ShellCommandUtil(); // Another bean if needed, injected with @Qualifier
    }
}

==================

package com.example.demo.service;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;

@Service
public class GitService {

private static final Logger logger = LoggerFactory.getLogger(GitService.class);

/**
 * Executes the workflow by invoking the shell script for Git clone/pull and Gradle tests.
 * 
 * @param gitUrl      The Git repository URL.
 * @param branch      The Git branch.
 * @param targetDir   The target directory to clone/pull the repository.
 * @return The output of the shell script execution.
 */
public String executeWorkflow(String gitUrl, String branch, String targetDir) {
    try {
        // Path to the shell script
        String scriptPath = "./git-workflow.sh";

        // Build the command to run the shell script
        List<String> command = new ArrayList<>();
        command.add("bash");  // Specify to use bash shell
        command.add(scriptPath);  // Path to the shell script
        command.add(gitUrl);  // Git URL
        command.add(branch);  // Git branch
        command.add(targetDir);  // Target directory

        ProcessBuilder processBuilder = new ProcessBuilder(command);
        processBuilder.directory(new File("."));  // Set working directory (current directory)
        processBuilder.redirectErrorStream(true);  // Merge error and output streams

        // Start the process
        Process process = processBuilder.start();

        // Capture the output
        StringBuilder output = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                output.append(line).append("\n");
                logger.info(line);
            }
        }

        // Wait for the process to complete
        int exitCode = process.waitFor();
        logger.info("Shell script exited with code: " + exitCode);

        if (exitCode == 0) {
            return "Workflow completed successfully.\n" + output.toString();
        } else {
            throw new RuntimeException("Shell script failed with exit code: " + exitCode);
        }
    } catch (Exception e) {
        throw new RuntimeException("Failed to execute workflow: " + e.getMessage(), e);
    }
}

}


git-workflow.sh

#!/bin/bash

# Input arguments
GIT_URL=$1
BRANCH=$2
TARGET_DIR=$3

# Clone or pull the repository
if [ -d "$TARGET_DIR" ]; then
  echo "Directory $TARGET_DIR already exists. Pulling latest changes from $BRANCH branch..."
  cd "$TARGET_DIR"
  git pull origin "$BRANCH"
else
  echo "Cloning $GIT_URL into $TARGET_DIR..."
  git clone -b "$BRANCH" "$GIT_URL" "$TARGET_DIR"
  cd "$TARGET_DIR"
fi

# Run Gradle tests
echo "Running ./gradlew clean test --info..."
./gradlew clean test --info

import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;

public class GitService {

    /**
     * Executes the shell script using Git Bash on Windows.
     *
     * @param scriptPath The full path to the shell script.
     * @return The output of the shell script execution.
     */
    public String executeShellScript(String scriptPath) {
        try {
            // Specify the path to Git Bash or another Bash shell installed on Windows
            String bashPath = "C:\\Program Files\\Git\\bin\\bash.exe"; // Path to Git Bash

            // Build the command to execute the shell script
            List<String> command = new ArrayList<>();
            command.add(bashPath);   // Git Bash executable
            command.add(scriptPath); // Full path to the shell script

            ProcessBuilder processBuilder = new ProcessBuilder(command);
            processBuilder.directory(new File("."));  // Set working directory (current directory)
            processBuilder.redirectErrorStream(true); // Merge error and output streams

            // Start the process
            Process process = processBuilder.start();

            // Capture the output
            StringBuilder output = new StringBuilder();
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    output.append(line).append("\n");
                    System.out.println(line); // Print the output to console
                }
            }

            // Wait for the process to complete
            int exitCode = process.waitFor();
            System.out.println("Shell script exited with code: " + exitCode);

            if (exitCode == 0) {
                return "Script executed successfully.\n" + output.toString();
            } else {
                throw new RuntimeException("Shell script failed with exit code: " + exitCode);
            }
        } catch (Exception e) {
            throw new RuntimeException("Failed to execute shell script: " + e.getMessage(), e);
        }
    }

    public static void main(String[] args) {
        GitService gitService = new GitService();

        // Full path to the shell script
        String scriptPath = "C:\\path\\to\\your\\script\\abc.sh";

        // Execute the shell script and print the output
        String result = gitService.executeShellScript(scriptPath);
        System.out.println(result);
    }
}

async below

package com.example.demo;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableAsync;

@SpringBootApplication
@EnableAsync // Enable async processing
public class DemoApplication {
    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }
}

package com.example.demo.service;

import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;

@Service
public class GitService {

@Async  // Make this method asynchronous
public CompletableFuture<String> executeWorkflow(String repo, String branch, String frameworkType) {
    try {
        // Define the command for Git clone with the provided repo and branch
        List<String> command = new ArrayList<>();
        command.add("C:\\Program Files\\Git\\bin\\bash.exe");  // Path to Git Bash
        command.add("-c");
        command.add("git clone -b " + branch + " " + repo);

        // Start the Git clone process
        ProcessBuilder processBuilder = new ProcessBuilder(command);
        processBuilder.directory(new File("."));  // Set working directory (current directory)
        processBuilder.redirectErrorStream(true);  // Merge error and output streams

        Process process = processBuilder.start();
        StringBuilder output = new StringBuilder();

        try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                output.append(line).append("\n");
            }
        }

        // Wait for the process to complete
        int exitCode = process.waitFor();

        if (exitCode != 0) {
            throw new RuntimeException("Git clone failed with exit code: " + exitCode);
        }

        // Execute the framework-specific command (e.g., Gradle build)
        if ("gradle".equalsIgnoreCase(frameworkType)) {
            return CompletableFuture.completedFuture(executeFrameworkCommand("./gradlew clean test --info", output));
        } else {
            // Other framework commands can be added here as needed
            return CompletableFuture.completedFuture("Unsupported framework type: " + frameworkType);
        }

    } catch (Exception e) {
        throw new RuntimeException("Failed to execute workflow: " + e.getMessage(), e);
    }
}

private String executeFrameworkCommand(String command, StringBuilder output) throws Exception {
    // Execute the framework-specific command (e.g., Gradle clean test)
    ProcessBuilder frameworkBuilder = new ProcessBuilder("C:\\Program Files\\Git\\bin\\bash.exe", "-c", command);
    frameworkBuilder.directory(new File("your_project_subdirectory"));  // Change directory to the cloned repo
    frameworkBuilder.redirectErrorStream(true);

    Process frameworkProcess = frameworkBuilder.start();

    try (BufferedReader reader = new BufferedReader(new InputStreamReader(frameworkProcess.getInputStream()))) {
        String line;
        while ((line = reader.readLine()) != null) {
            output.append(line).append("\n");
        }
    }

    int exitCode = frameworkProcess.waitFor();
    if (exitCode == 0) {
        return "Workflow executed successfully.\n" + output.toString();
    } else {
        throw new RuntimeException("Framework command failed with exit code: " + exitCode);
    }
}

}

package com.example.demo.controller;

import com.example.demo.model.WorkflowRequest;
import com.example.demo.service.GitService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;

import java.util.concurrent.CompletableFuture;

@RestController
@RequestMapping("/api")
public class WorkflowController {

    @Autowired
    private GitService gitService;

    @PostMapping("/execute-workflow")
    public CompletableFuture<ResponseEntity<String>> executeWorkflow(@RequestBody WorkflowRequest request) {
        return gitService.executeWorkflow(request.getRepo(), request.getBranch(), request.getFrameworkType())
                .thenApply(output -> ResponseEntity.ok(output))
                .exceptionally(ex -> ResponseEntity.status(500).body("Error: " + ex.getMessage()));
    }
}

curl -X POST http://localhost:8080/api/execute-workflow \
  -H "Content-Type: application/json" \
  -d '{ "repo": "https://github.com/crewAIInc/crewAI-examples.git", "branch": "master", "frameworkType": "gradle" }'
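
The /api/execute-workflow endpoint binds to a WorkflowRequest that is not shown in these notes. A minimal sketch, with field names assumed from the curl payload above, might be:

package com.example.demo.model;

// Hypothetical request DTO matching the curl payload above (repo, branch, frameworkType).
public class WorkflowRequest {

    private String repo;
    private String branch;
    private String frameworkType;

    public String getRepo() { return repo; }
    public void setRepo(String repo) { this.repo = repo; }

    public String getBranch() { return branch; }
    public void setBranch(String branch) { this.branch = branch; }

    public String getFrameworkType() { return frameworkType; }
    public void setFrameworkType(String frameworkType) { this.frameworkType = frameworkType; }
}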

GitService for Linux machine

package com.example.demo.service;

import org.springframework.stereotype.Service;
import reactor.core.publisher.Mono;

import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;

@Service
public class GitService {

    public Mono<String> executeWorkflow(String repo, String branch, String frameworkType) {
        return Mono.fromCallable(() -> {
            String tarballUrl = repo + "/archive/" + branch + ".tar.gz";
            // The extracted directory usually has the format {repoName}-{branch}
            String repoDirName = extractRepoNameFromUrl(repo) + "-" + branch;

            // Step 1: Download the repository tarball using wget
            List<String> wgetCommand = new ArrayList<>();
            wgetCommand.add("bash");
            wgetCommand.add("-c");
            wgetCommand.add("wget " + tarballUrl);

            ProcessBuilder wgetProcessBuilder = new ProcessBuilder(wgetCommand);
            wgetProcessBuilder.directory(new File(".")); // Set working directory (current directory)
            wgetProcessBuilder.redirectErrorStream(true);

            Process wgetProcess = wgetProcessBuilder.start();
            StringBuilder output = new StringBuilder();
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(wgetProcess.getInputStream()))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    output.append(line).append("\n");
                }
            }

            int wgetExitCode = wgetProcess.waitFor();
            if (wgetExitCode != 0) {
                throw new RuntimeException("wget failed with exit code: " + wgetExitCode);
            }

            // Step 2: Extract the downloaded tarball
            List<String> tarCommand = new ArrayList<>();
            tarCommand.add("bash");
            tarCommand.add("-c");
            tarCommand.add("tar -xvf " + branch + ".tar.gz");

            ProcessBuilder tarProcessBuilder = new ProcessBuilder(tarCommand);
            tarProcessBuilder.directory(new File(".")); // Set working directory (current directory)
            tarProcessBuilder.redirectErrorStream(true);

            Process tarProcess = tarProcessBuilder.start();
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(tarProcess.getInputStream()))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    output.append(line).append("\n");
                }
            }

            int tarExitCode = tarProcess.waitFor();
            if (tarExitCode != 0) {
                throw new RuntimeException("Tar extraction failed with exit code: " + tarExitCode);
            }

            // Step 3: Execute the framework-specific command (e.g., Gradle build)
            if ("gradle".equalsIgnoreCase(frameworkType)) {
                return executeFrameworkCommand("./gradlew clean test --info", output, repoDirName);
            } else {
                return "Unsupported framework type: " + frameworkType;
            }
        });
    }

    private String executeFrameworkCommand(String command, StringBuilder output, String repoDirName) throws Exception {
        // Execute the framework-specific command (e.g., Gradle clean test)
        ProcessBuilder frameworkBuilder = new ProcessBuilder("bash", "-c", command);
        frameworkBuilder.directory(new File(repoDirName)); // Change directory to the extracted repo
        frameworkBuilder.redirectErrorStream(true);

        Process frameworkProcess = frameworkBuilder.start();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(frameworkProcess.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                output.append(line).append("\n");
            }
        }

        int exitCode = frameworkProcess.waitFor();
        if (exitCode == 0) {
            return "Workflow executed successfully.\n" + output.toString();
        } else {
            throw new RuntimeException("Framework command failed with exit code: " + exitCode);
        }
    }

    private String extractRepoNameFromUrl(String repoUrl) {
        // Extract the repository name from the URL
        return repoUrl.substring(repoUrl.lastIndexOf("/") + 1).replace(".git", "");
    }
}

Ragas and RAG Testing

wrap llm

import os

from openai import AzureOpenAI

# Set environment variables for Azure OpenAI
endpoint = os.environ["AZURE_OPENAI_ENDPOINT"]
api_key = os.environ["AZURE_OPENAI_API_KEY"]

# Instantiate the Azure OpenAI client with a timeout
client = AzureOpenAI(
    azure_endpoint=endpoint,
    api_key=api_key,
    api_version="2023-09-01-preview",
    timeout=30  # Set timeout in seconds
)

# Create a simple LLM wrapper with an invoke method
class AzureLLM:

    def __init__(self, client, model="gpt-3.5-turbo"):
        self.client = client
        self.model = model

    def invoke(self, question, max_tokens=100, temperature=0.5):
        try:
            # Send the prompt to the Azure OpenAI client
            response = self.client.chat.completions.create(
                model=self.model,
                messages=[{"role": "user", "content": question}],
                max_tokens=max_tokens,
                temperature=temperature
            )

            # Extract and return the answer
            answer = response.choices[0].message.content.strip()
            return answer
        except Exception as e:
            print("Error generating answer:", e)
            return None

# Instantiate the LLM wrapper
llm = AzureLLM(client)

# Use the invoke method to get an answer
question = "What is the capital of France?"
answer = llm.invoke(question)
print("Answer:", answer)

Slide 1: Title Slide
Title: "Unified RAG & LLM Evaluation Framework for GenAI Development in Banking"
Subtitle: Highlighting Value, Benefits, and Strategic Importance

Slide 2: Introduction to GenAI in Banking
Overview of GenAI use cases in banking: customer service, fraud detection, personalized services.
Importance of RAG (Retrieval-Augmented Generation) to enhance accuracy and relevance.

Slide 3: Challenges in GenAI Adoption
Ensuring model accuracy and consistency.
Maintaining compliance and data privacy.
Balancing innovation with risk management.

Slide 4: Why an Evaluation Framework?
Need for a structured framework to assess model quality and reliability.
Importance of monitoring and continuous improvement for GenAI models in banking.

Slide 5: RAG and LLM Evaluation Framework Overview
Brief overview of key components (RAG for data relevance, LLM evaluation for quality).
How these frameworks support model integrity, accountability, and transparency.

Slide 6: Framework Components and Methodology
Data relevance and retrieval metrics (precision, recall, semantic relevance).
Model performance indicators (accuracy, latency, response quality).
Compliance and risk assessment criteria.

Slide 7: Benefits of the Framework for the Bank
Enhanced accuracy and trust in GenAI models.
Faster issue identification and resolution in model operations.
Streamlined compliance with industry regulations.

Slide 8: Value Proposition for the Bank
Increased confidence in model outputs.
Cost savings through proactive risk management.
Strengthened competitive advantage with cutting-edge GenAI practices.

Slide 9: Case Study (Optional)
Example of a hypothetical or anonymized bank applying this framework successfully.

Slide 10: Implementation Roadmap
Steps for integrating the framework into the bank's GenAI workflow.
Phased approach with milestones and success metrics.

Slide 11: Summary & Next Steps
Key takeaways.
Immediate action items for management approval.

custom wrap

import openai
from ragas.llms import LLM
from ragas.metrics import LLMContextRecall, FactualCorrectness, Faithfulness
from ragas import evaluate

class AzureOpenAIWrapper(LLM):
    def __init__(self, deployment_name, azure_endpoint, api_key):
        self.deployment_name = deployment_name
        self.azure_endpoint = azure_endpoint
        self.api_key = api_key
        openai.api_type = "azure"
        openai.api_base = azure_endpoint
        openai.api_version = "20-05-15"
        openai.api_key = api_key

    def generate_response(self, prompt):
        try:
            response = openai.ChatCompletion.create(
                engine=self.deployment_name,
                messages=[{"role": "user", "content": prompt}]
            )
            return response.choices[0].message['content']
        except Exception as e:
            print(f"Error during Azure API call: {e}")
            return None

# Set up your Azure OpenAI configuration
azure_llm = AzureOpenAIWrapper(
    deployment_name="gpt-3.5-turbo",
    azure_endpoint="https://.openai.azure.com",
    api_key=""
)

# Prepare Ragas evaluation with the Azure LLM
metrics = [LLMContextRecall(), FactualCorrectness(), Faithfulness()]
results = evaluate(dataset=eval_dataset, metrics=metrics, llm=azure_llm)

# Print evaluation results
print(results.to_pandas())

=====/

Ragas – Colab – https://colab.research.google.com/drive/1zXfffyjY2ryTBV7makO6ztYrnB2ntT88?usp=sharing

import json
from datasets import Dataset
import os
from ragas import evaluate
from ragas.metrics import faithfulness, answer_correctness
from langchain_ollama import OllamaLLM
from requests.exceptions import ConnectionError
from langchain_community.llms import Ollama

# Set environment variables (if needed)
os.environ["OPENAI_API_KEY"] = "NA"  # Replace "NA" with your API key if necessary

# Path to your JSON file
json_file_path = "eval_input.json"  # Update this with the path to your actual JSON file

# Function to load JSON data
def load_json_data(file_path):
    try:
        with open(file_path, "r") as f:
            data = json.load(f)
        return data
    except FileNotFoundError:
        print(f"Error: The file {file_path} was not found.")
        return None
    except json.JSONDecodeError:
        print(f"Error: Failed to decode the JSON file {file_path}.")
        return None

# Load the data from the JSON file
data_samples = load_json_data(json_file_path)

if data_samples:
    try:
        # Initialize the LLM (ensure your local Ollama instance is running)
        evaluator_llm = Ollama(model = "llama3")
#         evaluator_llm = OllamaLLM(
#             model="llama3.2",
#             base_url="http://localhost:11434"  # Ensure Ollama is running at this URL
#         )

        print('--------------------------------------')

        llm_test = evaluator_llm.invoke("can you tell 4 genai frameworks?")
        print(llm_test)
        print('--------------------------------------')

        # Convert the data into a Hugging Face `Dataset` format
        dataset = Dataset.from_dict({
            'question': data_samples['question'],
            'answer': data_samples['answer'],
            'contexts': data_samples['contexts'],
            'ground_truth': data_samples['ground_truth']
        })

        # Perform evaluation using RAGAS metrics
        score = evaluate(dataset, metrics=[faithfulness, answer_correctness])

        # Convert the score to a pandas DataFrame and print it
        score_df = score.to_pandas()
        print(score_df)

    except ConnectionError as e:
        print(f"Connection error: {e}")
        print("Ensure that the LLM server is running and accessible at http://localhost:11434.")
    except Exception as e:
        print(f"An error occurred: {e}")
else:
    print("No data to evaluate due to an issue with the JSON file.")
{
  "question": [
    "When was the first super bowl?",
    "Who won the most super bowls?"
  ],
  "answer": [
    "The first superbowl was held on Jan 15, 1967",
    "The most super bowls have been won by The New England Patriots"
  ],
  "contexts": [
    [
      "The First AFL–NFL World Championship Game was an American football game played on January 15, 1967, at the Los Angeles Memorial Coliseum in Los Angeles."
    ],
    [
      "The Green Bay Packers...Green Bay, Wisconsin.",
      "The Packers compete...Football Conference."
    ]
  ],
  "ground_truth": [
    "The first superbowl was held on January 15, 1967",
    "The New England Patriots have won the Super Bowl a record six times"
  ]
}

{
  "question": [
    "What are the global implications of the USA Supreme Court ruling on abortion?",
    "Which companies are the main contributors to GHG emissions and their role in global warming according to the Carbon Majors database?",
    "Which private companies in the Americas are the largest GHG emitters according to the Carbon Majors database?",
    "What action did Amnesty International urge its supporters to take in response to the killing of the Ogoni 9?",
    "What are the recommendations made by Amnesty International to the Special Rapporteur on Human Rights Defenders?",
    "Who are the target audience of the two books created by Amnesty International on child rights?",
    "Which right guarantees access to comprehensive information about past human rights violations, including the identities of the perpetrators and the fate of the victims, as well as the circumstances surrounding the violations?",
    "Who has the right to be fully informed about human rights violations, including the identities of the perpetrators and the fate of the victims?",
    "When can individuals be found guilty under Article 207.3 of the Russian Criminal Code?",
    "When does the prosecution consider statements contrary to the official position as 'false' under Article 207.3?",
    "What factors have contributed to the decline of independent civil society organizations in Nicaragua?",
    "What conditions designate wetlands as Ramsar sites?",
    "Where was COP15 held in 2022?",
    "What is the purpose of the agreement known as 30x30?",
    "Who failed to explicitly recognize Indigenous Peoples' lands and territories as a distinct category of protected area at COP15?",
    "What are the consequences of criminalizing abortion for marginalized individuals?",
    "What responsibilities should social media companies have in relation to ensuring access to accurate sexual and reproductive health information?",
    "What role do social media companies play in protecting users' rights online, regardless of their language and political views?",
    "What labor abuses were documented by Amnesty International in Qatar and how do they relate to the kafala system?",
    "When did the government of Qatar start repealing restrictions on migrant workers?"
  ],
  "answer": [
    "The global implications of the USA Supreme Court ruling on abortion can be significant, as it sets a precedent for other countries and influences the global discourse on reproductive rights. Here are some potential implications:\n\n1. Influence on other countries: The Supreme Court's ruling can serve as a reference point for other countries grappling with their own abortion laws. It can provide legal arguments and reasoning that advocates for reproductive rights can use to challenge restrictive abortion laws in their respective jurisdictions.\n\n2. Strengthening of global reproductive rights movements: A favorable ruling by the Supreme Court can energize and empower reproductive rights movements worldwide. It can serve as a rallying point for activists and organizations advocating for women's rights, leading to increased mobilization and advocacy efforts globally.\n\n3. Counteracting anti-abortion movements: Conversely, a ruling that restricts abortion rights can embolden anti-abortion movements globally. It can provide legitimacy to their arguments and encourage similar restrictive measures in other countries, potentially leading to a rollback of existing reproductive rights.\n\n4. Impact on international aid and policies: The Supreme Court's ruling can influence international aid and policies related to reproductive health. It can shape the priorities and funding decisions of donor countries and organizations, potentially leading to increased support for reproductive rights initiatives or conversely, restrictions on funding for abortion-related services.\n\n5. Shaping international human rights standards: The ruling can contribute to the development of international human rights standards regarding reproductive rights. It can influence the interpretation and application of existing human rights treaties and conventions, potentially strengthening the recognition of reproductive rights as fundamental human rights globally.\n\n6. Global health implications: The Supreme Court's ruling can have implications for global health outcomes, particularly in countries with restrictive abortion laws. It can impact the availability and accessibility of safe and legal abortion services, potentially leading to an increase in unsafe abortions and related health complications.\n\nIt is important to note that the specific implications will depend on the nature of the Supreme Court ruling and the subsequent actions taken by governments, activists, and organizations both within and outside the United States.",
    "According to the Carbon Majors database, the main contributors to greenhouse gas (GHG) emissions and their role in global warming are primarily fossil fuel companies. These companies extract, produce, and sell fossil fuels, which when burned release carbon dioxide (CO2) and other GHGs into the atmosphere. The top contributors include:\n\n1. Saudi Arabian Oil Company (Saudi Aramco): As the world's largest oil producer, Saudi Aramco is responsible for a significant portion of global GHG emissions. It is estimated to have contributed around 59.26 gigatons of CO2 equivalent (GtCO2e) between 1965 and 2017.\n\n2. Chevron Corporation: Chevron is one of the largest oil and gas companies globally and has contributed approximately 43.35 GtCO2e between 1965 and 2017.\n\n3. ExxonMobil Corporation: ExxonMobil is another major oil and gas company and has contributed around 41.90 GtCO2e between 1965 and 2017.\n\n4. BP plc: BP is a multinational oil and gas company and has contributed approximately 34.02 GtCO2e between 1965 and 2017.\n\n5. Royal Dutch Shell plc: Shell is one of the largest oil and gas companies globally and has contributed around 31.95 GtCO2e between 1965 and 2017.\n\n6. TotalEnergies SE: TotalEnergies is a French multinational oil and gas company and has contributed approximately 31.91 GtCO2e between 1965 and 2017.\n\n7. ConocoPhillips Company: ConocoPhillips is an American multinational energy corporation and has contributed around 20.67 GtCO2e between 1965 and 2017.\n\n8. PetroChina Company Limited: PetroChina is the largest oil and gas producer in China and has contributed approximately 17.26 GtCO2e between 1965 and 2017.\n\n9. Peabody Energy Corporation: Peabody Energy is the largest private-sector coal company globally and has contributed around 15.39 GtCO2e between 1965 and 2017.\n\n10. Glencore plc: Glencore is a multinational commodity trading and mining company and has contributed approximately 15.11 GtCO2e between 1965 and 2017.\n\nThese companies, along with others in the fossil",
    "According to the Carbon Majors database, the largest private companies in the Americas that are the largest GHG emitters are:\n\n1. Chevron Corporation (United States)\n2. ExxonMobil Corporation (United States)\n3. ConocoPhillips Company (United States)\n4. BP plc (United Kingdom, but with significant operations in the Americas)\n5. Royal Dutch Shell plc (Netherlands, but with significant operations in the Americas)\n6. Peabody Energy Corporation (United States)\n7. Duke Energy Corporation (United States)\n8. TotalEnergies SE (France, but with significant operations in the Americas)\n9. BHP Group Limited (Australia, but with significant operations in the Americas)\n10. Rio Tinto Group (United Kingdom/Australia, but with significant operations in the Americas)\n\nPlease note that the rankings may change over time as new data becomes available.",
    "Amnesty International urged its supporters to write letters to the Nigerian government, calling for an independent investigation into the killings of the Ogoni 9 and for those responsible to be brought to justice. They also called for an end to the human rights abuses in the Niger Delta region and for the protection of the rights of the Ogoni people.",
    "Amnesty International made several recommendations to the Special Rapporteur on Human Rights Defenders. These recommendations include:\n\n1. Urging states to fully implement the UN Declaration on Human Rights Defenders and ensure that national laws and policies are in line with international human rights standards.\n\n2. Calling on states to create a safe and enabling environment for human rights defenders, including by adopting legislation that protects defenders from threats, attacks, and reprisals.\n\n3. Encouraging states to establish effective mechanisms for the protection of human rights defenders, such as national human rights institutions and specialized units within law enforcement agencies.\n\n4. Urging states to investigate and hold accountable those responsible for attacks and threats against human rights defenders, ensuring that they are brought to justice in fair trials.\n\n5. Calling on states to repeal or amend laws that are used to criminalize or restrict the work of human rights defenders, including laws on defamation, sedition, and counter-terrorism.\n\n6. Encouraging states to provide adequate resources and support to human rights defenders, including funding for their work, access to legal assistance, and psychosocial support.\n\n7. Urging states to consult and engage with human rights defenders in the development and implementation of laws, policies, and programs that affect their rights and work.\n\n8. Calling on states to ensure that human rights defenders are able to access and share information freely, including through the internet and other digital platforms.\n\n9. Encouraging states to protect the rights of specific groups of human rights defenders who face particular risks and challenges, such as women defenders, indigenous defenders, and defenders working on issues related to sexual orientation and gender identity.\n\n10. Urging states to cooperate with the Special Rapporteur and other UN mechanisms in the promotion and protection of human rights defenders, including by responding to communications and requests for visits.",
    "The target audience of the two books created by Amnesty International on child rights are likely children and young adults.",
    "The right that guarantees access to comprehensive information about past human rights violations, including the identities of the perpetrators and the fate of the victims, as well as the circumstances surrounding the violations, is the right to truth.",
    "Everyone has the right to be fully informed about human rights violations, including the identities of the perpetrators and the fate of the victims. This right is recognized by various international human rights instruments, such as the Universal Declaration of Human Rights and the International Covenant on Civil and Political Rights.\n\nThe victims of human rights violations and their families have a particular right to know the truth about what happened to them. This includes knowing the identities of the perpetrators and the fate of the victims, such as whether they are alive or dead, and if dead, the circumstances of their death.\n\nFurthermore, society as a whole has a right to be informed about human rights violations. This is essential for holding perpetrators accountable, preventing future abuses, and ensuring justice and reparations for the victims. Access to information about human rights violations also allows for public scrutiny and debate, which are crucial for building a culture of human rights and promoting transparency and accountability.\n\nJournalists, human rights defenders, and civil society organizations play a vital role in investigating and reporting on human rights violations. They have the right to access information, conduct independent investigations, and disseminate their findings to the public. Governments have a responsibility to protect and facilitate these activities, ensuring that individuals and organizations can operate freely and without fear of reprisals.\n\nIn summary, the right to be fully informed about human rights violations, including the identities of the perpetrators and the fate of the victims, belongs to everyone. It is a fundamental aspect of promoting human rights, accountability, and justice.",
    "Under Article 207.3 of the Russian Criminal Code, individuals can be found guilty if they participate in an organized group or commit repeated acts of smuggling of cultural heritage items.",
    "Under Article 207.3 of the Russian Criminal Code, statements contrary to the official position are considered 'false' by the prosecution when they are intentionally made with the aim of discrediting the state or its authorities, or spreading false information that could potentially harm the public interest or cause significant damage. The prosecution will consider such statements as false if they are deliberately made to mislead the public or undermine the credibility of the state.",
    "There are several factors that have contributed to the decline of independent civil society organizations in Nicaragua:\n\n1. Repressive government policies: The Nicaraguan government, under President Daniel Ortega, has implemented a series of repressive policies that have targeted civil society organizations. These policies include restrictive laws, such as the Foreign Agents Law, which requires organizations receiving foreign funding to register as \"foreign agents\" and submit detailed financial reports. This has created a climate of fear and uncertainty for many organizations, leading to self-censorship and a decline in their activities.\n\n2. Harassment and intimidation: Civil society organizations in Nicaragua have faced harassment and intimidation from government authorities, including surveillance, arbitrary arrests, and physical attacks. Human rights defenders and activists have been particularly targeted, with many facing threats to their personal safety and the safety of their families. This has made it difficult for organizations to operate freely and has deterred many individuals from getting involved in civil society work.\n\n3. Lack of funding: The repressive policies and climate of fear have also made it difficult for civil society organizations to secure funding. Many international donors have been reluctant to support organizations in Nicaragua due to concerns about the government's crackdown on civil society. This has led to a decline in resources available for organizations to carry out their work, including advocacy, research, and service provision.\n\n4. Legal restrictions: The Nicaraguan government has implemented a series of legal restrictions that have made it difficult for civil society organizations to operate. These include laws that limit freedom of association, freedom of expression, and the right to peaceful assembly. Organizations that are critical of the government or engage in activities deemed \"political\" have faced increased scrutiny and legal challenges, further limiting their ability to operate independently.\n\n5. Polarization and division: The political situation in Nicaragua has become increasingly polarized, with the government cracking down on dissent and opposition voices. This has created a climate of fear and mistrust, making it difficult for civil society organizations to collaborate and coordinate their efforts. The lack of unity and solidarity among civil society actors has weakened their collective impact and made it easier for the government to suppress their activities.\n\nOverall, the decline of independent civil society organizations in Nicaragua can be attributed to a combination of repressive government policies, harassment and intimidation, lack of funding, legal restrictions, and polarization. These factors have created a hostile environment for civil society, leading to a decline in their numbers and activities.",
    "Wetlands are designated as Ramsar sites based on the following conditions:\n\n1. The wetland must be of international importance, either ecologically, botanically, zoologically, or hydrologically.\n\n2. The wetland should support a significant number of rare, vulnerable, or endangered species of plants or animals.\n\n3. The wetland should be a habitat for migratory species.\n\n4. The wetland should have exceptional biodiversity or ecological processes.\n\n5. The wetland should provide important services to the local community, such as water purification, flood control, or groundwater recharge.\n\n6. The wetland should have cultural, scientific, educational, or recreational values.\n\n7. The wetland should be under threat from human activities or face potential degradation.\n\n8. The wetland should have effective management and conservation plans in place or the potential to develop them.\n\nThese conditions are assessed by the Ramsar Convention on Wetlands, an international treaty aimed at conserving and sustainably using wetlands worldwide.",
    "COP15 was held in Kunming, China in 2022.",
    "The purpose of the agreement known as 30x30 is to conserve and protect 30% of the planet's land and oceans by the year 2030. This agreement aims to address the global biodiversity crisis and the impacts of climate change by preserving and restoring ecosystems, promoting sustainable land and ocean management practices, and safeguarding the habitats of numerous plant and animal species. The goal is to ensure the long-term health and resilience of the planet's natural resources and to support the well-being of both human and non-human communities.",
    "At COP15, the United Nations Climate Change Conference held in 2009 in Copenhagen, Denmark, the failure to explicitly recognize Indigenous Peoples' lands and territories as a distinct category of protected area was primarily attributed to the governments of several countries. These countries included Canada, Australia, New Zealand, and the United States. These governments were criticized for their reluctance to acknowledge the rights and land claims of Indigenous Peoples, which hindered the inclusion of Indigenous lands as protected areas in the conference's agreements.",
    "Criminalizing abortion can have severe consequences for marginalized individuals, particularly those who are already facing various forms of discrimination and inequality. Some of the consequences include:\n\n1. Health risks: When abortion is criminalized, individuals may resort to unsafe and illegal methods to terminate their pregnancies, leading to serious health complications and even death. Marginalized individuals, who often have limited access to healthcare and resources, are more likely to be affected by these risks.\n\n2. Economic burden: Marginalized individuals, who are more likely to face financial constraints, may struggle to afford safe and legal abortions in countries where it is criminalized. This can lead to increased financial burdens, as they may have to travel long distances or seek expensive underground services.\n\n3. Limited reproductive choices: Criminalizing abortion restricts individuals' reproductive choices and autonomy. Marginalized individuals, who already face various barriers to accessing healthcare and education, may be further limited in their ability to make informed decisions about their reproductive health.\n\n4. Increased inequality: Criminalizing abortion disproportionately affects marginalized individuals, exacerbating existing inequalities. Women from low-income backgrounds, racial and ethnic minorities, LGBTQ+ individuals, and those living in rural areas often face additional barriers to accessing safe and legal abortion services, further widening the gap between privileged and marginalized groups.\n\n5. Criminalization of pregnancy: In some cases, criminalizing abortion can lead to the criminalization of pregnancy itself. This can result in the surveillance and punishment of pregnant individuals, particularly those from marginalized communities, who may be subjected to invasive monitoring, discrimination, and even imprisonment for actions perceived as harmful to the fetus.\n\n6. Mental health impact: The criminalization of abortion can have significant psychological consequences for marginalized individuals. The stigma and shame associated with seeking illegal abortions or being unable to access safe services can lead to increased stress, anxiety, and depression.\n\n7. Reproductive injustice: Criminalizing abortion perpetuates reproductive injustice by denying marginalized individuals the right to control their own bodies and make decisions about their reproductive health. It reinforces systemic inequalities and denies them the same reproductive rights and freedoms enjoyed by more privileged individuals.\n\nIt is important to note that these consequences are not limited to criminalizing abortion alone but are often intertwined with broader social, economic, and healthcare disparities faced by marginalized communities.",
    "Social media companies have a responsibility to ensure access to accurate sexual and reproductive health information. Here are some key responsibilities they should have:\n\n1. Promote accurate information: Social media platforms should actively promote and prioritize accurate sexual and reproductive health information over misinformation or harmful content. This can be done by using algorithms to identify and reduce the visibility of false or misleading information.\n\n2. Fact-checking and verification: Social media companies should invest in fact-checking mechanisms to verify the accuracy of sexual and reproductive health information shared on their platforms. They should work with experts and organizations specializing in sexual and reproductive health to ensure the information is reliable and evidence-based.\n\n3. Transparency and disclosure: Social media companies should be transparent about their content moderation policies and algorithms. They should disclose how they handle sexual and reproductive health information and what measures they take to ensure accuracy. This transparency will help users understand the reliability of the information they encounter.\n\n4. Collaborate with experts and organizations: Social media companies should collaborate with reputable experts, organizations, and health authorities to develop guidelines and standards for accurate sexual and reproductive health information. They should seek input from these stakeholders to ensure that their policies and practices align with the latest scientific evidence and best practices.\n\n5. User education and awareness: Social media companies should invest in user education and awareness campaigns to help users identify accurate sexual and reproductive health information. This can include providing tips on how to evaluate the credibility of sources, recognizing misinformation, and understanding the importance of evidence-based information.\n\n6. Reporting and addressing misinformation: Social media platforms should have clear and accessible mechanisms for users to report misinformation related to sexual and reproductive health. They should promptly investigate and take appropriate action against accounts or content that spread false or harmful information.\n\n7. Accessibility and inclusivity: Social media companies should ensure that accurate sexual and reproductive health information is accessible to all users, including those with disabilities or from marginalized communities. They should consider the needs of diverse populations and provide information in multiple languages and formats.\n\n8. Continuous improvement: Social media companies should regularly evaluate and improve their policies, algorithms, and practices related to sexual and reproductive health information. They should stay updated with the latest research and feedback from users and experts to adapt their approaches and address emerging challenges.\n\nBy fulfilling these responsibilities, social media companies can play a crucial role in promoting accurate sexual and reproductive health information and empowering users to make informed decisions about their health.",
    "Social media companies play a crucial role in protecting users' rights online, regardless of their language and political views. Here are some key aspects of their role:\n\n1. Content moderation: Social media companies have the responsibility to moderate and remove content that violates their community guidelines, including hate speech, harassment, and misinformation. This helps protect users from harmful and abusive content, ensuring a safer online environment.\n\n2. Privacy and data protection: Companies must safeguard users' personal information and ensure their privacy rights are respected. This involves implementing robust data protection measures, obtaining user consent for data collection and usage, and providing transparent privacy policies.\n\n3. Algorithmic transparency: Social media platforms use algorithms to curate and personalize content for users. Ensuring transparency in these algorithms is crucial to prevent bias, discrimination, and the manipulation of information. Companies should disclose how algorithms work and take steps to address any unintended consequences.\n\n4. Accessibility and inclusivity: Social media companies should strive to make their platforms accessible to users of all languages and abilities. This includes providing language options, captioning for videos, and features that accommodate users with disabilities, ensuring equal access to information and participation.\n\n5. Fighting disinformation: Companies should actively combat the spread of disinformation and fake news on their platforms. This involves implementing fact-checking mechanisms, partnering with credible sources, and promoting media literacy to help users distinguish between reliable and misleading information.\n\n6. Transparency and accountability: Social media companies should be transparent about their policies, practices, and decision-making processes. They should provide clear avenues for users to report issues, appeal content removals, and seek redress. Regular transparency reports can help build trust and hold companies accountable.\n\n7. Collaboration with external stakeholders: Companies should collaborate with governments, civil society organizations, and experts to develop policies and practices that protect users' rights. Engaging in multi-stakeholder dialogues helps ensure diverse perspectives are considered and fosters a collective effort to address online challenges.\n\nOverall, social media companies have a responsibility to create a safe, inclusive, and rights-respecting online environment for all users, regardless of their language and political views.",
    "Amnesty International has documented several labor abuses in Qatar, particularly in relation to the kafala system. The kafala system is a sponsorship-based employment system that ties migrant workers to their employers, giving employers significant control over their workers' lives and rights. Here are some of the labor abuses documented:\n\n1. Exploitative working conditions: Migrant workers in Qatar often face long working hours, with some working up to 12 hours a day, six or seven days a week. They are also subjected to dangerous and hazardous working conditions, especially in the construction sector.\n\n2. Withheld wages: Many workers have reported that their employers withhold their wages for months, sometimes even years. This practice leaves workers in a vulnerable position, unable to leave their jobs or seek alternative employment.\n\n3. Passport confiscation: Employers often confiscate the passports of migrant workers, depriving them of their freedom of movement and making it difficult for them to leave the country or change jobs. This practice is a clear violation of workers' rights and leaves them trapped in exploitative situations.\n\n4. Forced labor and human trafficking: Amnesty International has documented cases of forced labor and human trafficking in Qatar. Some workers are deceived about the nature of their work or are forced to work against their will, often in conditions akin to modern-day slavery.\n\n5. Inadequate living conditions: Migrant workers in Qatar often live in overcrowded and unsanitary accommodation provided by their employers. These living conditions are substandard and do not meet basic human rights standards.\n\nThese labor abuses are directly linked to the kafala system, which gives employers excessive power and control over migrant workers. The system ties workers to their employers, making it difficult for them to assert their rights or seek redress for abuses. The kafala system creates a power imbalance that enables employers to exploit workers, leading to the documented labor abuses in Qatar.",
    "The government of Qatar started repealing restrictions on migrant workers in 2017."
  ],
  "contexts": [
    [
      "- In 2022, the USA Supreme Court handed down a decision ruling that overturned 50 years of jurisprudence recognizing a constitutional right to abortion.\n- This decision has had a massive impact: one in three women and girls of reproductive age now live in states where abortion access is either totally or near-totally inaccessible.\n- The states with the most restrictive abortion laws have the weakest maternal health support, higher maternal death rates, and higher child poverty rates.\n- The USA Supreme Court ruling has also had impacts beyond national borders due to the geopolitical and cultural influence wielded by the USA globally and the aid it funds.\n- SRR organizations and activists across the world have expressed fear about the ruling laying the groundwork for anti-abortion legislative and policy attacks in other countries.\n- Advocates have also observed the ruling's impact on progressive law reform and the stalling of the adoption and enforcement of abortion guidelines in certain African countries.\n- The ruling has created a chilling effect in international policy spaces, emboldening anti-abortion state and non-state actors to undermine human rights protections.",
      "The USA Supreme Court ruling on abortion has sparked intense debates and discussions not only within the country but also around the world. Many countries look to the United States as a leader in legal and social issues, so the decision could potentially influence the policies and attitudes towards abortion in other nations.",
      "The ruling may also impact international organizations and non-governmental groups that work on reproductive rights and women's health issues. Depending on the outcome, there could be shifts in funding, advocacy efforts, and collaborations with American counterparts, leading to ripple effects in the global fight for reproductive justice."
    ],
    [
      "In recent years, there has been increasing pressure on these major contributors to reduce their greenhouse gas emissions and transition to more sustainable practices. Many of these companies have made commitments to reduce their carbon footprint and invest in renewable energy sources. However, the role of these companies in global warming remains a contentious issue, with ongoing debates about their responsibility and accountability for the environmental damage caused by their operations.",
      "The Carbon Majors database identifies a small group of companies responsible for the majority of global greenhouse gas emissions. These companies include major oil and gas producers, as well as coal mining companies, whose activities have significantly contributed to climate change. The data from the Carbon Majors database highlights the disproportionate impact of these companies on the environment and the urgent need for action to address their emissions.",
      "- Fossil fuel companies, whether state or private, play a key role in the emissions that drive global warming.\n- According to the Carbon Majors database, 100 existing fossil fuel companies (and eight that no longer exist) have produced almost a trillion tons of GHG emissions in 150 years.\n- These 100 companies are responsible for 71% of all GHG emissions since 1988.\n- In the Americas, the private companies responsible for the most emissions during this period, according to the database, are from the United States: ExxonMobil, Chevron, and Peabody.\n- The largest emitter amongst state-owned companies in the Americas is Mexican company Pemex, followed by Venezuelan company Petr\u00f3leos de Venezuela, S.A.\n- Despite the fact that people with fewer resources, in particular from countries in the global South, do not make significant contributions to climate change, it is these people who are most affected by it.\n- Approximately half of the global population lives in areas that are \"very vulnerable\" to climate change, and it is people with considerable development constraints who are most exposed.\n- This disproportionately affects the human rights of people with fewer resources and greater risk in the context of climate change in the global South.\n- Between 2010 and 2020, human mortality due to climate disasters was 15 times higher in vulnerable regions and populations."
    ],
    [
      "The issue of greenhouse gas emissions has become a major concern for environmentalists and policymakers alike, as the impact of climate change becomes more apparent. Private companies in the Americas play a significant role in contributing to these emissions, with some being identified as the largest emitters in the region according to the Carbon Majors database.",
      "Reducing greenhouse gas emissions from private companies is a complex challenge that requires cooperation between governments, businesses, and consumers. Many companies are now taking steps to reduce their carbon footprint through initiatives such as investing in renewable energy, improving energy efficiency, and offsetting emissions through carbon credits.",
      "The private companies responsible for the most emissions during this period, according to the database, are from the United States: ExxonMobil, Chevron and Peabody.\nThe largest emitter amongst state-owned companies in the Americas is Mexican company Pemex, followed by Venezuelan company Petr\u00f3leos de Venezuela, S.A."
    ],
    [
      "In the case of the Ogoni 9, Amnesty International called on its supporters to take action by signing petitions, writing letters to government officials, and participating in protests and demonstrations. The organization also urged its members to raise awareness about the plight of the Ogoni people and the need for justice and accountability in their case. By mobilizing public pressure and solidarity, Amnesty International hoped to bring attention to the human rights abuses committed against the Ogoni 9 and to push for meaningful change and accountability.",
      "Amnesty International called on its vast network of supporters to deluge Nigerian authorities first with appeals for the defenders\u2019 freedom, and later with letters of outrage.",
      "Amnesty International is a global human rights organization known for its advocacy work on behalf of individuals facing injustice and persecution. The organization has a long history of campaigning for the rights of prisoners of conscience, political dissidents, and marginalized communities around the world. Through its research, lobbying efforts, and grassroots activism, Amnesty International seeks to hold governments and other powerful entities accountable for human rights violations and abuses."
    ],
    [
      "In recent years, Amnesty International has focused on issues such as the increasing threats faced by human rights defenders, particularly in authoritarian regimes, and the need for stronger international mechanisms to support and protect their work. The organization's recommendations to the Special Rapporteur often include calls for greater accountability for perpetrators of violence and harassment against defenders, as well as measures to ensure their safety and freedom to carry out their important work.",
      "Amnesty International recommends that the Special Rapporteur on the Situation of Human Rights Defenders embeds a focus on child and young HRDs in the future work of the mandate. \nAmnesty International recommends raising awareness about the differences between \"child\" and \"youth\" HRDs and the different context, needs, barriers, and challenges they face as defenders. \nAmnesty International recommends incorporating age disaggregated data on the lived experiences of child and young HRDs in all future reports or interventions by the Special Rapporteur. \nAmnesty International recommends creating online and face-to-face safe spaces for child and young HRDs and their child and youth-led organizations to actively engage with the Special and other human rights mechanisms.",
      "Amnesty International is known for its advocacy work on behalf of human rights defenders around the world. The organization often provides detailed reports and recommendations to the Special Rapporteur on Human Rights Defenders, highlighting specific cases of abuse and calling for action to protect those at risk."
    ],
    [
      "In addition to children, parents, teachers, and caregivers are also key target audiences for Amnesty International's books on child rights. These adults play a crucial role in guiding and supporting children as they learn about their rights and responsibilities. By reading and discussing these books together, families and educators can help instill values of equality, respect, and compassion in the next generation.",
      "Amnesty International's books on child rights are designed to educate and empower young readers about their rights and the importance of standing up for justice. The colorful illustrations and engaging storytelling make these books accessible to children of various ages and backgrounds, sparking their curiosity and empathy for others.",
      "Amnesty International has therefore created two simple books on child rights and how to claim them for self and others: (1) \u2018Know Your Rights and Claim Them: A Guide for Youth\u2019, by Amnesty International, Angelina Jolie, Geraldine van Bueren (Andersen Press 2021); (2) \u2018These Rights Are Your Rights: An empowering guide for children everywhere\u2019, by Nicky Parker at Amnesty International (Andersen Press, 2024).\nThe objectives of the two books are to build children\u2019s and young people\u2019s understanding of their rights and give them the awareness and tools they need to take action."
    ],
    [
      "The right to truth is a fundamental human right that seeks to uncover the full extent of past human rights violations and ensure accountability for those responsible. This right includes access to official records, archives, and other sources of information that shed light on historical injustices and atrocities.",
      "In many countries, the pursuit of truth and justice for past human rights violations is a complex and often contentious process. Governments, institutions, and individuals may resist efforts to uncover the truth, leading to challenges in achieving transparency and accountability. The right to truth is essential for promoting reconciliation, healing, and preventing future abuses.",
      "26. The Act raises serious questions about its compatibility with the right to truth.\n27. At an individual level, victims and their families possess an inalienable and non-derogable right to know the truth regarding the circumstances and reasons behind human rights violations, including the identities of the perpetrators and, in cases of death or enforced disappearance, the fate of the victims.\nIn particular, the right to know the fate and whereabouts of \u2018disappeared\u2019 relatives has been confirmed in the jurisprudence of international and regional human rights bodies, as well as of national courts."
    ],
    [
      "In many cases, the identities of perpetrators of human rights violations are shielded from public knowledge, either due to political reasons or lack of evidence. This lack of transparency can hinder efforts to seek justice for victims and hold those responsible accountable for their actions. The right to know the fate of victims is equally important, as it allows families and communities to grieve, seek closure, and work towards healing and reconciliation.",
      "- The victims of gross human rights violations and their families, as well as members of society generally, have the right to know the whole truth about past human rights violations.\n- The right to truth is engaged by the violation of the rights to access to justice, remedy, and information.\n- Victims and their families possess an inalienable and non-derogable right to know the truth regarding the circumstances and reasons behind human rights violations, including the identities of the perpetrators and the fate of the victims.\n- The right to know the fate and whereabouts of 'disappeared' relatives has been confirmed in the jurisprudence of international and regional human rights bodies, as well as of national courts.",
      "The concept of universal human rights has evolved over centuries, with various international declarations and treaties outlining the fundamental rights that all individuals are entitled to. These rights are considered inherent to all human beings, regardless of nationality, ethnicity, or any other distinguishing factor. The idea that everyone should be fully informed about human rights violations stems from the belief that transparency and accountability are essential for upholding these rights and preventing future abuses."
    ],
    [
      "Article 207.3 of the Russian Criminal Code pertains to the illegal acquisition, transfer, sale, storage, transportation, or carrying of firearms, their main parts, ammunition, explosives, and explosive devices. This law aims to regulate the possession and use of dangerous weapons to maintain public safety and order.",
      "- As long as their statements are contrary to the official position of the Russian authorities, they can be regarded by the prosecution and courts as \"false\" and individuals are found guilty under Article 207.3.\n- Official information regarding Russia's full-scale invasion of Ukraine has been riddled with false and deliberately misleading messages and claims.\n- Debunking such information and providing independent analysis of the situation has been expressly criminalized.\n- Accordingly, Article 207.3 acts as a tool of wartime censorship.\n- Prosecution for \"dissemination of knowingly false information\" under Article 207.3 contributes to impunity for violations of the laws and customs of war.\n- The claim that Roman Ivanov's actions were \"in violation of constitutional order\" do not stand scrutiny either, as Article 207.3 of the Criminal Code is contrary to the provisions of the Russian Constitution itself.\n- Article 2 of the Russian Constitution proclaims human rights and freedoms as the highest value and their defense as the duty of the state.\n- Article 17 further declares that human rights and freedoms are guaranteed \"in accordance with the recognized principles and norms of international law and this Constitution.\"\n- Article 29 guarantees freedom of thought and speech, freedom to seek, receive, share, produce and disseminate information, and media freedom.\n- At the same time, it prohibits censorship.",
      "In addition to the legal implications, individuals found guilty under Article 207.3 may face severe penalties, including fines, imprisonment, or other restrictions on their rights and freedoms. The enforcement of this law is crucial in preventing criminal activities involving firearms and explosives in Russia."
    ],
    [
      "- As long as their statements are contrary to the official position of the Russian authorities, they can be regarded by the prosecution and courts as \u201cfalse\u201d and individuals are found guilty under Article 207.3.\n- Official information regarding Russia\u2019s full-scale invasion of Ukraine has been riddled with false and deliberately misleading messages and claims.\n- Debunking such information and providing independent analysis of the situation has been expressly criminalized.\n- Accordingly, Article 207.3 acts as a tool of wartime censorship.\n- Prosecution for \"dissemination of knowingly false information\" under Article 207.3 contributes to impunity for violations of the laws and customs of war.\n- The claim that Roman Ivanov\u2019s actions were \"in violation of constitutional order\" do not stand scrutiny either, as Article 207.3 of the Criminal Code is contrary to the provisions of the Russian Constitution itself.\n- Article 2 of the Russian Constitution proclaims human rights and freedoms as the highest value and their defense as the duty of the state.\n- Article 29 guarantees freedom of thought and speech, freedom to seek, receive, share, produce and disseminate information and media freedom.\n- At the same time, it prohibits censorship.",
      "In legal proceedings, the prosecution may consider statements contrary to the official position as 'false' under Article 207.3 when they are deemed to be intentionally misleading or deceptive. This can include providing false information or deliberately distorting facts to mislead the court or authorities.",
      "The determination of whether statements are considered 'false' under Article 207.3 is based on the intent behind the statements and the impact they have on the legal proceedings. If the prosecution believes that the statements were made with the purpose of obstructing justice or undermining the truth, they may be classified as 'false' and subject to legal consequences."
    ],
    [
      "The economic challenges facing Nicaragua have also played a role in the decline of independent civil society organizations. Limited funding opportunities, coupled with a shrinking space for civic engagement, have made it difficult for these organizations to sustain their operations and carry out their missions effectively.",
      "- \"arrests and harassment of human rights defenders, restrictive NGO laws and violent repression of protests have been decimating independent civil society organizations for years.\"\n- \"The closure of civil society organizations and our rural community centers, which for more than 40 years have worked in defense of SRR and human rights in general.\"\n- \"The expropriation of their belongings and premises.\"\n- \"The criminalization of social organizing and mobilization and of freedom of expression.\"\n- \"The implementation of repressive laws that breach the human rights of the population to suit the wishes of the regime.\"\n- \"The constant threats of arrest and detention of those who protest and the restrictions on social media.\"\n- \"The silence imposed through violence and repression.\"\n- \"As a result, the activities of our feminist community organization are severely limited right now.\"",
      "Nicaragua's political landscape has undergone significant changes in recent years, leading to increased government control and restrictions on independent civil society organizations. The government's crackdown on dissent and opposition has resulted in the closure of many NGOs and civil society groups, limiting their ability to operate freely and advocate for social change."
    ],
    [
      "Wetlands designated as Ramsar sites must meet specific criteria outlined by the Ramsar Convention, an international treaty aimed at conserving wetlands and their resources. These criteria include the presence of rare or unique ecosystems, the support of a diverse range of plant and animal species, and the provision of essential services such as water purification and flood control.",
      "The Ramsar Convention, signed in 1971 in Ramsar, Iran, has designated over 2,000 wetlands worldwide as Ramsar sites. These sites are recognized for their ecological importance and are protected under international law to ensure their conservation for future generations.",
      "- \"Ramsar sites are designated when they fulfil the criteria for identifying wetlands of international importance.\"\n- \"These criteria were established under the Convention on Wetlands, an intergovernmental agreement containing a framework for the conservation and wise use of wetlands and their resources.\""
    ],
    [
      "The city of Kunming, located in the Yunnan province of China, is known for its stunning natural landscapes and diverse ecosystems. It served as the perfect backdrop for COP15, allowing delegates to immerse themselves in the beauty of nature while discussing crucial environmental issues.",
      "- These include the agreement known as 30x30, adopted at the UN Conference on Biological Diversity (COP15) held in Montreal, Canada, in late 2022.\n- Unfortunately, at COP15, States failed to explicitly recognize Indigenous Peoples' lands and territories as a distinct category of protected area, and this threatens their rights.",
      "COP15, also known as the 15th meeting of the Conference of the Parties to the Convention on Biological Diversity, was held in Kunming, China in 2022. The conference aimed to address the global biodiversity crisis and set ambitious targets for conservation and sustainable development."
    ],
    [
      "The 30x30 agreement aims to protect 30% of the world's land and oceans by the year 2030. This ambitious goal is seen as crucial in preserving biodiversity and combating the effects of climate change. By setting aside these areas as protected zones, it is hoped that ecosystems can thrive and species can be safeguarded for future generations.",
      "The 30x30 agreement has garnered support from various governments, environmental organizations, and indigenous groups around the world. It represents a global effort to address the urgent need for conservation and sustainable land management practices. Through this agreement, countries are committing to prioritize the protection of natural habitats and the promotion of biodiversity.",
      "- These include the agreement known as 30x30, adopted at the UN Conference on Biological Diversity (COP15) held in Montreal, Canada, in late 2022, which commits Parties to designate 30% of the world's land and sea masses as protected areas for biodiversity conservation by 2030.\n- Protecting and restoring these ecosystems is one of the most efficient, equitable and cost-effective mechanisms to capture and store carbon without compromising human rights.\n- However, the focus of the agreement should not be limited to the amount of area it covers, but rather to how it is managed, who selects the areas, and what biodiversity exists in them.\n- Indigenous Peoples and local communities have a key role to play here.\n- Unfortunately, at COP15, States failed to explicitly recognize Indigenous Peoples' lands and territories as a distinct category of protected area, and this threatens their rights."
    ],
    [
      "The lack of explicit recognition of Indigenous Peoples' lands and territories as a distinct category of protected area at COP15 highlights the ongoing challenges in balancing conservation efforts with respect for Indigenous rights and sovereignty. This issue has sparked debates about the need for more inclusive and equitable decision-making processes in global conservation initiatives.",
      "The issue of recognizing Indigenous Peoples' lands and territories as a distinct category of protected area at COP15 has been a point of contention among various stakeholders. While some argue that Indigenous Peoples' traditional knowledge and practices are essential for effective conservation efforts, others believe that a one-size-fits-all approach is more practical and efficient in achieving conservation goals.",
      "Unfortunately, at COP15, States failed to explicitly recognize Indigenous Peoples' lands and territories as a distinct category of protected area, and this threatens their rights."
    ],
    [
      "- Abortion criminalization contributes to stigma around abortion and particularly on those who need, provide or assist with abortion services.\n- Criminalization, lack of information, disinformation, and stigma have severe and irreversible effects, particularly on marginalized individuals.\n- Girls and young women are kept in the dark about their rights and often forced to bring to term pregnancies resulting from sexual violence.\n- Marginalized individuals, including those living in poverty, historically discriminated against, Indigenous and Afro-descendent women, migrants, and refugees, are disproportionately affected by abortion criminalization.\n- Criminalization of abortion is the biggest contributing factor to the estimated 35 million unsafe abortions happening every year, increasing maternal mortality and morbidity.\n- Criminalizing abortion undermines access to health services and leads to increased rates of preventable maternal mortality and morbidity.\n- Abortion criminalization forces women, girls, and all pregnant individuals to attempt unsafe clandestine abortion methods, placing their life and health at risk.\n- Abortion laws in Nigeria are restrictive, making abortion unsafe and difficult to access safe abortion care.",
      "In countries where abortion is illegal or highly restricted, marginalized individuals, such as low-income women, young girls, and immigrants, are disproportionately affected. They may resort to unsafe and clandestine methods to terminate a pregnancy, putting their lives at risk. The criminalization of abortion can further perpetuate cycles of poverty, inequality, and discrimination, as marginalized individuals are denied the autonomy to make decisions about their own bodies and futures.",
      "The debate over abortion has been a contentious issue for decades, with strong opinions on both sides of the argument. The criminalization of abortion can have far-reaching consequences for marginalized individuals, particularly those who may not have access to safe and legal options for terminating a pregnancy. This can lead to increased health risks, financial burdens, and emotional distress for those who are already facing systemic barriers to healthcare and support services."
    ],
    [
      "Social media companies play a significant role in shaping public discourse and disseminating information on a wide range of topics, including sexual and reproductive health. As such, they have a responsibility to ensure that the information shared on their platforms is accurate, reliable, and evidence-based. This includes taking steps to combat misinformation and disinformation that can harm individuals' health and well-being.",
      "Social media companies involved in facilitating and moderating online content need to invest adequate resources in human oversight of artificial intelligence-driven content moderation systems to ensure all users can equally exercise their rights online, regardless of language and political views and ensure that they can access accurate sexual and reproductive health-related information, including on the right to abortion. They should also uphold their human rights responsibilities by engaging in human rights due diligence, including to address risks and abuses arising from their business model.",
      "In recent years, there has been growing concern about the spread of false or misleading information related to sexual and reproductive health on social media platforms. This has led to calls for greater accountability and transparency from these companies in terms of monitoring and regulating the content that is shared on their platforms. By taking proactive measures to promote accurate information and combat misinformation, social media companies can help to empower individuals to make informed decisions about their sexual and reproductive health."
    ],
    [
      "Companies, including social media companies, have a responsibility to respect all human rights wherever they operate in the world.\nSocial media companies involved in facilitating and moderating online content need to invest adequate resources in human oversight of artificial intelligence-driven content moderation systems to ensure all users can equally exercise their rights online, regardless of language and political views.\nThey should uphold their human rights responsibilities by engaging in human rights due diligence, including to address risks and abuses arising from their business model.\nThey should undertake proactive measures in educating users and raising awareness about security and privacy features on their platforms that will help WHRDs create a safer and less toxic online experience.",
      "Social media companies have faced increasing scrutiny over their handling of user data and privacy concerns. The Cambridge Analytica scandal, in which Facebook data was used to influence political campaigns, highlighted the need for greater transparency and accountability in the industry. As a result, companies like Facebook and Twitter have implemented stricter policies and measures to protect users' rights and prevent misuse of their platforms.",
      "The issue of online censorship and freedom of speech has also been a contentious topic for social media companies. Balancing the need to protect users from harmful content while also upholding principles of free expression is a delicate task. Companies must navigate complex legal and ethical considerations when deciding what content to allow or remove, taking into account the diverse perspectives and values of their global user base."
    ],
    [
      "The kafala system in Qatar, which ties a migrant worker's legal status to their employer, has been identified as a key factor contributing to the labor abuses documented by Amnesty International. This system gives employers significant control over their workers, making it difficult for them to leave abusive situations or seek justice for violations of their rights.",
      "Amnesty International's report on labor abuses in Qatar highlighted the widespread exploitation of migrant workers, including long working hours, low wages, and poor living conditions. These violations of human rights have raised concerns globally and put pressure on the Qatari government to reform its labor laws and practices.",
      "During the tournament, Amnesty International documented how one company providing guards to an international hotel chain in Doha was permitting workers only one day off each month, threatening salary cuts if they took more rest time, failing to provide pay slips and housing them in dirty and overcrowded accommodation with six people per room in bunkbeds.\nUntil 2020, Qatar\u2019s inherently abusive kafala sponsorship system imposed tight restrictions on migrant workers\u2019 freedom of movement, prohibiting them from leaving the country or changing jobs without the permission of their employer.\nBetween 2018 and 2020, the government took important steps towards tackling two central pillars of this system by repealing for most migrant workers the requirement to obtain from their employer an 'exit permit\u2019 to leave the country, and a \u2018no-objection certificate\u2019 (NOC) to change jobs.\nSuch reliance means abusive employers can \u2013 and still do \u2013 depend on an array of tools to control their workforce. This includes cancelling or refraining from renewing workers\u2019 residence permits, or falsely reporting employees as having \u201cabsconded\u201d from their job (when they could be trying to escape abuse) - all of which can lead to workers being arrested and deported.\nOther barriers to workers changing jobs freely have emerged over the last few years, limiting the impact of the reforms and maintaining the status quo. These include the unwritten \u2013 but often spoken \u2013 need for workers to secure \u201crelease papers\u201d, a resignation letter, or any equivalent of an NOC, from their current employers in order to successfully move jobs, and the malicious use by employers of the Labour Law\u2019s non-compete clause to effectively ban workers from changing jobs under the guise that they are going to work for direct competitors."
    ],
    [
      "Qatar's efforts to improve the rights and working conditions of migrant workers have been widely praised by international organizations and human rights advocates. The government's decision to gradually repeal restrictions and implement reforms has been seen as a positive step towards addressing long-standing issues in the country's labor system.",
      "Between 2018 and 2020, the government took important steps towards tackling two central pillars of this system by repealing for most migrant workers the requirement to obtain from their employer an 'exit permit\u2019 to leave the country, and a \u2018no -objection certificate\u2019 (NOC) to change jobs.\nOther barriers to workers changing jobs freely have emerged over the last few years, limiting the impact of the reforms and maintaining the status quo.\nThese include the unwritten \u2013 but often spoken \u2013 need for workers to secure \u201crelease papers\u201d, a resignation letter, or any equivalent of an NOC, from their current employers in order to successfully move jobs.",
      "The changes in Qatar's labor laws have also been linked to the country's preparations for hosting the FIFA World Cup in 2022. The government's commitment to ensuring fair treatment and protection for migrant workers has been a key focus in the lead-up to the tournament, with various initiatives and policies being introduced to uphold these principles."
    ]
  ],
  "ground_truth": [
    "The global implications of the USA Supreme Court ruling on abortion are significant. The ruling has led to limited or no access to abortion for one in three women and girls of reproductive age in states where abortion access is restricted. These states also have weaker maternal health support, higher maternal death rates, and higher child poverty rates. Additionally, the ruling has had an impact beyond national borders due to the USA's geopolitical and cultural influence globally. Organizations and activists worldwide are concerned that the ruling may inspire anti-abortion legislative and policy attacks in other countries. The ruling has also hindered progressive law reform and the implementation of abortion guidelines in certain African countries. Furthermore, the ruling has created a chilling effect in international policy spaces, empowering anti-abortion actors to undermine human rights protections.",
    "According to the Carbon Majors database, the main contributors to GHG emissions and their role in global warming are fossil fuel companies. These companies, both state-owned and private, have produced almost a trillion tons of GHG emissions in 150 years. The database shows that 100 existing fossil fuel companies, along with eight that no longer exist, are responsible for 71% of all GHG emissions since 1988. In the Americas, the private companies that have contributed the most emissions are ExxonMobil, Chevron, and Peabody, all from the United States. Among state-owned companies in the Americas, the largest emitter is Mexican company Pemex, followed by Venezuelan company Petr\u00f3leos de Venezuela, S.A. It is important to note that while people with fewer resources, particularly from countries in the global South, do not significantly contribute to climate change, they are the ones most affected by its impacts. Approximately half of the global population lives in areas that are \"very vulnerable\" to climate change, and it is people with limited development opportunities who face the greatest risks. This unequal impact disproportionately affects the human rights of those with fewer resources and greater vulnerability in the context of climate change in the global South. Additionally, between 2010 and 2020, human mortality due to climate disasters was 15 times higher in vulnerable regions and populations.",
    "The largest private companies in the Americas that are the largest GHG emitters according to the Carbon Majors database are ExxonMobil, Chevron, and Peabody.",
    "Amnesty International urged its supporters to send appeals for the defenders' freedom to Nigerian authorities and later to send letters of outrage.",
    "The recommendations made by Amnesty International to the Special Rapporteur on Human Rights Defenders include embedding a focus on child and young HRDs in future work, raising awareness about the differences and challenges they face, incorporating age disaggregated data in reports, and creating safe spaces for engagement.",
    "The target audience of the two books created by Amnesty International on child rights are children and young people.",
    "The right that guarantees access to comprehensive information about past human rights violations, including the identities of the perpetrators and the fate of the victims, as well as the circumstances surrounding the violations, is the right to know the truth.",
    "The victims of gross human rights violations and their families, as well as members of society generally, have the right to be fully informed about human rights violations, including the identities of the perpetrators and the fate of the victims.",
    "Individuals can be found guilty under Article 207.3 of the Russian Criminal Code if their statements are contrary to the official position of the Russian authorities.",
    "The prosecution considers statements contrary to the official position as 'false' under Article 207.3 when they are in opposition to the official position of the Russian authorities.",
    "The factors that have contributed to the decline of independent civil society organizations in Nicaragua include arrests and harassment of human rights defenders, restrictive NGO laws, violent repression of protests, closure of civil society organizations and community centers, expropriation of belongings and premises, criminalization of social organizing and mobilization, restrictions on freedom of expression, implementation of repressive laws, constant threats of arrest and detention, restrictions on social media, and the imposition of silence through violence and repression.",
    "The conditions that designate wetlands as Ramsar sites are when they fulfill the criteria for identifying wetlands of international importance, as established under the Convention on Wetlands.",
    "COP15 was held in Montreal, Canada in 2022.",
    "The purpose of the agreement known as 30x30 is to designate 30% of the world's land and sea masses as protected areas for biodiversity conservation by 2030.",
    "The States failed to explicitly recognize Indigenous Peoples' lands and territories as a distinct category of protected area at COP15.",
    "The consequences of criminalizing abortion for marginalized individuals include increased stigma, lack of information, and disinformation. This can have severe and irreversible effects on these individuals. Girls and young women may be forced to carry pregnancies resulting from sexual violence due to a lack of knowledge about their rights. Marginalized individuals, such as those living in poverty, historically discriminated against, Indigenous and Afro-descendent women, migrants, and refugees, are disproportionately affected by abortion criminalization. The criminalization of abortion is a major factor contributing to the high number of unsafe abortions, which leads to increased maternal mortality and morbidity. Access to health services is undermined, resulting in preventable maternal deaths and complications. Marginalized individuals are forced to resort to unsafe clandestine abortion methods, putting their lives and health at risk. In Nigeria, restrictive abortion laws make it difficult to access safe abortion care.",
    "Social media companies should have the responsibility to invest in human oversight of their content moderation systems to ensure equal access to accurate sexual and reproductive health information. They should also engage in human rights due diligence to address risks and abuses related to their business model.",
    "Social media companies play a role in protecting users' rights online, regardless of their language and political views, by investing in human oversight of content moderation systems, engaging in human rights due diligence, and educating users about security and privacy features.",
    "Amnesty International documented labor abuses in Qatar, including workers being permitted only one day off each month, threats of salary cuts for taking more rest time, failure to provide pay slips, and overcrowded and dirty living conditions. These labor abuses relate to the kafala system because the kafala system imposes tight restrictions on migrant workers' freedom of movement and ability to change jobs without their employer's permission. This system allows abusive employers to control their workforce by canceling residence permits, falsely reporting employees as absconding, and using non-compete clauses to prevent workers from changing jobs.",
    "The government of Qatar started repealing restrictions on migrant workers between 2018 and 2020."
  ]
}

Simple example for POC

import json
from ragas import EvaluationDataset, SingleTurnSample, evaluate
from ragas.metrics import LLMContextRecall, FactualCorrectness, Faithfulness
from ragas.llms import LangchainLLMWrapper
from langchain.chat_models import ChatOpenAI
import pandas as pd

# Path to your JSON file
json_file_path = "your_json_file.json"

# Open the JSON file and load the data
with open(json_file_path, "r") as f:
    data = json.load(f)

# Extract questions, answers, and contexts from the JSON data
questions = data["question"]
answers = data["answer"]
contexts = data.get("contexts", [])  # Handle optional contexts

# Create samples for evaluation dataset
samples = []
for i in range(len(questions)):
    sample = SingleTurnSample(
        user_input=questions[i],
        reference=answers[i],
        response=None,  # We don't have a model-generated response for now
        retrieved_contexts=contexts[i] if i < len(contexts) else []
    )
    samples.append(sample)

# Create evaluation dataset
eval_dataset = EvaluationDataset(samples=samples)

# Define your LLM using Langchain's OpenAI wrapper for GPT-4
evaluator_llm = LangchainLLMWrapper(ChatOpenAI(model="gpt-4"))

# Define metrics to evaluate
metrics = [LLMContextRecall(), FactualCorrectness(), Faithfulness()]

# Evaluate the LLM on the dataset
results = evaluate(dataset=eval_dataset, metrics=metrics, llm=evaluator_llm)

# Display results in a pandas DataFrame (if pandas is installed)
if pd is not None:
    df = results.to_pandas()
    print(df.head())

———————————————–

Retrieval component using FAISS

import faiss
import numpy as np

# Sample documents (replace with your corpus)
documents = [
    "The Eiffel Tower is in Paris.",
    "The Great Wall of China is visible from space.",
    "Python is a programming language."
]

# Convert documents into vector embeddings (for simplicity, use random vectors here)
document_embeddings = np.random.random((len(documents), 512)).astype('float32')

# Set up FAISS index for retrieval
index = faiss.IndexFlatL2(512)
index.add(document_embeddings)

# Function to retrieve documents
def retrieve_documents(query_embedding, top_k=2):
    _, retrieved_indices = index.search(query_embedding, top_k)
    return [documents[idx] for idx in retrieved_indices[0]]

Generate component

import openai

# Set up your OpenAI API key
openai.api_key = 'your-openai-api-key'

# Function to generate responses using OpenAI's GPT model
def generate_response(prompt):
    response = openai.Completion.create(
        engine="text-davinci-003",
        prompt=prompt,
        max_tokens=150
    )
    return response['choices'][0]['text']

RAG pipeline setup

def rag_pipeline(query):
    # Convert query into a vector embedding (for simplicity, use random vectors)
    query_embedding = np.random.random((1, 512)).astype('float32')
    # Retrieve relevant documents
    retrieved_docs = retrieve_documents(query_embedding)
    # Combine retrieved docs into a prompt for the generative model
    prompt = ("Based on the following information:\n" + "\n".join(retrieved_docs)
              + "\nAnswer the question: " + query)
    # Generate response using OpenAI
    response = generate_response(prompt)
    return response

Integrate Ragas testing framework

import ragas

# Sample queries and expected responses
test_data = [
    {"query": "Where is the Eiffel Tower?", "expected": "The Eiffel Tower is in Paris."},
    {"query": "What is Python?", "expected": "Python is a programming language."}
]

# Running tests with RAGAS
for test_case in test_data:
    query = test_case['query']
    expected_response = test_case['expected']
    # Get the model's response from the RAG pipeline
    generated_response = rag_pipeline(query)
    # Use RAGAS evaluation metrics to assess the response
    relevance = ragas.evaluate_relevance(generated_response, expected_response)
    coherence = ragas.evaluate_coherence(generated_response)
    # Print the evaluation results
    print(f"Query: {query}")
    print(f"Generated Response: {generated_response}")
    print(f"Expected Response: {expected_response}")
    print(f"Relevance Score: {relevance}")
    print(f"Coherence Score: {coherence}")
    print("---------")

Ragas test results

Query: Where is the Eiffel Tower?
Generated Response: The Eiffel Tower is located in Paris, France.
Expected Response: The Eiffel Tower is in Paris.
Relevance Score: 0.95
Coherence Score: 0.90
---------
Query: What is Python?
Generated Response: Python is a programming language used for many purposes, including web development, automation, and data analysis.
Expected Response: Python is a programming language.
Relevance Score: 0.93
Coherence Score: 0.92
---------

Java code examples

import java.util.HashMap;
import java.util.Map;

public class PeopleAddressList {

    private Map<String, String> addressList;

    public PeopleAddressList() {
        addressList = new HashMap<>();
    }

    public void addPerson(String name, String address) {
        addressList.put(name, address);
    }

    public String getAddress(String name) {
        return addressList.get(name);
    }

    // Expose the underlying map so other classes (e.g. AddressSummary) can iterate over it
    public Map<String, String> getAddressList() {
        return addressList;
    }

    public void removePerson(String name) {
        addressList.remove(name);
    }

    public int size() {
        return addressList.size();
    }

    public void clear() {
        addressList.clear();
    }

    public void printAddressList() {
        for (String name : addressList.keySet()) {
            System.out.println(name + " : " + addressList.get(name));
        }
    }

    public static void main(String[] args) {
        PeopleAddressList list = new PeopleAddressList();
        list.addPerson("John Doe", "123 Main Street");
        list.addPerson("Jane Doe", "456 Elm Street");
        list.printAddressList();
    }
}

//Main class as below

import java.util.Map;

public class AddressSummary {

    private PeopleAddressList addressList;

    public void setAddressList(PeopleAddressList addressList) {
        this.addressList = addressList;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<String, String> entry : addressList.getAddressList().entrySet()) {
            sb.append(entry.getKey()).append(" : ").append(entry.getValue()).append("\n");
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        AddressGeneration generation = new AddressGeneration();
        PeopleAddressList list = generation.getPeopleAddressList();
        AddressSummary summary = new AddressSummary();
        summary.setAddressList(list);
        System.out.println(summary);
    }
}

public class AddressGeneration {

    public PeopleAddressList getPeopleAddressList() {
        PeopleAddressList list = new PeopleAddressList();
        list.addPerson("John Doe", "123 Main Street");
        list.addPerson("Jane Doe", "456 Elm Street");
        return list;
    }
}

//Facade Design pattern

import java.util.HashMap;
import java.util.Map;

public class AddressSummaryFacade {

private AddressGeneration generation;
private AddressSummary summary;

public AddressSummaryFacade() {
    generation = new AddressGeneration();
    summary = new AddressSummary();
}

public String getAddressSummary() {
    summary.setAddressList(generation.getPeopleAddressList());
    return summary.toString();
}

}

public class Main {

public static void main(String[] args) {
    AddressSummaryFacade facade = new AddressSummaryFacade();
    System.out.println(facade.getAddressSummary());
}

}

tablesaw vs apachepoi

import org.apache.poi.ss.usermodel.*;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import tech.tablesaw.api.Table;
import org.openjdk.jmh.annotations.*;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

@BenchmarkMode(Mode.Throughput)
@Warmup(iterations = 3, time = 1)
@Measurement(iterations = 5, time = 1)
@State(Scope.Benchmark)
public class ExcelReadBenchmark {

    private File excelFile;
    private Table tablesawTable;

    @Setup
    public void setup() {
        // Initialize your Excel file and Tablesaw table here
        excelFile = new File("your-excel-file.xlsx");          // Replace with your Excel file path
        tablesawTable = Table.read().csv("your-csv-file.csv"); // Replace with your CSV file path
    }

    @Benchmark
    public void readAndPrintWithTablesaw() {
        // Perform reading and printing using Tablesaw
        String column1 = tablesawTable.column("Column1").toString();
        // Print or process column1 here
    }

    @Benchmark
    public void readAndPrintWithApachePOI() throws IOException {
        // Perform reading and printing using Apache POI
        FileInputStream inputStream = new FileInputStream(excelFile);
        Workbook workbook = new XSSFWorkbook(inputStream);
        Sheet sheet = workbook.getSheetAt(0); // Assuming the data is in the first sheet
        for (Row row : sheet) {
            Cell cell = row.getCell(0); // Access the first cell (column 1)
            if (cell != null) {
                String cellValue = cell.toString();
                // Print or process cellValue here
            }
        }
        workbook.close();
        inputStream.close();
    }

    public static void main(String[] args) throws Exception {
        org.openjdk.jmh.Main.main(args);
    }
}

read from 100 to 200

import tech.tablesaw.api.*;
import tech.tablesaw.io.csv.CsvReadOptions;

public class ReadCSVWithTablesaw {

public static void main(String[] args) {
    // Define the path to your CSV file
    String csvFilePath = "path/to/your/file.csv"; // Replace with your CSV file path

    // Define the range of rows you want to read (100 to 200)
    int startRow = 100;
    int endRow = 200;

    try {
        // Define read options
        CsvReadOptions options = CsvReadOptions.builder(csvFilePath)
                .header(true) // Assuming the CSV file has a header row
                .build();

        // Read the CSV file into a Table
        Table table = Table.read().csv(options);

        // Extract the desired rows (100 to 200) from column index 1 and print them.
        // Column has no subList method, so iterate over the row range directly
        // (row indexes are zero-based).
        int lastRow = Math.min(endRow, table.rowCount() - 1);
        for (int i = startRow; i <= lastRow; i++) {
            System.out.println(table.column(1).getString(i));
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

}

// SpringBoot

//YourService

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.stereotype.Service;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Mono;

@Service
public class YourService {

public Mono<Address> fetchAddress(String branch, String filename) {
    return WebClient.create()
            .get()
            .uri("https://bitbucket.org/{filename}?branch={branch}")
            .retrieve()
            .bodyToMono(String.class)
            .map(json -> parseAddressFromJson(json)); // Custom parsing logic
}

private Address parseAddressFromJson(String json) {
    // Implement your JSON parsing logic to convert JSON string to Address object
    // For example, using Jackson ObjectMapper:
    ObjectMapper objectMapper = new ObjectMapper();
    try {
        return objectMapper.readValue(json, Address.class);
    } catch (JsonProcessingException e) {
        // Handle parsing exception
        throw new RuntimeException("Error parsing JSON", e);
    }
}

}

//Controller

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Mono;

@RestController
@RequestMapping("/api/addresses")
public class AddressController {

private final YourService yourService;

@Autowired
public AddressController(YourService yourService) {
    this.yourService = yourService;
}

@GetMapping("/{branch}/{filename}")
public Mono<ResponseEntity<Address>> getAddress(
        @PathVariable String branch,
        @PathVariable String filename) {
    return yourService.fetchAddress(branch, filename)
            .map(ResponseEntity::ok)
            .defaultIfEmpty(ResponseEntity.notFound().build());
}

}

// Detailed implementation

//AddressService

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Mono;

import java.io.IOException;

@Service
public class AddressService {

private final WebClient webClient;
private final String repositoryUrl;
private final String branch;

public AddressService(WebClient.Builder webClientBuilder,
                      @Value("${bitbucket.repositoryUrl}") String repositoryUrl,
                      @Value("${bitbucket.branch}") String branch) {
    this.webClient = webClientBuilder.build();
    this.repositoryUrl = repositoryUrl;
    this.branch = branch;
}

public Mono<Address> getAddressFromBitbucket(String filename) {
    // Construct the URL to the JSON file in the Bitbucket repository
    String url = String.format("%s/%s/%s", repositoryUrl, branch, filename);

    // Make a GET request using WebClient
    return webClient.get()
            .uri(url)
            .retrieve()
            .bodyToMono(String.class)
            .flatMap(this::parseAddressFromJson);
}

private Mono<Address> parseAddressFromJson(String jsonResponse) {
    ObjectMapper mapper = new ObjectMapper();
    try {
        JsonNode jsonNode = mapper.readTree(jsonResponse);

        // Extract address data from the JSON node
        Address address = new Address();
        address.setStreet(jsonNode.get("street").asText());
        address.setCity(jsonNode.get("city").asText());
        address.setState(jsonNode.get("state").asText());
        address.setCountry(jsonNode.get("country").asText());
        address.setPostalCode(jsonNode.get("postalCode").asText());

        return Mono.just(address);
    } catch (IOException e) {
        return Mono.error(e);
    }
}

}

// app prop / yaml

bitbucket.repositoryUrl=https://api.bitbucket.org/2.0/repositories/{username}/{repo_slug}/src
bitbucket.branch=master

// your service

@Service
public class YourService {

private final AddressService addressService;

public YourService(AddressService addressService) {
    this.addressService = addressService;
}

public Mono<Address> getAddress(String filename) {
    return addressService.getAddressFromBitbucket(filename);
}

}

// controller

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Mono;

@RestController
@RequestMapping("/api")
public class AddressController {

private final YourService yourService;

@Autowired
public AddressController(YourService yourService) {
    this.yourService = yourService;
}

@GetMapping("/address/{filename}")
public Mono<ResponseEntity<Address>> getAddress(@PathVariable String filename) {
    return yourService.getAddress(filename)
            .map(ResponseEntity::ok)
            .defaultIfEmpty(ResponseEntity.notFound().build());
}

}

Addl java examples

@Service
public class BitbucketService {
private final WebClient webClient;

public BitbucketService() {
    webClient = WebClient.builder()
            .baseUrl("https://api.bitbucket.org/2.0/")
            .build();
}

public String getJsonFile() {
    String repoOwner = "your-repo-owner";
    String repoName = "your-repo-name";
    String filePath = "path/to/your/json/file.json";

    // Bitbucket Cloud 2.0 serves file contents from
    // repositories/{workspace}/{repo_slug}/src/{branch}/{path};
    // adjust the branch segment ("main" here) to match your repository
    String url = String.format("repositories/%s/%s/src/main/%s", repoOwner, repoName, filePath);
    return webClient.get().uri(url).retrieve().bodyToMono(String.class).block();
}

}

@RestController
public class JsonController {
private final BitbucketService bitbucketService;

public JsonController(BitbucketService bitbucketService) {
    this.bitbucketService = bitbucketService;
}

@GetMapping("/json")
public String getJson() {
    return bitbucketService.getJsonFile();
}

}

import java.net.URL;
import java.net.HttpURLConnection;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.stream.Collectors;
import com.fasterxml.jackson.databind.ObjectMapper; // For JSON parsing

public class JsonReader {
public static void main(String[] args) {
    String jsonUrl = "https://your_git_repo_url/abc.json"; // Replace with your file's URL

    try {
        URL url = new URL(jsonUrl);
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.setRequestMethod("GET");

        BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
        String jsonString = reader.lines().collect(Collectors.joining());
        reader.close();

        // Deserialize JSON (assuming you have a suitable class 'MyData')
        ObjectMapper mapper = new ObjectMapper();
        MyData dataObject = mapper.readValue(jsonString, MyData.class);

        System.out.println(dataObject); // Access your data 
    } catch (Exception e) {
        e.printStackTrace();
    }
}

}

Java excel reader

import org.apache.poi.ss.usermodel.*;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

public class ExcelUpdater {

    public static void main(String[] args) {
        String excelFilePath = "path/to/your/excel-file.xlsx"; // Change to your Excel file path

        try (FileInputStream fis = new FileInputStream(excelFilePath);
             Workbook workbook = new XSSFWorkbook(fis)) {

            Sheet sheet = workbook.getSheet("Sheet1");

            // Loop through all rows in column A
            for (int i = 0; i < 100; i++) {
                Row row = sheet.getRow(i);
                if (row != null) {
                    Cell cellA = row.getCell(0); // Column A
                    Cell cellB = row.getCell(1); // Column B
                    if (cellA != null && cellB != null) {
                        // Update Column A to match Column B
                        cellA.setCellValue(cellB.getStringCellValue());
                    }
                }
            }

            // Write the changes back to the file
            try (FileOutputStream fos = new FileOutputStream(excelFilePath)) {
                workbook.write(fos);
            }

            System.out.println("Excel file updated successfully!");

        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

SpringBoot examples

Git access logic

package com.web.client.demo.controller;

import com.web.client.demo.model.WorkflowRequest;
import com.web.client.demo.service.GitService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;

/**
 * Controller to handle Git and Gradle operations.
 */
@RestController
@RequestMapping("/api/get")
public class GitController {

    private final GitService gitService;

    @Autowired
    public GitController(GitService gitService) {
        this.gitService = gitService;
    }

    /**
     * Endpoint to trigger the git clone/pull and gradle clean test workflow.
     *
     * @param request The request containing Git repository URL and branch.
     * @return The combined output of Git and Gradle commands.
     */
    @PostMapping("/execute-workflow")
    public ResponseEntity<String> executeWorkflow(@RequestBody WorkflowRequest request) {
        try {
            String gitUrl = request.getGitUrl();
            String branch = request.getBranch();
            // Define target directory (e.g., repositories will be cloned into a 'repos' folder in the current directory)
            String targetDir = "repos/" + extractRepoName(gitUrl);
            // Define project sub-directory (e.g., 'quality/services')
            String projectSubDir = "quality/services";
            String output = gitService.executeWorkflow(gitUrl, branch, targetDir, projectSubDir);
            return ResponseEntity.ok(output);
        } catch (RuntimeException e) {
            return ResponseEntity.status(500).body("Error: " + e.getMessage());
        }
    }

    /**
     * Extracts the repository name from the Git URL.
     *
     * @param gitUrl The Git repository URL.
     * @return The repository name.
     */
    private String extractRepoName(String gitUrl) {
        if (gitUrl == null || gitUrl.isEmpty()) {
            throw new IllegalArgumentException("Git URL cannot be null or empty.");
        }
        // Remove trailing .git if present
        gitUrl = gitUrl.endsWith(".git") ? gitUrl.substring(0, gitUrl.length() - 4) : gitUrl;
        // Extract repository name
        return gitUrl.substring(gitUrl.lastIndexOf('/') + 1);
    }
}

package com.web.client.demo.model;

import lombok.Data;

@Data
public class WorkflowRequest {
private String gitUrl;
private String branch;
}

package com.web.client.demo.service;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

@Service
public class GitService {

private static final Logger logger = LoggerFactory.getLogger(GitService.class);

/**
 * Executes a shell command in a specific directory.
 *
 * @param command    The command to execute.
 * @param workingDir The directory in which to execute the command.
 * @return The output of the command.
 * @throws IOException          If an I/O error occurs.
 * @throws InterruptedException If the process is interrupted.
 */
private String executeCommand(String[] command, File workingDir) throws IOException, InterruptedException {
    ProcessBuilder processBuilder = new ProcessBuilder(command);
    processBuilder.directory(workingDir);
    processBuilder.redirectErrorStream(true); // Merge error and output streams

    logger.info("Executing command: {}", String.join(" ", command));
    Process process = processBuilder.start();

    // Read the output
    StringBuilder output = new StringBuilder();
    try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
        String line;
        while ((line = reader.readLine()) != null) {
            output.append(line).append("\n");
            logger.info(line);
        }
    }

    // Wait for the process to complete
    int exitCode = process.waitFor();
    logger.info("Command exited with code: {}", exitCode);

    if (exitCode != 0) {
        throw new RuntimeException("Command " + String.join(" ", command) + " failed with exit code " + exitCode);
    }

    return output.toString();
}

/**
 * Clones the Git repository if it doesn't exist, otherwise performs a git pull.
 *
 * @param gitUrl     The URL of the Git repository.
 * @param branch     The branch to clone or pull.
 * @param targetDir  The directory where the repository should be cloned.
 * @return The output of the git clone or git pull command.
 */
public String cloneOrPullRepo(String gitUrl, String branch, String targetDir) {
    File repoDir = new File(targetDir);

    try {
        if (repoDir.exists()) {
            logger.info("Repository already exists. Performing git pull.");
            String[] pullCommand = {"git", "pull"};
            return executeCommand(pullCommand, repoDir);
        } else {
            logger.info("Cloning repository.");
            // Run the clone from the parent directory (creating it first if needed)
            // and clone into the repository directory name
            File parentDir = repoDir.getAbsoluteFile().getParentFile();
            if (parentDir != null && !parentDir.exists()) {
                parentDir.mkdirs();
            }
            String[] cloneCommand = {"git", "clone", "-b", branch, gitUrl, repoDir.getName()};
            return executeCommand(cloneCommand, parentDir);
        }
    } catch (IOException | InterruptedException e) {
        logger.error("Error during git operation", e);
        throw new RuntimeException("Git operation failed: " + e.getMessage(), e);
    }
}

/**
 * Executes the Gradle command in the specified directory.
 *
 * @param projectDir The directory where the Gradle command should be executed.
 * @return The output of the Gradle command.
 */
public String runGradleCleanTest(String projectDir) {
    File gradleDir = new File(projectDir);

    try {
        logger.info("Running Gradle clean test.");
        String[] gradleCommand = {"./gradlew", "clean", "test", "--info"};
        return executeCommand(gradleCommand, gradleDir);
    } catch (IOException | InterruptedException e) {
        logger.error("Error during Gradle operation", e);
        throw new RuntimeException("Gradle operation failed: " + e.getMessage(), e);
    }
}

/**
 * Executes the full workflow: clone/pull Git repository and run Gradle tests.
 *
 * @param gitUrl      The URL of the Git repository.
 * @param branch      The branch to clone or pull.
 * @param targetDir   The directory where the repository should be cloned.
 * @param projectSubDir The sub-directory within the repository to run Gradle commands.
 * @return The combined output of Git and Gradle commands.
 */
public String executeWorkflow(String gitUrl, String branch, String targetDir, String projectSubDir) {
    StringBuilder combinedOutput = new StringBuilder();

    // Step 1: Clone or Pull Git Repository
    combinedOutput.append("=== Git Operation ===\n");
    String gitOutput = cloneOrPullRepo(gitUrl, branch, targetDir);
    combinedOutput.append(gitOutput).append("\n");

    // Step 2: Run Gradle Clean Test
    combinedOutput.append("=== Gradle Operation ===\n");
    String gradleOutput = runGradleCleanTest(targetDir + File.separator + projectSubDir);
    combinedOutput.append(gradleOutput).append("\n");

    return combinedOutput.toString();
}

}

some python examples

from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity

corpus = [
    "The first document is about space exploration.",
    "The second document is about machine learning.",
    "The third is about gardening."
]

vectorizer = TfidfVectorizer(stop_words='english')
tfidf_matrix = vectorizer.fit_transform(corpus)

def answer_question(query):
    query_vector = vectorizer.transform([query])
    similarities = cosine_similarity(query_vector, tfidf_matrix)[0]
    most_similar_doc_idx = similarities.argmax()
    print("Most relevant document:", corpus[most_similar_doc_idx])

question = "What is machine learning?"
answer_question(question)

AI Agentic LLM tasks

Anon and deanon examples

AnonymizerRefined:

from functools import wraps
import re
from faker import Faker
import spacy
from DeanonymizerRefined import DeanonymizerRefined
from datetime import datetime

class AnonymizerRefined:
    def __init__(self, custom_patterns=None):
        self.fake = Faker()

        # Define default regex patterns
        phone_pattern = r"\+?\d{1,4}?[-.\s]?\(?\d{1,3}?\)?[-.\s]?\d{1,4}[-.\s]?\d{1,4}[-.\s]?\d{1,9}"  # noqa
        email_pattern = r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b'
        credit_card_pattern = r'\d{4}[-\s]?\d{4}[-\s]?\d{4}[-\s]?\d{4}'
        address_pattern = r'\d{1,5}\s\w+(\s\w+)*,\s\w+,\s\w+(\s\w+)*'

        # Initialize pattern functions
        self.pattern_functions = [
            self.create_anonymize_function(phone_pattern,
                                           self.fake.phone_number),
            self.create_anonymize_function(email_pattern,
                                           self.fake.email),
            self.create_anonymize_function(credit_card_pattern,
                                           self.fake.credit_card_number),
            self.create_anonymize_function(address_pattern,
                                           self.fake.address),
        ]

        # Add any custom patterns
        if custom_patterns:
            for pattern, func in custom_patterns.items():
                self.pattern_functions.append(
                    self.create_anonymize_function(pattern, func))

    def generate_unique_fake(self, original, generator_func):
        fake_value = generator_func()
        while fake_value == original:
            fake_value = generator_func()
        return fake_value

    def create_anonymize_function(self, pattern, fake_func):
        try:
            re.compile(pattern)
        except re.error:
            raise ValueError(f"Invalid pattern: {pattern}. Must be a regular expression.")  # noqa

        def anonymize_func(sentence, anon_sentence, mappings):
            data_map = {}
            for data in re.findall(pattern, sentence):
                # If data is a tuple, join it into a string
                if isinstance(data, tuple):
                    data = ' '.join(data)
                fake_data = self.generate_unique_fake(data, fake_func)
                data_map[data] = fake_data
                anon_sentence = anon_sentence.replace(data, fake_data)
            mappings[pattern] = data_map
            return anon_sentence

        return anonymize_func

    def anonymize_data(self, sentence):
        anon_sentence = sentence
        mappings = {}
        for pattern_function in self.pattern_functions:
            anon_sentence = pattern_function(sentence, anon_sentence, mappings)
        return anon_sentence, mappings

    def anonymize(self, *args_to_anonymize):
        def inner_decorator(func):
            @wraps(func)
            def wrapper(*args, **kwargs):
                for arg_name in args_to_anonymize:
                    if arg_name in kwargs:
                        anonymized_data, _ = self.anonymize_data(
                            kwargs[arg_name])
                        kwargs[arg_name] = anonymized_data
                return func(*args, **kwargs)

            return wrapper

        return inner_decorator

# Create an instance of the AnonymizerRefined class
anonymizer = AnonymizerRefined()

# Create an instance of the DeanonymizerRefined class
deanonymizer = DeanonymizerRefined()

# Open the text file and read its content
with open('mydata.txt', 'r') as file:
    content = file.read()

# Anonymize the content and collect the mappings
anonymized_content, mappings = anonymizer.anonymize_data(content)
print(' ###################### anonymized_content ###################### ')
print(anonymized_content)
print('######### mapping ##########')
print(mappings)

# Deanonymize the content
deanonymized_content = deanonymizer.deanonymize(anonymized_content, mappings)

print(' ###################### deanonymized_content ###################### ')
# Print the deanonymized content
print(deanonymized_content)

# Get the current date and time
now = datetime.now()

# Format the date and time
timestamp = now.strftime("%Y-%m-%d %H:%M:%S")

# Open the output file in append mode
with open('output.txt', 'a') as file:
    # Write the timestamp, anonymized content, mappings, and deanonymized content to the file
    file.write("\n")
    file.write(f" ############### Timestamp: {timestamp}\n")
    file.write(" ############### Anonymized content:\n")
    file.write(anonymized_content + "\n")
    file.write("\n")
    file.write(" ############### Mappings:\n")
    file.write(str(mappings) + "\n")
    file.write("\n")
    file.write(" ############### Deanonymized content:\n")
    file.write(deanonymized_content + "\n")
    file.write("\n")
    file.write("\n")

DeanonymizerRefined:

class DeanonymizerRefined:
    def deanonymize(self, text, mappings):
        # Loop through each pattern mapping and replace the anonymized values
        # back to the original ones
        for _, pattern_map in mappings.items():
            for original_value, fake_value in pattern_map.items():
                text = text.replace(fake_value, original_value)
        return text

chainlit chatbot

# Run the app with the Chainlit CLI, for example:
#   chainlit run ui.py
#   chainlit run ui.py -w                          # auto-reload on file changes
#   chainlit run ui.py --host 0.0.0.0 --port 8000
import os
import sys
from llama_index.core.tools import FunctionTool
from llama_index.core.agent import ReActAgent
from llama_index.llms.ollama import Ollama
#from llama_index.llms.core import Settings
# from llama_index.indices.service_context import ServiceContext
import nest_asyncio
import chainlit as cl
# from llama_index.llms import Ollama


nest_asyncio.apply()
llm = Ollama(model="llama3", request_timeout=120.0)
# Settings.llm = llm
# service_context = ServiceContext.from_defaults(
# llm=Ollama(temperature=0.7, max_tokens=1024)
# )

# llama_index.set_global_default(
# llm=Ollama(model="llama3"),
# chunk_size=512,
# )

# 2. Initialize LLM with the ServiceContext
# llm = Ollama(service_context=service_context)

# 1. create custom tools
def multiply(a: int, b: int) -> int:
    """Multiply two numbers together."""
    return a * b


def add(a: int, b: int) -> int:
    """Add two numbers together."""
    return a + b


def subtract(a: int, b: int) -> int:
    """Subtract two numbers."""
    return a - b


def divide(a: int, b: int) -> float:
    """Divide two numbers."""
    return a / b


multiply_tool = FunctionTool.from_defaults(fn=multiply)
add_tool = FunctionTool.from_defaults(fn=add)
subtract_tool = FunctionTool.from_defaults(fn=subtract)
divide_tool = FunctionTool.from_defaults(fn=divide)

agent = ReActAgent.from_tools(
    [multiply_tool, add_tool, subtract_tool, divide_tool], llm=llm, verbose=True
)

# 2. create a custom agent
@cl.on_chat_start
async def on_chat_start():
    await cl.Message(content="Hello! I am a simple calculator bot. Please enter the first number.").send()
    cl.user_session.set("agent", agent)


@cl.on_message
async def on_message(message: cl.Message):
    agent = cl.user_session.get("agent")
    response = agent.chat(message.content)
    await cl.Message(content=str(response)).send()

How to setup kubernetes on windows WSL

How do I setup Kubernetes and run kubectl commands from windows machine

WSL (Windows Subsystem for Linux) can be leveraged when you want to run kubectl commands from a Windows machine. Steps are as below:

  1. Enable WSL and install a Linux distribution (e.g. Kali Linux or Ubuntu)
  2. Install Docker Desktop for Windows
  3. Create a Docker Hub account and link it to Docker Desktop
  4. In Docker Desktop, go to Settings
  5. Enable Kubernetes
  6. Set up a Persistent Volume Claim (PVC) by creating a YAML configuration for it
  7. Run kubectl apply on the YAML file
  8. When creating pods or kubectl deployments, make sure volume mounting is done based on the PVC
  9. Verify pod status by accessing the Docker container directories from the command line

Selenide

Selenide is a wrapper library on top of Selenium for test automation projects.

How to get Selenide jars – reference selenide.org

How do I disable WebDriverManager API calls to GitHub so that driver executables are not downloaded automatically?

The system property -Dselenide.driverManagerEnabled=false avoids pulling driver executables, and you can define the driver binary path in your Selenium project configuration instead.
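A minimal sketch of that setup, assuming a Chrome test with a locally managed chromedriver binary (the file path is hypothetical; the selenide.driverManagerEnabled property is the one mentioned above):

public class DriverSetup {
    public static void main(String[] args) {
        // Disable Selenide's automatic driver management
        System.setProperty("selenide.driverManagerEnabled", "false");
        // Point Selenium at a locally installed chromedriver (replace with your actual path)
        System.setProperty("webdriver.chrome.driver", "/opt/drivers/chromedriver");
    }
}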

Angular 12.1 new features

Angular 12.1 new features:

Improved CLI performance; ng build now defaults to a production build, which avoids accidentally deploying development builds to production.

The new @use syntax points to the new Sass API in Angular 12.1. The migration happens automatically when you run ng update on Angular 12.1.

Angular components support inline Sass in the styles field of the @Component decorator.

Strict mode can be enabled via the CLI to catch errors early in the development cycle.

The Webpack 5 module is production ready.

Unused methods are removed by the DomAdapter to improve performance.

Custom router outlet implementations are allowed.

min and max validators for forms have been added.

TypeScript 4.2 support has been added, and support for TypeScript 4.0 and 4.1 has been dropped.

Support for IE11 is deprecated.

These are some of the features; please go through the official Angular docs for detailed info.

What is a proxy server?

What is a proxy server?

A proxy server is a server that acts as an intermediary between your computer and the internet.

Why are proxy servers used?

The main purpose is security: hiding your machine's IP address from the internet.

Proxies are also used to track employee activity on the various sites they access.

What is a VPN and why is a VPN used?

A Virtual Private Network (VPN) is used for data protection and for building a secure connection between your computer and the internet.

A VPN creates a virtual tunnel between your computer and the internet or the external server you request information from.

Selenium tutorials

Selenium: Implicit and Explicit wait

// Implicit wait
driver.manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS);

// Explicit wait
WebDriverWait wait = new WebDriverWait(driver, 20);
wait.until(ExpectedConditions.textToBePresentInElementLocated(
        By.id("status"), "Loaded")); // example locator and expected text
Selenium: Keyboard and Mouse events
keyDown(); keyUp(); sendKeys(); doubleClick();

Code Example:
// Actions methods take a WebElement, not a locator string
WebElement loginTextBox = driver.findElement(By.id("logintextbox"));
Actions builder = new Actions(driver);
Action actions = builder
        .moveToElement(loginTextBox)
        .click()
        .keyDown(loginTextBox, Keys.SHIFT)
        .sendKeys(loginTextBox, "hello")
        .keyUp(loginTextBox, Keys.SHIFT)
        .doubleClick(loginTextBox)
        .contextClick()
        .build();
actions.perform();

What is Cucumber BDD for Selenium tests

Cucumber BDD (Behaviour-Driven Development): Cucumber is a testing tool used for software testing with a behaviour-driven development approach. Cucumber BDD comes with feature files, where acceptance, functional, and regression tests are written in plain English, called the Gherkin language. A sample feature file in Gherkin looks like this:

Feature: my app login test

Scenario: positive test on login

Given user enters correct credentials

Then user should be able to login

Scenario: login negative test

Given user enters wrong password

Then login is not allowed

In the above feature file, each line below a Scenario (i.e. the Given and Then steps) is mapped to a Java method that performs the Selenium automation test.

What is a TestRunner in Cucumber:

The TestRunner is a program used in Cucumber to access the feature file; it drives the execution of the Selenium automation features. A feature file contains the user requirement scenarios written in plain English (the Gherkin language), which makes the requirement more readable and understandable. A minimal runner and step definition sketch follows.
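A minimal sketch of a JUnit-based Cucumber TestRunner plus step definitions matching the feature above (the features path, glue package, and class names are assumptions):

import io.cucumber.junit.Cucumber;
import io.cucumber.junit.CucumberOptions;
import org.junit.runner.RunWith;

// TestRunner: points Cucumber at the feature files and the step definition (glue) package
@RunWith(Cucumber.class)
@CucumberOptions(
        features = "src/test/resources/features",   // assumed location of the .feature files
        glue = "com.example.steps"                   // assumed step definition package
)
public class TestRunner {
}

import io.cucumber.java.en.Given;
import io.cucumber.java.en.Then;

// Step definitions: each annotated method is mapped to a Gherkin step from the feature file
public class LoginSteps {

    @Given("user enters correct credentials")
    public void userEntersCorrectCredentials() {
        // Selenium code to fill in the login form would go here
    }

    @Then("user should be able to login")
    public void userShouldBeAbleToLogin() {
        // Selenium assertion on the post-login page would go here
    }
}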

Unit testing mocks, stubs

Mocks replace an external interface. Mocks are not used to check the return value but to verify that a function-level call happened, was called correctly, etc. Stubs are replacements for return values and are used to test the behaviour of code. A stub generates a pre-defined output: a fake that replaces the actual implementation, like replacing a web server with a local HTTP server or replacing a database server with a fake in-memory DB to generate DB responses. A small sketch follows.
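A small Java sketch of that difference, using Mockito for the mock and a hand-written stub (the PaymentGateway interface and the values are made up for illustration):

import static org.mockito.Mockito.*;

public class MockVsStubExample {

    // A hypothetical external interface used by the code under test
    interface PaymentGateway {
        boolean charge(String account, double amount);
    }

    // Stub: returns a canned value so we can test behaviour that depends on it
    static class AlwaysApprovedGateway implements PaymentGateway {
        @Override
        public boolean charge(String account, double amount) {
            return true; // pre-defined output, no real call
        }
    }

    public static void main(String[] args) {
        // Mock: we only verify that the interaction happened as expected
        PaymentGateway mockGateway = mock(PaymentGateway.class);
        mockGateway.charge("acc-1", 10.0);
        verify(mockGateway).charge("acc-1", 10.0); // verification, not a return-value check

        // Stub: the canned return value drives the test
        PaymentGateway stubGateway = new AlwaysApprovedGateway();
        System.out.println("Charge approved? " + stubGateway.charge("acc-1", 10.0));
    }
}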

TestNG Parameterization:

Approach 1: feed input parameters via the TestNG XML suite file and receive them using the @Parameters annotation on the target method.

Approach 2: use a @DataProvider for complex parameters, such as data from a database, an XLS file, a property file, an ArrayList, etc. Both approaches are sketched below.
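A brief sketch of both approaches (the parameter names and values are illustrative):

import org.testng.annotations.DataProvider;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;

public class ParameterizationExamples {

    // Approach 1: value comes from a <parameter> element in the TestNG XML suite file
    @Parameters({"browser"})
    @Test
    public void openBrowser(String browser) {
        System.out.println("Running against browser: " + browser);
    }

    // Approach 2: complex/multiple data sets supplied by a DataProvider
    @DataProvider(name = "loginData")
    public Object[][] loginData() {
        return new Object[][]{
                {"user1", "password1"},
                {"user2", "password2"}
        };
    }

    @Test(dataProvider = "loginData")
    public void loginTest(String username, String password) {
        System.out.println("Logging in as " + username);
    }
}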

What is a single slash '/' in XPath in Selenium:

A single slash '/' is used when an absolute XPath is used for element identification with Selenium WebDriver.

What is a double slash '//' in XPath in Selenium:

A double slash '//' is used when a relative XPath is used for element identification with Selenium WebDriver. A relative XPath ('//') is the better element identification strategy. Both forms are shown below.
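For example (the HTML structure and the id are hypothetical):

// Absolute XPath: starts from the root node with a single slash
WebElement absolute = driver.findElement(
        By.xpath("/html/body/div[1]/form/input[2]"));

// Relative XPath: starts anywhere in the DOM with a double slash
WebElement relative = driver.findElement(
        By.xpath("//input[@id='username']"));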


What is Polymorphism:
Polymorphism is one of the OOPs concepts where many forms or characteristics are shown by a single Java method.

Different types of Polymorphism:

1. Compile-time/static polymorphism
2. Runtime/dynamic polymorphism

1. Compile-time/static polymorphism is achieved by method overloading.
2. Runtime/dynamic polymorphism is achieved by method overriding.

Method overloading is a feature where many methods have the same name but different argument types. Method overriding is a feature where a child class method overrides the parent class method. A short example follows.
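A compact example of both kinds (the Shape/Circle classes are made up for illustration):

public class PolymorphismDemo {

    static class Shape {
        // Overloading: same method name, different argument types (compile-time polymorphism)
        double area(double side) { return side * side; }
        double area(double length, double width) { return length * width; }

        String describe() { return "generic shape"; }
    }

    static class Circle extends Shape {
        // Overriding: the child class redefines the parent method (runtime polymorphism)
        @Override
        String describe() { return "circle"; }
    }

    public static void main(String[] args) {
        Shape shape = new Circle();
        System.out.println(shape.area(2));      // overloaded: square area
        System.out.println(shape.area(2, 3));   // overloaded: rectangle area
        System.out.println(shape.describe());   // overridden: prints "circle"
    }
}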


Interface:

An interface is like a class, having variables and methods, but its methods are abstract by default.

Abstract methods have no body/implementation.

Any class can implement an interface and thereby achieve abstraction and multiple inheritance. A small example follows.
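For instance (the interface and class names are illustrative):

public class InterfaceDemo {

    // Methods in an interface are abstract by default (no body)
    interface Vehicle {
        void start();
    }

    // A class implements the interface and provides the implementation
    static class Car implements Vehicle {
        @Override
        public void start() {
            System.out.println("Car started");
        }
    }

    public static void main(String[] args) {
        Vehicle vehicle = new Car();
        vehicle.start();
    }
}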

Encapsulation (Java/Selenium/OOPs): Encapsulation is the binding of data (variables) together with code (methods) as a single unit in a class. It helps hide data from other classes when the data is declared as private; by keeping the methods public, other classes can still get to the data. A small example follows.
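A minimal illustration (the class and field names are made up):

public class Account {

    // Data is hidden from other classes
    private double balance;

    // Public methods are the only way other classes can reach the data
    public double getBalance() {
        return balance;
    }

    public void deposit(double amount) {
        if (amount > 0) {
            balance += amount;
        }
    }
}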

Abstract class vs Concrete class: An abstract class is a class declared using the abstract keyword and has abstract methods; abstract methods do not have an implementation or body. A concrete class extends the abstract class and implements the methods from the super (abstract) class; a concrete class does not use the abstract keyword. Selenium framework classes extend the concrete class and instantiate it to use the implemented methods from the concrete class. A small example follows.
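A sketch of that layering (the class names are made up; a real Selenium framework would typically hold a WebDriver in the base class):

public class AbstractVsConcrete {

    // Abstract class: declared with the abstract keyword, has abstract methods
    abstract static class BasePage {
        abstract void open();
    }

    // Concrete class: extends the abstract class and implements its methods
    static class LoginPage extends BasePage {
        @Override
        void open() {
            System.out.println("Opening the login page");
        }
    }

    // A test class instantiates the concrete class to use the implemented methods
    public static void main(String[] args) {
        BasePage page = new LoginPage();
        page.open();
    }
}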


Selenium: Handling Captcha using Sikuli

The purpose of a captcha is to prevent automation. However, some level of captcha automation can be done using Sikuli.

Sikuli is a library that works based on image recognition. It can capture an image captcha containing numbers or letters, read the text from the image, and store it as a string value so the automation can continue.
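A rough sketch of the idea, assuming the SikuliX Java API (Screen/Match with OCR-based text()); the reference image file name is hypothetical and OCR accuracy on captchas is limited:

import org.sikuli.script.Match;
import org.sikuli.script.Screen;

public class CaptchaReader {
    public static void main(String[] args) throws Exception {
        Screen screen = new Screen();
        // Locate the captcha region on screen using a reference image (hypothetical file)
        Match captchaRegion = screen.find("captcha-box.png");
        // Read the text inside the matched region via SikuliX's OCR support
        String captchaText = captchaRegion.text();
        System.out.println("Captcha text: " + captchaText);
    }
}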

Selenium: Handling dynamic elements

Dynamic objects are elements whose id or other properties change during page load or other user actions.

How to handle dynamic objects:

1. Use XPath axes methods: use child, parent, or sibling elements to write the XPath.
2. Use a dynamic XPath with functions such as contains() or starts-with() (note that ends-with() is XPath 2.0 and is not supported by most browsers' XPath 1.0 engines). An example follows.
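For example (the attribute values are illustrative):

// Dynamic id such as "username_83721": match on the stable part only
WebElement byContains = driver.findElement(
        By.xpath("//input[contains(@id, 'username_')]"));

// Or anchor on a stable sibling/parent using XPath axes
WebElement bySibling = driver.findElement(
        By.xpath("//label[text()='User name']/following-sibling::input"));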

What is SelectorsHub?

It is a browser extension that helps auto-suggest XPath or CSS selectors.

How to set up SelectorsHub:

1. Download and install it from www.selectorshub.com for your browser type: Chrome, Opera, Edge, Firefox.
2. After adding the extension to the browser, it will appear as a browser toolbar item.
3. After restarting the browser, inspect any WebElement; SelectorsHub will be displayed in the Elements tab on the right.
4. Start typing an XPath or CSS selector, and you will notice SelectorsHub auto-suggesting the XPath or CSS selector.

Ref: www.selectorshub.com

Switch to a frame:
driver.switchTo().frame("frame name or frame index");

Switch back from a frame:
driver.switchTo().defaultContent();

Different Selenium locators:

By.id()
By.name()
By.tagName()
By.className()
By.xpath()
By.cssSelector()
By.linkText()
By.partialLinkText()

Different WebDriver exceptions:

1. WebDriverException
2. TimeoutException
3. NoAlertPresentException
4. NoSuchWindowException
5. NoSuchElementException

XPath vs CSS selectors:

XPath:
1. XPath (XML Path) is used to find an element in the HTML DOM.
2. XPath is a locator type used in Selenium.
3. It has a high success rate in finding elements.
4. XPath is slower in IE.

CSS (Cascading Style Sheets):
1. The CSS selector, a Selenium locator type, is used to find elements using the style language.
2. It is faster in all browsers, but may not work on certain browser elements.

Selenium 4 highlights:

1. The W3C standard protocol is used by the Selenium 4 WebDriver to communicate with the browser, which makes the communication standardised and removes the need for API encoding/decoding.
2. Logging and tracing are improved for better monitoring.
3. Better documentation of Selenium features.
4. Opening two URLs in two browser tabs is enabled.
5. Relative locators such as toRightOf(), above(), near() have been added; an example follows.
6. Opera and PhantomJS support are removed.
7. Selenium Grid is optimised by fixing open issues.
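A quick sketch of the relative locators (the page elements and ids are hypothetical):

import static org.openqa.selenium.support.locators.RelativeLocator.with;

// Find the input field that sits to the right of the "Username" label
WebElement usernameLabel = driver.findElement(By.id("username-label"));
WebElement usernameInput = driver.findElement(
        with(By.tagName("input")).toRightOf(usernameLabel));

// Other relative locators: above(), below(), near(), toLeftOf()
WebElement submit = driver.findElement(
        with(By.tagName("button")).below(usernameInput));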

For Chrome:
DesiredCapabilities caps = new DesiredCapabilities();

caps.setCapability("browserName", "chrome");
caps.setCapability("browserVersion", "80.0");
caps.setCapability("platformName", "win10");

WebDriver driver = new ChromeDriver(caps);
// Pass the capabilities as an argument to the driver object

For Firefox:
DesiredCapabilities caps = new DesiredCapabilities();

caps.setCapability("browserName", "firefox");
caps.setCapability("browserVersion", "80.0");
caps.setCapability("platformName", "win10");

WebDriver driver = new FirefoxDriver(caps);
// Pass the capabilities as an argument to the driver object


getCurrentUrl()   // retrieves the current URL of the webpage
getPageSource()   // retrieves the current page source of the webpage
getText()         // retrieves the text of the specified web element
getAttribute()    // retrieves the value of the specified attribute
getTitle()        // retrieves the current title of the webpage

The driver will switch to a specific frame using its name, ID, or index:

driver.switchTo().frame("frameName value");
or
driver.switchTo().frame("ID value");
or
driver.switchTo().frame(0);

Fluent Wait in Selenium is used to define the maximum time for the WebDriver to wait for a condition, as well as the frequency with which to check the condition before throwing an exception, as in the snippet below.
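A typical FluentWait setup (snippet; uses java.time.Duration and org.openqa.selenium.support.ui.FluentWait, and the locator is illustrative):

Wait<WebDriver> wait = new FluentWait<>(driver)
        .withTimeout(Duration.ofSeconds(30))    // maximum time to wait for the condition
        .pollingEvery(Duration.ofSeconds(5))    // how often the condition is checked
        .ignoring(NoSuchElementException.class);

// The condition: find a dynamically loaded element
WebElement element = wait.until(d -> d.findElement(By.id("dynamic-element")));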

Handle multiple browser windows and tabs

getWindowHandle()   // retrieves the handle of the current window
getWindowHandles()  // retrieves the handles of all windows opened by the driver

getWindowHandles() vs getWindowHandle(): the plural form returns a Set of all open window handles, while the singular form returns only the current window's handle. A switching example is shown after the close()/quit() note below.

driver.close(); // closes the current browser window
driver.quit(); //This method Closes all browser windows opened by the WebDriver
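A typical pattern for switching to a newly opened window or tab (snippet; assumes a second window has just been opened):

// Remember the current window, then switch to the newly opened one
String originalWindow = driver.getWindowHandle();
for (String handle : driver.getWindowHandles()) {
    if (!handle.equals(originalWindow)) {
        driver.switchTo().window(handle);
    }
}
// ... work in the new window, then close it and switch back
driver.close();
driver.switchTo().window(originalWindow);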

DevOps tool chain and important tools and frameworks

  1. Source code management:
    GitHub, BitBucket, GitLab
  2. Programming Languages:
    Java, Python,JavaScript,
    Typescript
  3. Build management tools:
    Gradle, Maven, Ant
  4. Configuration management:
    Ansible, Chef, Puppet,
    Salt Stack
  5. Container:
    Docker, LXC, RKT
  6. Container orchestrators:
    Kubernetes, Docker swarm,
    Openshift, NoMad
  7. Infrastructure Provisioning:
    Terraform, Azure template,
    AWS cloud formation,
    Google Deployment Manager
  8. Monitoring :
    DataDog, Grafana, Zabbix,
    Prometheus, Checkmk,
    New Relic
  9. Logging:
    Splunk, ELK, Graylog
  10. Clouds:
    Azure, AWS, GCP, OpenStack ,
    IBM Bluemix, Alicloud
  11. CI/CD:
    Jenkins, Travis CI, Circle CI
    TeamCity, AWS CodePipeline,
    Google Cloudbuild, Gitlab CI
    Bitbucket pipeline, Github action
  12. Web Server:
    Apache, Nginx, IIS,
    Jetty, Tomcat
  13. Caching Server:
    MemCache, Redis
  14. NoSQL Database:
    MongoDB, Cassandra,
    Google Datastore,
    AWS DynamoDB
  15. SQL Database:
    Oracle, MySQL, MS SQL Server,
    PostgreSQL

Kubernetes concepts, cluster, nodes, docker container, image, Dockerfile

Kubernetes concepts to remember:

1. Kubernetes cluster: a combination of a large number of node machines.

2. Node machines: where the Kubernetes software is installed and Docker containers are hosted.

3. Docker container: each node machine runs a large number of Docker containers.

4. Docker image: Docker instructions are built and stored as an image.

5. Dockerfile: Docker instructions are written in a declarative syntax and stored in a Dockerfile.

Docker more info:

  1. A Docker image can be run in terminal interactive mode when the container is started.
  2. While running in terminal interactive mode, any program runtime or filesystem content can be added to the live container.
  3. A commit on the container can create another fresh image.
  4. The fresh image from the container can be tagged for easy use.
  5. The fresh image will now have Ubuntu plus the additional filesystem and runtime (like a Java runtime) associated with it.