// PerformanceTests/stress-test-data-imports.js
// JavaScript | 273 lines | 206 code | 38 blank | 29 comment | 34 complexity | dd4c1a59db655736539482ed428d931f MD5 | raw file
/******************
This test provides a scenario for stress testing data imports to the transfer-service:
1.- Determine how your system will behave under extreme conditions.
2.- Determine what is the maximum capacity of your system in terms of users or throughput.
3.- Determine the breaking point of your system and its failure mode.
4.- Determine if your system will recover without manual intervention after the stress test is over.
*******************/
import http from 'k6/http';
import { check, sleep, group, fail } from 'k6';
import { Rate, Trend } from 'k6/metrics';
import { TryToGetNewAccessToken } from './Resources/authenticate.js';

// Read a configuration value from the environment, keeping the fallback only
// when the variable is not defined at all (an empty string counts as defined,
// matching the original `typeof !== 'undefined'` checks).
function envOrDefault(name, fallback) {
    return typeof __ENV[name] !== 'undefined' ? __ENV[name] : fallback;
}

//Default transfer-service host url (override with TRANSFER_SERVICE_HOSTNAME)
let BASE_URL = envOrDefault('TRANSFER_SERVICE_HOSTNAME', "http://127.0.0.1:93");
//k6 CLOUD project id (override with K6_PID)
let PID = envOrDefault('K6_PID', 3497348);
//K6 cloud test name
let TEST_NAME = envOrDefault('TEST_NAME', "stress-test-data-imports");
//Dataspace targeted by the imports
let DATASPACE = envOrDefault('DATASPACE', "stable");
//Keycloak parameters
let ACCESS_TOKEN_URL = envOrDefault('KEYCLOAK_AT_URL', "https://keycloak.siscc.org/auth/realms/OECD/protocol/openid-connect/token");
// Shared token state.
// NOTE(review): these are module-local bindings; the imported
// TryToGetNewAccessToken() lives in another module and cannot rebind them
// directly — confirm how the refreshed token is propagated in authenticate.js.
let currentAccessToken = "";
let accessTokenExpiry = "";
//Keycloak credentials
let USERNAME = envOrDefault('USERNAME', "");
let PASSWORD = envOrDefault('PASSWORD', "");
//login to keycloak only when both credentials were supplied
let getToken = true;
if (typeof __ENV.USERNAME === 'undefined' || typeof __ENV.PASSWORD === 'undefined') {
    getToken = false;
}
//Test-case definitions (override with TEST_CASES_FILE)
let INPUT_FILE = envOrDefault('TEST_CASES_FILE', "./Resources/test-cases-data-imports.json");
//Load test cases from json file
const TEST_CASES = JSON.parse(open(INPUT_FILE));
//Pre-open every input file: k6 only allows open() in the init context.
for (const testCase of TEST_CASES) {
    if (testCase.format !== "sdmx") {
        testCase.data = open(`./Resources/Data/${testCase.dataFile}`, "b");
    }
    if (testCase.format === "excel") {
        testCase.edd = open(`./Resources/Data/${testCase.eddFile}`, "b");
    }
}
// Custom metrics: completion rate and (time-valued) duration of data imports.
const importRate = new Rate('data_import_completed');
const importTrend = new Trend('data_import_time', true);

// p(95) duration limits in milliseconds for each format/size combination.
const importTypeLimits = {
    csv_small: 20000,       // less than 20 seconds
    csv_medium: 70000,      // less than 70 seconds
    csv_large: 150000,      // less than 150 seconds
    xml_small: 20000,
    xml_medium: 70000,
    xml_large: 150000,
    sdmx_small: 20000,
    sdmx_medium: 70000,
    sdmx_large: 150000,
    excel_extraSmall: 5000, // less than 5 seconds
    excel_small: 20000,
    excel_medium: 70000,
    excel_large: 150000,
};

// Build the threshold table instead of spelling out every key by hand.
const thresholds = {
    "checks": ['rate>0.99'],                 // more than 99% success rate on import requests
    "data_import_completed": ['rate>0.90'],  // more than 90% success rate of transactions imported
};
for (const [importType, limit] of Object.entries(importTypeLimits)) {
    thresholds[`data_import_time{import_type:${importType}}`] = [`p(95)<${limit}`];
}
for (const datasetSize of ['extraSmall', 'small', 'medium', 'large', 'extraLarge']) {
    thresholds[`data_import_time{datasetSize:${datasetSize}}`] = ["p(95)<150000"];
}

export let options = {
    setupTimeout: "10s",
    ext: {
        loadimpact: {
            projectID: PID, //k6 CLOUD project id
            name: TEST_NAME
        }
    },
    //Target = number of max users to scale to
    stages: [
        { duration: '40s', target: 1 }, // below normal load
        { duration: '90s', target: 1 },
        { duration: '40s', target: 2 }, // normal load
        { duration: '90s', target: 2 },
        { duration: '40s', target: 3 }, // around the breaking point
        { duration: '90s', target: 3 },
        { duration: '40s', target: 5 }, // beyond the breaking point
        { duration: '90s', target: 5 },
        { duration: '20s', target: 1 }, // scale down. Recovery stage.
        { duration: '4m', target: 1 },  // continue at 1 users to collect pending imports that are still being processed
    ],
    thresholds: thresholds,
    //iterations: TEST_CASES.length,
    //vus: TEST_CASES.length,
};
// k6 setup stage: verify the transfer-service is reachable before the test
// starts, aborting the whole run otherwise.
export function setup() {
    const healthCheck = http.get(`${BASE_URL}/health`);
    if (healthCheck.status !== 200) {
        fail(`Error: the transfer-service {${BASE_URL}/health} is not responding.`);
    }
    console.log(`Testing the transfer-service {${BASE_URL}} version ${healthCheck.json().service.details.version}`);
}
// One VU iteration: submit a random data-import test case to the
// transfer-service, then poll its transaction status until it completes,
// is aborted by the service, or a 10-minute client-side timeout elapses.
export default function () {
    //Get a new access token if the current token has expired
    TryToGetNewAccessToken();

    // Pick a random test case so concurrent VUs mix formats and sizes.
    const testCase = TEST_CASES[Math.floor(Math.random() * TEST_CASES.length)];

    //Submit data import request
    const headers = {
        'Accept': 'application/json',
        'Authorization': `Bearer ${currentAccessToken}`,
    };

    let method = "/1.2/import/sdmxFile";
    const data = { 'dataspace': DATASPACE };

    //Import from SDMX source
    if (testCase.format === "sdmx") {
        data.filepath = testCase.sdmxSource;
        //Workaround - K6 only supports multipart/form-data requests if there is a file in the request.
        data.file = http.file("", "dummyFile.csv");
        console.log(`Importing from url: ${data.filepath}`);
    }
    //Import from Excel (data workbook + EDD description file)
    else if (testCase.format === "excel") {
        method = "/1.2/import/excel";
        data.eddFile = http.file(testCase.edd, testCase.eddFile);
        data.excelFile = http.file(testCase.data, testCase.dataFile);
        console.log(`Importing excel file: ${testCase.eddFile}`);
    }
    //Import from CSV and XML
    else {
        data.file = http.file(testCase.data, testCase.dataFile);
        console.log(`Importing from file: ${testCase.dataFile}`);
    }

    const res = http.post(
        `${BASE_URL}${method}`,
        data,
        { headers: headers },
    );
    sleep(1);//1s
    console.log(`import status:${res.status}`);

    check(res, {
        'is status 200': (r) => r.status === 200
    });

    const startTime = new Date().getTime();

    if (res.status === 200) {
        console.log(`import message:${res.json().message}`);

        // The response message embeds the numeric transaction id.
        const transactionID = res.json().message.match(/\d+/g);

        //Wait for the transfer-service to process the import
        waitForInitialProcessing(testCase.size);
        waitForImportResult(testCase, transactionID, startTime);
    }
    else {
        // The submission itself failed; count it as a failed import.
        importRate.add(false);

        if (res.status >= 500) {
            console.log(`import message:${res.body}`);
        }
    }
}

// Sleep long enough that an import of the given dataset size has plausibly
// been processed before status polling begins.
function waitForInitialProcessing(size) {
    if (size === "extraSmall")
        sleep(2);//2s
    else if (size === "small")
        sleep(5);//5s
    else if (size === "medium")
        sleep(40);//40s
    else if (size === "large")
        sleep(180);//3m
    else if (size === "extraLarge")
        sleep(360);//6m
    else
        sleep(10);//10s
}

// Record the outcome of one import in the custom metrics, tagged by
// format/size and by dataset size.
function recordImport(testCase, actualTime, completed) {
    importTrend.add(actualTime, { import_type: `${testCase.format}_${testCase.size}` });
    importTrend.add(actualTime, { datasetSize: `${testCase.datasetSize}` });
    importRate.add(completed);
}

// Poll the transaction status until it reaches a terminal state or the
// client-side timeout (measured from submission) is exceeded.
function waitForImportResult(testCase, transactionID, startTime) {
    const timeOutTime = 600000;//10min
    do {
        //Get a new access token if the current token has expired
        TryToGetNewAccessToken();

        const data = {
            'dataspace': DATASPACE,
            'id': parseInt(transactionID, 10)
        };
        const headers = {
            'Accept': 'application/json',
            'Authorization': 'Bearer ' + currentAccessToken,
        };

        const res = http.post(
            `${BASE_URL}/1.2/status/request`,
            data,
            { headers: headers }
        );
        sleep(1);//1s

        const elapsed = new Date().getTime() - startTime;
        if (res.status === 200) {
            const body = res.json();
            // Service-reported execution time. On the client-side timeout path
            // executionEnd may still be unset, yielding NaN (as in the
            // original implementation) — the timeout is what marks failure.
            const actualTime = Date.parse(body.executionEnd) - Date.parse(body.executionStart);
            if (body.executionStatus === "Completed") {
                //The import was completed
                recordImport(testCase, actualTime, true);
                return;
            }
            if (body.executionStatus === "TimedOutAborted") {
                //The import was TimedOutAborted
                recordImport(testCase, actualTime, false);
                return;
            }
            if (elapsed >= timeOutTime) {
                //The import timed out on our side while still pending
                recordImport(testCase, actualTime, false);
                console.log(`timed out ${transactionID}`);
                return;
            }
        }
        else if (elapsed >= timeOutTime) {
            // Bug fix: the loop previously spun forever when the status
            // endpoint never returned 200; give up after the timeout.
            importRate.add(false);
            console.log(`timed out ${transactionID}`);
            return;
        }
        sleep(10);//10s
    } while (true);
}